11#include "ruby/internal/config.h"
15#ifdef HAVE_STDATOMIC_H
16 #include <stdatomic.h>
20#include "debug_counter.h"
22#include "internal/class.h"
23#include "internal/compar.h"
24#include "internal/hash.h"
25#include "internal/numeric.h"
26#include "internal/proc.h"
27#include "internal/random.h"
28#include "internal/variable.h"
29#include "internal/set_table.h"
30#include "internal/struct.h"
35#include "insns_info.inc"
41 int argc,
const VALUE *argv,
int priv);
51ruby_vm_special_exception_copy(
VALUE exc)
54 rb_obj_copy_ivar(e, exc);
62 VALUE mesg = rb_ec_vm_ptr(ec)->special_exceptions[ruby_error_sysstack];
63 ec->raised_flag = RAISED_STACKOVERFLOW;
65 VALUE at = rb_ec_backtrace_object(ec);
66 mesg = ruby_vm_special_exception_copy(mesg);
71 EC_JUMP_TAG(ec, TAG_RAISE);
74NORETURN(
static void vm_stackoverflow(
void));
79 ec_stack_overflow(GET_EC(), TRUE);
86 rb_bug(
"system stack overflow during GC. Faulty native extension?");
88 if (crit >= rb_stack_overflow_fatal) {
89 ec->raised_flag = RAISED_STACKOVERFLOW;
90 ec->errinfo = rb_ec_vm_ptr(ec)->special_exceptions[ruby_error_stackfatal];
91 EC_JUMP_TAG(ec, TAG_RAISE);
93 ec_stack_overflow(ec, crit < rb_stack_overflow_signal);
100callable_class_p(
VALUE klass)
102#if VM_CHECK_MODE >= 2
103 if (!klass)
return FALSE;
131 VM_ASSERT(IMEMO_TYPE_P((
VALUE)cme, imemo_ment),
"imemo_type:%s", rb_imemo_name(imemo_type((
VALUE)cme)));
133 if (callable_class_p(cme->defined_class)) {
143vm_check_frame_detail(
VALUE type,
int req_block,
int req_me,
int req_cref,
VALUE specval,
VALUE cref_or_me,
int is_cframe,
const rb_iseq_t *iseq)
145 unsigned int magic = (
unsigned int)(
type & VM_FRAME_MAGIC_MASK);
146 enum imemo_type cref_or_me_type = imemo_env;
149 cref_or_me_type = imemo_type(cref_or_me);
151 if (
type & VM_FRAME_FLAG_BMETHOD) {
155 if (req_block && (
type & VM_ENV_FLAG_LOCAL) == 0) {
156 rb_bug(
"vm_push_frame: specval (%p) should be a block_ptr on %x frame", (
void *)specval, magic);
158 if (!req_block && (
type & VM_ENV_FLAG_LOCAL) != 0) {
159 rb_bug(
"vm_push_frame: specval (%p) should not be a block_ptr on %x frame", (
void *)specval, magic);
163 if (cref_or_me_type != imemo_ment) {
164 rb_bug(
"vm_push_frame: (%s) should be method entry on %x frame", rb_obj_info(cref_or_me), magic);
168 if (req_cref && cref_or_me_type != imemo_cref) {
169 rb_bug(
"vm_push_frame: (%s) should be CREF on %x frame", rb_obj_info(cref_or_me), magic);
172 if (cref_or_me !=
Qfalse && cref_or_me_type != imemo_cref) {
173 if (((
type & VM_FRAME_FLAG_LAMBDA) || magic == VM_FRAME_MAGIC_IFUNC || magic == VM_FRAME_MAGIC_DUMMY) && (cref_or_me_type == imemo_ment)) {
177 rb_bug(
"vm_push_frame: (%s) should be false or cref on %x frame", rb_obj_info(cref_or_me), magic);
183 if (cref_or_me_type == imemo_ment) {
186 if (!callable_method_entry_p(me)) {
187 rb_bug(
"vm_push_frame: ment (%s) should be callable on %x frame.", rb_obj_info(cref_or_me), magic);
191 if ((
type & VM_FRAME_MAGIC_MASK) == VM_FRAME_MAGIC_DUMMY) {
192 VM_ASSERT(iseq == NULL ||
194 RUBY_VM_NORMAL_ISEQ_P(iseq)
198 VM_ASSERT(is_cframe == !RUBY_VM_NORMAL_ISEQ_P(iseq));
208 VALUE given_magic =
type & VM_FRAME_MAGIC_MASK;
211#define CHECK(magic, req_block, req_me, req_cref, is_cframe) \
213 vm_check_frame_detail(type, req_block, req_me, req_cref, \
214 specval, cref_or_me, is_cframe, iseq); \
216 switch (given_magic) {
218 CHECK(VM_FRAME_MAGIC_METHOD, TRUE, TRUE, FALSE, FALSE);
219 CHECK(VM_FRAME_MAGIC_CLASS, TRUE, FALSE, TRUE, FALSE);
220 CHECK(VM_FRAME_MAGIC_TOP, TRUE, FALSE, TRUE, FALSE);
221 CHECK(VM_FRAME_MAGIC_CFUNC, TRUE, TRUE, FALSE, TRUE);
222 CHECK(VM_FRAME_MAGIC_BLOCK, FALSE, FALSE, FALSE, FALSE);
223 CHECK(VM_FRAME_MAGIC_IFUNC, FALSE, FALSE, FALSE, TRUE);
224 CHECK(VM_FRAME_MAGIC_EVAL, FALSE, FALSE, FALSE, FALSE);
225 CHECK(VM_FRAME_MAGIC_RESCUE, FALSE, FALSE, FALSE, FALSE);
226 CHECK(VM_FRAME_MAGIC_DUMMY, TRUE, FALSE, FALSE, FALSE);
228 rb_bug(
"vm_push_frame: unknown type (%x)", (
unsigned int)given_magic);
/* Sentinel value compared against VM stack slots to detect clobbering
 * (see the `sp[0] != vm_stack_canary` check in rb_vm_check_canary);
 * `vm_stack_canary_was_born` records whether it has been initialized. */
233static VALUE vm_stack_canary;
234static bool vm_stack_canary_was_born =
false;
241 unsigned int pos = 0;
242 while (pos < ISEQ_BODY(iseq)->iseq_size) {
243 int opcode = rb_vm_insn_addr2opcode((
void *)ISEQ_BODY(iseq)->iseq_encoded[pos]);
244 unsigned int next_pos = pos + insn_len(opcode);
245 if (ISEQ_BODY(iseq)->iseq_encoded + next_pos == pc) {
250 rb_bug(
"failed to find the previous insn");
259 if (! LIKELY(vm_stack_canary_was_born)) {
262 else if ((
VALUE *)reg_cfp == ec->vm_stack + ec->vm_stack_size) {
266 else if (! (iseq = GET_ISEQ())) {
269 else if (LIKELY(sp[0] != vm_stack_canary)) {
278 const VALUE *orig = rb_iseq_original_iseq(iseq);
279 const VALUE iseqw = rb_iseqw_new(iseq);
281 const char *stri = rb_str_to_cstr(inspection);
282 const VALUE disasm = rb_iseq_disasm(iseq);
283 const char *strd = rb_str_to_cstr(disasm);
284 const ptrdiff_t pos = previous_insn_index(iseq, GET_PC());
285 const enum ruby_vminsn_type insn = (
enum ruby_vminsn_type)orig[pos];
286 const char *name = insn_name(insn);
292 "We are killing the stack canary set by %s, "
293 "at %s@pc=%"PRIdPTR
"\n"
294 "watch out the C stack trace.\n"
296 name, stri, pos, strd);
297 rb_bug(
"see above.");
/* NOTE(review): the #if/#else surrounding these lines is elided in this
 * extract — the first definition forwards to the real checker; the two
 * empty definitions are presumably the release-build no-op arm. Confirm
 * against the full file. */
299#define vm_check_canary(ec, sp) rb_vm_check_canary(ec, sp)
302#define vm_check_canary(ec, sp)
303#define vm_check_frame(a, b, c, d)
308vm_push_frame_debug_counter_inc(
315 RB_DEBUG_COUNTER_INC(frame_push);
317 if (RUBY_VM_END_CONTROL_FRAME(ec) != prev_cfp) {
318 const bool curr = VM_FRAME_RUBYFRAME_P(reg_cfp);
319 const bool prev = VM_FRAME_RUBYFRAME_P(prev_cfp);
322 RB_DEBUG_COUNTER_INC(frame_R2R);
325 RB_DEBUG_COUNTER_INC(frame_R2C);
330 RB_DEBUG_COUNTER_INC(frame_C2R);
333 RB_DEBUG_COUNTER_INC(frame_C2C);
338 switch (
type & VM_FRAME_MAGIC_MASK) {
339 case VM_FRAME_MAGIC_METHOD: RB_DEBUG_COUNTER_INC(frame_push_method);
return;
340 case VM_FRAME_MAGIC_BLOCK: RB_DEBUG_COUNTER_INC(frame_push_block);
return;
341 case VM_FRAME_MAGIC_CLASS: RB_DEBUG_COUNTER_INC(frame_push_class);
return;
342 case VM_FRAME_MAGIC_TOP: RB_DEBUG_COUNTER_INC(frame_push_top);
return;
343 case VM_FRAME_MAGIC_CFUNC: RB_DEBUG_COUNTER_INC(frame_push_cfunc);
return;
344 case VM_FRAME_MAGIC_IFUNC: RB_DEBUG_COUNTER_INC(frame_push_ifunc);
return;
345 case VM_FRAME_MAGIC_EVAL: RB_DEBUG_COUNTER_INC(frame_push_eval);
return;
346 case VM_FRAME_MAGIC_RESCUE: RB_DEBUG_COUNTER_INC(frame_push_rescue);
return;
347 case VM_FRAME_MAGIC_DUMMY: RB_DEBUG_COUNTER_INC(frame_push_dummy);
return;
350 rb_bug(
"unreachable");
353#define vm_push_frame_debug_counter_inc(ec, cfp, t)
/* Accessor returning the stack-canary value declared above.
 * (Return type and braces are elided in this extract.) */
358rb_vm_stack_canary(
void)
361 return vm_stack_canary;
/* Compile-time checks pinning the env data layout relative to ep:
 * ME/CREF at ep[-2], specval at ep[-1], flags at ep[-0]. */
367STATIC_ASSERT(VM_ENV_DATA_INDEX_ME_CREF, VM_ENV_DATA_INDEX_ME_CREF == -2);
368STATIC_ASSERT(VM_ENV_DATA_INDEX_SPECVAL, VM_ENV_DATA_INDEX_SPECVAL == -1);
369STATIC_ASSERT(VM_ENV_DATA_INDEX_FLAGS, VM_ENV_DATA_INDEX_FLAGS == -0);
385 vm_check_frame(
type, specval, cref_or_me, iseq);
386 VM_ASSERT(local_size >= 0);
389 CHECK_VM_STACK_OVERFLOW0(cfp, sp, local_size + stack_max);
390 vm_check_canary(ec, sp);
395 for (
int i=0; i < local_size; i++) {
422 #if defined HAVE_DECL_ATOMIC_SIGNAL_FENCE && HAVE_DECL_ATOMIC_SIGNAL_FENCE
423 atomic_signal_fence(memory_order_seq_cst);
431 vm_push_frame_debug_counter_inc(ec, cfp,
type);
439 if (VMDEBUG == 2) SDR();
441 ec->cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
448 VALUE flags = ep[VM_ENV_DATA_INDEX_FLAGS];
450 if (VMDEBUG == 2) SDR();
452 RUBY_VM_CHECK_INTS(ec);
453 ec->cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
455 return flags & VM_FRAME_FLAG_FINISH;
461 vm_pop_frame(ec, ec->cfp, ec->cfp->ep);
469 rb_iseq_t *dmy_iseq = rb_iseq_alloc_with_dummy_path(fname);
473 VM_FRAME_MAGIC_DUMMY | VM_ENV_FLAG_LOCAL | VM_FRAME_FLAG_FINISH,
475 VM_BLOCK_HANDLER_NONE,
482 return (
VALUE)dmy_iseq;
/* Builds the "wrong number of arguments" message for an arity
 * mismatch, appending "..max" when a range of arities is accepted.
 * NOTE(review): the code between original lines 489 and 497 (the
 * fixed-arity / open-ended branches, closing paren of the message,
 * and the exception construction/return) is elided in this extract. */
487rb_arity_error_new(
int argc,
int min,
int max)
489 VALUE err_mess = rb_sprintf(
"wrong number of arguments (given %d, expected %d", argc, min);
497 rb_str_catf(err_mess,
"..%d", max);
504rb_error_arity(
int argc,
int min,
int max)
511NOINLINE(
static void vm_env_write_slowpath(
const VALUE *ep,
int index,
VALUE v));
/* Slow path for storing `v` into env slot ep[index] when the env
 * requires a GC write barrier: remember the env object for GC, force
 * the write, then clear the WB-required flag so later writes can take
 * the fast path.  (Return type and braces are elided in this extract.) */
514vm_env_write_slowpath(
const VALUE *ep,
int index,
VALUE v)
517 rb_gc_writebarrier_remember(VM_ENV_ENVVAL(ep));
518 VM_FORCE_WRITE(&ep[index], v);
519 VM_ENV_FLAGS_UNSET(ep, VM_ENV_FLAG_WB_REQUIRED);
520 RB_DEBUG_COUNTER_INC(lvar_set_slowpath);
/* Env write with write-barrier check: when VM_ENV_FLAG_WB_REQUIRED is
 * clear, store directly via VM_STACK_ENV_WRITE; otherwise defer to
 * vm_env_write_slowpath.  (The braces/else between the two arms are
 * elided in this extract.) */
525vm_env_write(
const VALUE *ep,
int index,
VALUE v)
527 VALUE flags = ep[VM_ENV_DATA_INDEX_FLAGS];
528 if (LIKELY((flags & VM_ENV_FLAG_WB_REQUIRED) == 0)) {
529 VM_STACK_ENV_WRITE(ep, index, v);
532 vm_env_write_slowpath(ep, index, v);
/* Exported wrapper that simply forwards to vm_env_write. */
537rb_vm_env_write(
const VALUE *ep,
int index,
VALUE v)
539 vm_env_write(ep, index, v);
/* Body fragment of rb_vm_bh_to_procval (signature elided): converts a
 * block handler into a Proc VALUE by dispatching on its handler type.
 * NOTE(review): the body of the VM_BLOCK_HANDLER_NONE arm is elided
 * here — presumably it returns Qnil; confirm against the full file. */
545 if (block_handler == VM_BLOCK_HANDLER_NONE) {
549 switch (vm_block_handler_type(block_handler)) {
550 case block_handler_type_iseq:
551 case block_handler_type_ifunc:
552 return rb_vm_make_proc(ec, VM_BH_TO_CAPT_BLOCK(block_handler),
rb_cProc);
553 case block_handler_type_symbol:
554 return rb_sym_to_proc(VM_BH_TO_SYMBOL(block_handler));
555 case block_handler_type_proc:
556 return VM_BH_TO_PROC(block_handler);
558 VM_UNREACHABLE(rb_vm_bh_to_procval);
/* Sanity check used under VM_ASSERT: validates that `svar` is an imemo
 * of an acceptable type, aborting with rb_bug otherwise.  (The switch
 * cases between original lines 570 and 579 are elided in this extract.) */
567vm_svar_valid_p(
VALUE svar)
570 switch (imemo_type(svar)) {
579 rb_bug(
"vm_svar_valid_p: unknown type: %s", rb_obj_info(svar));
589 if (lep && (ec == NULL || ec->root_lep != lep)) {
590 svar = lep[VM_ENV_DATA_INDEX_ME_CREF];
593 svar = ec->root_svar;
596 VM_ASSERT(svar ==
Qfalse || vm_svar_valid_p(svar));
604 VM_ASSERT(vm_svar_valid_p((
VALUE)svar));
606 if (lep && (ec == NULL || ec->root_lep != lep)) {
607 vm_env_write(lep, VM_ENV_DATA_INDEX_ME_CREF, (
VALUE)svar);
610 RB_OBJ_WRITE(rb_ec_thread_ptr(ec)->self, &ec->root_svar, svar);
617 const struct vm_svar *svar = lep_svar(ec, lep);
622 case VM_SVAR_LASTLINE:
623 return svar->lastline;
624 case VM_SVAR_BACKREF:
625 return svar->backref;
627 const VALUE ary = svar->others;
642 struct vm_svar *svar = IMEMO_NEW(
struct vm_svar, imemo_svar, obj);
653 struct vm_svar *svar = lep_svar(ec, lep);
656 lep_svar_write(ec, lep, svar = svar_new((
VALUE)svar));
660 case VM_SVAR_LASTLINE:
663 case VM_SVAR_BACKREF:
667 VALUE ary = svar->others;
683 val = lep_svar_get(ec, lep, key);
686 VALUE backref = lep_svar_get(ec, lep, VM_SVAR_BACKREF);
703 rb_bug(
"unexpected back-ref");
716 VALUE backref = lep_svar_get(ec, lep, VM_SVAR_BACKREF);
726 return rb_reg_last_defined(backref);
728 rb_bug(
"unexpected back-ref");
732 nth = (int)(
type >> 1);
/* Extracts a method entry from an ep ME/CREF slot value, if present.
 * Qfalse means "no entry"; any other value must be a T_IMEMO, and an
 * svar in a non-local slot is a bug.  (The switch cases between
 * original lines 747 and 758 are elided in this extract.) */
739check_method_entry(
VALUE obj,
int can_be_svar)
741 if (obj ==
Qfalse)
return NULL;
744 if (!
RB_TYPE_P(obj,
T_IMEMO)) rb_bug(
"check_method_entry: unknown type: %s", rb_obj_info(obj));
747 switch (imemo_type(obj)) {
758 rb_bug(
"check_method_entry: svar should not be there:");
/* Body fragment (enclosing signature elided): walks the env chain
 * from cfp->ep toward the local env, returning the first method entry
 * found; only the final local slot may legitimately hold an svar
 * (can_be_svar = TRUE). */
767 const VALUE *ep = cfp->ep;
770 while (!VM_ENV_LOCAL_P(ep)) {
771 if ((me = check_method_entry(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) != NULL)
return me;
772 ep = VM_ENV_PREV_EP(ep);
775 return check_method_entry(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
781 switch (me->def->type) {
782 case VM_METHOD_TYPE_ISEQ:
783 return me->def->body.iseq.
iseqptr;
792 switch (me->def->type) {
793 case VM_METHOD_TYPE_ISEQ:
794 return me->def->body.iseq.
cref;
800#if VM_CHECK_MODE == 0
804check_cref(
VALUE obj,
int can_be_svar)
806 if (obj ==
Qfalse)
return NULL;
809 if (!
RB_TYPE_P(obj,
T_IMEMO)) rb_bug(
"check_cref: unknown type: %s", rb_obj_info(obj));
812 switch (imemo_type(obj)) {
823 rb_bug(
"check_method_entry: svar should not be there:");
/* Finds the innermost CREF reachable from `ep` by walking the env
 * chain — same traversal shape as the method-entry walk above, but
 * using check_cref.  (Return type, braces and the `cref` declaration
 * are elided in this extract.) */
830vm_env_cref(
const VALUE *ep)
834 while (!VM_ENV_LOCAL_P(ep)) {
835 if ((cref = check_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) != NULL)
return cref;
836 ep = VM_ENV_PREV_EP(ep);
839 return check_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
843is_cref(
const VALUE v,
int can_be_svar)
846 switch (imemo_type(v)) {
/* TRUE when some slot in the env chain holds a bare CREF; used by
 * vm_cref_replace_with_duplicated_cref (below) to decide whether
 * duplication applies.  (Return type and braces elided.) */
859vm_env_cref_by_cref(
const VALUE *ep)
861 while (!VM_ENV_LOCAL_P(ep)) {
862 if (is_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE))
return TRUE;
863 ep = VM_ENV_PREV_EP(ep);
865 return is_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
869cref_replace_with_duplicated_cref_each_frame(
const VALUE *vptr,
int can_be_svar,
VALUE parent)
871 const VALUE v = *vptr;
875 switch (imemo_type(v)) {
878 new_cref = vm_cref_dup(cref);
883 VM_FORCE_WRITE(vptr, (
VALUE)new_cref);
888 return cref_replace_with_duplicated_cref_each_frame(&((
struct vm_svar *)v)->
cref_or_me, FALSE, v);
892 rb_bug(
"cref_replace_with_duplicated_cref_each_frame: unreachable");
901vm_cref_replace_with_duplicated_cref(
const VALUE *ep)
903 if (vm_env_cref_by_cref(ep)) {
907 while (!VM_ENV_LOCAL_P(ep)) {
908 envval = VM_ENV_ESCAPED_P(ep) ? VM_ENV_ENVVAL(ep) :
Qfalse;
909 if ((cref = cref_replace_with_duplicated_cref_each_frame(&ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE, envval)) != NULL) {
912 ep = VM_ENV_PREV_EP(ep);
914 envval = VM_ENV_ESCAPED_P(ep) ? VM_ENV_ENVVAL(ep) :
Qfalse;
915 return cref_replace_with_duplicated_cref_each_frame(&ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE, envval);
918 rb_bug(
"vm_cref_dup: unreachable");
923vm_get_cref(
const VALUE *ep)
931 rb_bug(
"vm_get_cref: unreachable");
936rb_vm_get_cref(
const VALUE *ep)
938 return vm_get_cref(ep);
949 return vm_get_cref(cfp->ep);
953vm_get_const_key_cref(
const VALUE *ep)
959 if (RCLASS_SINGLETON_P(CREF_CLASS(cref)) ||
960 RCLASS_CLONED_P(CREF_CLASS(cref)) ) {
963 cref = CREF_NEXT(cref);
976 #define ADD_NEW_CREF(new_cref) \
977 if (new_cref_tail) { \
978 RB_OBJ_WRITE(new_cref_tail, &new_cref_tail->next, new_cref); \
981 new_cref_head = new_cref; \
983 new_cref_tail = new_cref;
987 if (CREF_CLASS(cref) == old_klass) {
988 new_cref = vm_cref_new_use_prev(new_klass, METHOD_VISI_UNDEF, FALSE, cref, FALSE);
989 ADD_NEW_CREF(new_cref);
990 return new_cref_head;
992 new_cref = vm_cref_new_use_prev(CREF_CLASS(cref), METHOD_VISI_UNDEF, FALSE, cref, FALSE);
993 cref = CREF_NEXT(cref);
994 ADD_NEW_CREF(new_cref);
1000 return new_cref_head;
1009 prev_cref = vm_env_cref(ep);
1015 prev_cref = vm_env_cref(cfp->ep);
1019 return vm_cref_new(klass, METHOD_VISI_PUBLIC, FALSE, prev_cref, pushed_by_eval, singleton);
1023vm_get_cbase(
const VALUE *ep)
1025 const rb_cref_t *cref = vm_get_cref(ep);
1027 return CREF_CLASS_FOR_DEFINITION(cref);
1031vm_get_const_base(
const VALUE *ep)
1033 const rb_cref_t *cref = vm_get_cref(ep);
1036 if (!CREF_PUSHED_BY_EVAL(cref)) {
1037 return CREF_CLASS_FOR_DEFINITION(cref);
1039 cref = CREF_NEXT(cref);
1046vm_check_if_namespace(
VALUE klass)
1049 rb_raise(
rb_eTypeError,
"%+"PRIsVALUE
" is not a class/module", klass);
1054vm_ensure_not_refinement_module(
VALUE self)
1057 rb_warn(
"not defined at the refinement, but at the outer class/module");
1073 if (
NIL_P(orig_klass) && allow_nil) {
1075 const rb_cref_t *root_cref = vm_get_cref(ec->cfp->ep);
1079 while (root_cref && CREF_PUSHED_BY_EVAL(root_cref)) {
1080 root_cref = CREF_NEXT(root_cref);
1083 while (cref && CREF_NEXT(cref)) {
1084 if (CREF_PUSHED_BY_EVAL(cref)) {
1088 klass = CREF_CLASS(cref);
1090 cref = CREF_NEXT(cref);
1092 if (!
NIL_P(klass)) {
1096 if ((ce = rb_const_lookup(klass,
id))) {
1097 rb_const_warn_if_deprecated(ce, klass,
id);
1100 if (am == klass)
break;
1102 if (is_defined)
return 1;
1103 if (rb_autoloading_value(klass,
id, &av, NULL))
return av;
1105 goto search_continue;
1112 if (UNLIKELY(!rb_ractor_main_p())) {
1114 rb_raise(rb_eRactorIsolationError,
1115 "can not access non-shareable objects in constant %"PRIsVALUE
"::%s by non-main ractor.",
rb_class_path(klass), rb_id2name(
id));
1126 if (root_cref && !
NIL_P(CREF_CLASS(root_cref))) {
1127 klass = vm_get_iclass(ec->cfp, CREF_CLASS(root_cref));
1141 vm_check_if_namespace(orig_klass);
1143 return rb_public_const_defined_from(orig_klass,
id);
1146 return rb_public_const_get_from(orig_klass,
id);
1154 return vm_get_ev_const(ec, orig_klass,
id, allow_nil ==
Qtrue, 0);
1162 int allow_nil = TRUE;
1163 if (segments[0] == idNULL) {
1168 while (segments[idx]) {
1169 ID id = segments[idx++];
1170 val = vm_get_ev_const(ec, val,
id, allow_nil, 0);
1183 rb_bug(
"vm_get_cvar_base: no cref");
1186 while (CREF_NEXT(cref) &&
1187 (
NIL_P(CREF_CLASS(cref)) || RCLASS_SINGLETON_P(CREF_CLASS(cref)) ||
1188 CREF_PUSHED_BY_EVAL(cref) || CREF_SINGLETON(cref))) {
1189 cref = CREF_NEXT(cref);
1191 if (top_level_raise && !CREF_NEXT(cref)) {
1195 klass = vm_get_iclass(cfp, CREF_CLASS(cref));
1203ALWAYS_INLINE(
static void fill_ivar_cache(
const rb_iseq_t *iseq,
IVC ic,
const struct rb_callcache *cc,
int is_attr, attr_index_t index, shape_id_t shape_id));
/* Records an ivar (index, shape_id) pair in the appropriate inline
 * cache: the call cache `cc` for attr accessors (is_attr), the
 * instruction cache `ic` otherwise.  (The if/else structure between
 * the two *_set calls is elided in this extract.) */
1205fill_ivar_cache(
const rb_iseq_t *iseq,
IVC ic,
const struct rb_callcache *cc,
int is_attr, attr_index_t index, shape_id_t shape_id)
1208 vm_cc_attr_index_set(cc, index, shape_id);
1211 vm_ic_attr_index_set(iseq, ic, index, shape_id);
/* A value reached through a container is "incidentally shareable"
 * across Ractors iff the container itself is unshareable (cond false)
 * or the value is shareable as well. */
1215#define ractor_incidental_shareable_p(cond, val) \
1216 (!(cond) || rb_ractor_shareable_p(val))
1217#define ractor_object_incidental_shareable_p(obj, val) \
1218 ractor_incidental_shareable_p(rb_ractor_shareable_p(obj), val)
1227 return default_value;
1237 if (UNLIKELY(!rb_ractor_main_p())) {
1245 if (default_value ==
Qundef) {
1253 fields_obj = RCLASS_WRITABLE_FIELDS_OBJ(obj);
1257 fields_obj = rb_obj_fields(obj,
id);
1261 return default_value;
1266 shape_id_t shape_id = RBASIC_SHAPE_ID_FOR_READ(fields_obj);
1267 VALUE *ivar_list = rb_imemo_fields_ptr(fields_obj);
1269 shape_id_t cached_id;
1273 vm_cc_atomic_shape_and_index(cc, &cached_id, &index);
1276 vm_ic_atomic_shape_and_index(ic, &cached_id, &index);
1279 if (LIKELY(cached_id == shape_id)) {
1282 if (index == ATTR_INDEX_NOT_SET) {
1283 return default_value;
1286 val = ivar_list[index];
1287#if USE_DEBUG_COUNTER
1288 RB_DEBUG_COUNTER_INC(ivar_get_ic_hit);
1291 RB_DEBUG_COUNTER_INC(ivar_get_obj_hit);
1297#if USE_DEBUG_COUNTER
1299 if (cached_id != INVALID_SHAPE_ID) {
1300 RB_DEBUG_COUNTER_INC(ivar_get_cc_miss_set);
1303 RB_DEBUG_COUNTER_INC(ivar_get_cc_miss_unset);
1307 if (cached_id != INVALID_SHAPE_ID) {
1308 RB_DEBUG_COUNTER_INC(ivar_get_ic_miss_set);
1311 RB_DEBUG_COUNTER_INC(ivar_get_ic_miss_unset);
1314 RB_DEBUG_COUNTER_INC(ivar_get_ic_miss);
1317 RB_DEBUG_COUNTER_INC(ivar_get_obj_miss);
1321 if (UNLIKELY(rb_shape_too_complex_p(shape_id))) {
1325 RUBY_ASSERT(table == rb_imemo_fields_complex_tbl(fields_obj));
1327 if (!st_lookup(table,
id, &val)) {
1328 val = default_value;
1332 shape_id_t previous_cached_id = cached_id;
1333 if (rb_shape_get_iv_index_with_hint(shape_id,
id, &index, &cached_id)) {
1336 if (cached_id != previous_cached_id) {
1337 fill_ivar_cache(iseq, ic, cc, is_attr, index, cached_id);
1340 if (index == ATTR_INDEX_NOT_SET) {
1341 val = default_value;
1345 val = ivar_list[index];
1351 vm_cc_attr_index_initialize(cc, shape_id);
1354 vm_ic_attr_index_initialize(ic, shape_id);
1357 val = default_value;
1362 if (!UNDEF_P(default_value)) {
1370 RB_DEBUG_COUNTER_INC(ivar_get_ic_miss);
1373 return rb_attr_get(obj,
id);
/* Like fill_ivar_cache above: stores (index, next_shape_id) into the
 * call cache for attr accessors, or the instruction cache otherwise;
 * the shape must not be too-complex.  (The if/else around the two
 * *_set calls is elided in this extract.) */
1381populate_cache(attr_index_t index, shape_id_t next_shape_id,
ID id,
const rb_iseq_t *iseq,
IVC ic,
const struct rb_callcache *cc,
bool is_attr)
1383 RUBY_ASSERT(!rb_shape_too_complex_p(next_shape_id));
1387 vm_cc_attr_index_set(cc, index, next_shape_id);
1390 vm_ic_attr_index_set(iseq, ic, index, next_shape_id);
1402 RB_DEBUG_COUNTER_INC(ivar_set_ic_miss);
1404 rb_check_frozen(obj);
1406 attr_index_t index = rb_ivar_set_index(obj,
id, val);
1407 shape_id_t next_shape_id = RBASIC_SHAPE_ID(obj);
1409 if (!rb_shape_too_complex_p(next_shape_id)) {
1410 populate_cache(index, next_shape_id,
id, iseq, ic, cc, is_attr);
1413 RB_DEBUG_COUNTER_INC(ivar_set_obj_miss);
1423 return vm_setivar_slowpath(obj,
id, val, iseq, ic, NULL,
false);
1429 return vm_setivar_slowpath(obj,
id, val, NULL, NULL, cc,
true);
1432NOINLINE(
static VALUE vm_setivar_class(
VALUE obj,
ID id,
VALUE val, shape_id_t dest_shape_id, attr_index_t index));
1434vm_setivar_class(
VALUE obj,
ID id,
VALUE val, shape_id_t dest_shape_id, attr_index_t index)
1436 if (UNLIKELY(!rb_ractor_main_p())) {
1440 VALUE fields_obj = RCLASS_WRITABLE_FIELDS_OBJ(obj);
1441 if (UNLIKELY(!fields_obj)) {
1445 shape_id_t shape_id = RBASIC_SHAPE_ID(fields_obj);
1448 if (shape_id == dest_shape_id) {
1449 RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
1451 else if (dest_shape_id != INVALID_SHAPE_ID) {
1452 if (RSHAPE_DIRECT_CHILD_P(shape_id, dest_shape_id) && RSHAPE_EDGE_NAME(dest_shape_id) ==
id && RSHAPE_CAPACITY(shape_id) == RSHAPE_CAPACITY(dest_shape_id)) {
1453 RUBY_ASSERT(index < RSHAPE_CAPACITY(dest_shape_id));
1463 RB_OBJ_WRITE(fields_obj, &rb_imemo_fields_ptr(fields_obj)[index], val);
1465 if (shape_id != dest_shape_id) {
1466 RBASIC_SET_SHAPE_ID(obj, dest_shape_id);
1467 RBASIC_SET_SHAPE_ID(fields_obj, dest_shape_id);
1470 RB_DEBUG_COUNTER_INC(ivar_set_ic_hit);
1475NOINLINE(
static VALUE vm_setivar_default(
VALUE obj,
ID id,
VALUE val, shape_id_t dest_shape_id, attr_index_t index));
1477vm_setivar_default(
VALUE obj,
ID id,
VALUE val, shape_id_t dest_shape_id, attr_index_t index)
1479 shape_id_t shape_id = RBASIC_SHAPE_ID(obj);
1482 if (shape_id == dest_shape_id) {
1483 RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
1485 else if (dest_shape_id != INVALID_SHAPE_ID) {
1486 if (RSHAPE_DIRECT_CHILD_P(shape_id, dest_shape_id) && RSHAPE_EDGE_NAME(dest_shape_id) ==
id && RSHAPE_CAPACITY(shape_id) == RSHAPE_CAPACITY(dest_shape_id)) {
1487 RUBY_ASSERT(index < RSHAPE_CAPACITY(dest_shape_id));
1497 VALUE fields_obj = rb_obj_fields(obj,
id);
1499 RB_OBJ_WRITE(fields_obj, &rb_imemo_fields_ptr(fields_obj)[index], val);
1501 if (shape_id != dest_shape_id) {
1502 RBASIC_SET_SHAPE_ID(obj, dest_shape_id);
1503 RBASIC_SET_SHAPE_ID(fields_obj, dest_shape_id);
1506 RB_DEBUG_COUNTER_INC(ivar_set_ic_hit);
1512vm_setivar(
VALUE obj,
ID id,
VALUE val, shape_id_t dest_shape_id, attr_index_t index)
1520 shape_id_t shape_id = RBASIC_SHAPE_ID(obj);
1521 RUBY_ASSERT(dest_shape_id == INVALID_SHAPE_ID || !rb_shape_too_complex_p(dest_shape_id));
1523 if (LIKELY(shape_id == dest_shape_id)) {
1524 RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
1527 else if (dest_shape_id != INVALID_SHAPE_ID) {
1528 if (RSHAPE_DIRECT_CHILD_P(shape_id, dest_shape_id) && RSHAPE_EDGE_NAME(dest_shape_id) ==
id && RSHAPE_CAPACITY(shape_id) == RSHAPE_CAPACITY(dest_shape_id)) {
1529 RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
1531 RBASIC_SET_SHAPE_ID(obj, dest_shape_id);
1533 RUBY_ASSERT(index < RSHAPE_CAPACITY(dest_shape_id));
1548 RB_DEBUG_COUNTER_INC(ivar_set_ic_hit);
1549 RB_DEBUG_COUNTER_INC(ivar_set_obj_hit);
1555 RB_DEBUG_COUNTER_INC(ivar_set_ic_miss_noobject);
1567 VALUE defined_class = 0;
1571 defined_class =
RBASIC(defined_class)->klass;
1574 struct rb_id_table *rb_cvc_tbl = RCLASS_CVC_TBL(defined_class);
1576 rb_bug(
"the cvc table should be set");
1580 if (!rb_id_table_lookup(rb_cvc_tbl,
id, &ent_data)) {
1581 rb_bug(
"should have cvar cache entry");
1586 ent->global_cvar_state = GET_GLOBAL_CVAR_STATE();
1602 cref = vm_get_cref(GET_EP());
1604 if (ic->entry && ic->entry->global_cvar_state == GET_GLOBAL_CVAR_STATE() && ic->entry->cref == cref && LIKELY(rb_ractor_main_p())) {
1605 RB_DEBUG_COUNTER_INC(cvar_read_inline_hit);
1607 VALUE v = rb_ivar_lookup(ic->entry->class_value,
id,
Qundef);
1613 VALUE klass = vm_get_cvar_base(cref, reg_cfp, 1);
1615 return update_classvariable_cache(iseq, klass,
id, cref, ic);
1621 return vm_getclassvariable(iseq, cfp,
id, ic);
1628 cref = vm_get_cref(GET_EP());
1630 if (ic->entry && ic->entry->global_cvar_state == GET_GLOBAL_CVAR_STATE() && ic->entry->cref == cref && LIKELY(rb_ractor_main_p())) {
1631 RB_DEBUG_COUNTER_INC(cvar_write_inline_hit);
1633 rb_class_ivar_set(ic->entry->class_value,
id, val);
1637 VALUE klass = vm_get_cvar_base(cref, reg_cfp, 1);
1641 update_classvariable_cache(iseq, klass,
id, cref, ic);
1647 vm_setclassvariable(iseq, cfp,
id, val, ic);
1653 return vm_getivar(obj,
id, iseq, ic, NULL, FALSE,
Qnil);
1664 shape_id_t dest_shape_id;
1666 vm_ic_atomic_shape_and_index(ic, &dest_shape_id, &index);
1668 if (UNLIKELY(UNDEF_P(vm_setivar(obj,
id, val, dest_shape_id, index)))) {
1674 if (!UNDEF_P(vm_setivar_class(obj,
id, val, dest_shape_id, index))) {
1679 if (!UNDEF_P(vm_setivar_default(obj,
id, val, dest_shape_id, index))) {
1683 vm_setivar_slowpath_ivar(obj,
id, val, iseq, ic);
1690 vm_setinstancevariable(iseq, obj,
id, val, ic);
1699 ec->tag->state = RUBY_TAG_FATAL;
1702 ec->tag->state = TAG_THROW;
1704 else if (THROW_DATA_P(err)) {
1705 ec->tag->state = THROW_DATA_STATE((
struct vm_throw_data *)err);
1708 ec->tag->state = TAG_RAISE;
1715 const int flag,
const VALUE throwobj)
1723 else if (state == TAG_BREAK) {
1725 const VALUE *ep = GET_EP();
1726 const rb_iseq_t *base_iseq = GET_ISEQ();
1727 escape_cfp = reg_cfp;
1729 while (ISEQ_BODY(base_iseq)->
type != ISEQ_TYPE_BLOCK) {
1730 if (ISEQ_BODY(escape_cfp->iseq)->type == ISEQ_TYPE_CLASS) {
1731 escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
1732 ep = escape_cfp->ep;
1733 base_iseq = escape_cfp->iseq;
1736 ep = VM_ENV_PREV_EP(ep);
1737 base_iseq = ISEQ_BODY(base_iseq)->parent_iseq;
1738 escape_cfp = rb_vm_search_cf_from_ep(ec, escape_cfp, ep);
1739 VM_ASSERT(escape_cfp->iseq == base_iseq);
1743 if (VM_FRAME_LAMBDA_P(escape_cfp)) {
1749 ep = VM_ENV_PREV_EP(ep);
1751 while (escape_cfp < eocfp) {
1752 if (escape_cfp->ep == ep) {
1753 const rb_iseq_t *
const iseq = escape_cfp->iseq;
1754 const VALUE epc = escape_cfp->pc - ISEQ_BODY(iseq)->iseq_encoded;
1759 for (i=0; i < ct->size; i++) {
1761 UNALIGNED_MEMBER_PTR(ct, entries[i]);
1763 if (entry->type == CATCH_TYPE_BREAK &&
1764 entry->iseq == base_iseq &&
1765 entry->start < epc && entry->end >= epc) {
1766 if (entry->cont == epc) {
1775 escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
1780 rb_vm_localjump_error(
"break from proc-closure", throwobj, TAG_BREAK);
1783 else if (state == TAG_RETRY) {
1784 const VALUE *ep = VM_ENV_PREV_EP(GET_EP());
1786 escape_cfp = rb_vm_search_cf_from_ep(ec, reg_cfp, ep);
1788 else if (state == TAG_RETURN) {
1789 const VALUE *current_ep = GET_EP();
1790 const VALUE *target_ep = NULL, *target_lep, *ep = current_ep;
1791 int in_class_frame = 0;
1793 escape_cfp = reg_cfp;
1796 while (!VM_ENV_LOCAL_P(ep)) {
1797 if (VM_ENV_FLAGS(ep, VM_FRAME_FLAG_LAMBDA) && target_ep == NULL) {
1800 ep = VM_ENV_PREV_EP(ep);
1804 while (escape_cfp < eocfp) {
1805 const VALUE *lep = VM_CF_LEP(escape_cfp);
1811 if (lep == target_lep &&
1812 VM_FRAME_RUBYFRAME_P(escape_cfp) &&
1813 ISEQ_BODY(escape_cfp->iseq)->type == ISEQ_TYPE_CLASS) {
1818 if (lep == target_lep) {
1819 if (VM_FRAME_LAMBDA_P(escape_cfp)) {
1821 if (in_class_frame) {
1826 const VALUE *tep = current_ep;
1828 while (target_lep != tep) {
1829 if (escape_cfp->ep == tep) {
1831 if (tep == target_ep) {
1835 goto unexpected_return;
1838 tep = VM_ENV_PREV_EP(tep);
1842 else if (VM_FRAME_RUBYFRAME_P(escape_cfp)) {
1843 switch (ISEQ_BODY(escape_cfp->iseq)->type) {
1845 case ISEQ_TYPE_MAIN:
1847 if (in_class_frame)
goto unexpected_return;
1848 if (target_ep == NULL) {
1852 goto unexpected_return;
1856 case ISEQ_TYPE_EVAL: {
1858 enum rb_iseq_type t = ISEQ_BODY(is)->type;
1859 while (t == ISEQ_TYPE_RESCUE || t == ISEQ_TYPE_ENSURE || t == ISEQ_TYPE_EVAL) {
1860 if (!(is = ISEQ_BODY(is)->parent_iseq))
break;
1861 t = ISEQ_BODY(is)->type;
1863 toplevel = t == ISEQ_TYPE_TOP || t == ISEQ_TYPE_MAIN;
1866 case ISEQ_TYPE_CLASS:
1875 if (escape_cfp->ep == target_lep && ISEQ_BODY(escape_cfp->iseq)->type == ISEQ_TYPE_METHOD) {
1876 if (target_ep == NULL) {
1880 goto unexpected_return;
1884 escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
1887 rb_vm_localjump_error(
"unexpected return", throwobj, TAG_RETURN);
1893 rb_bug(
"isns(throw): unsupported throw type");
1896 ec->tag->state = state;
1897 return (
VALUE)THROW_DATA_NEW(throwobj, escape_cfp, state);
1902 rb_num_t throw_state,
VALUE throwobj)
1904 const int state = (int)(throw_state & VM_THROW_STATE_MASK);
1905 const int flag = (int)(throw_state & VM_THROW_NO_ESCAPE_FLAG);
1908 return vm_throw_start(ec, reg_cfp, state, flag, throwobj);
1911 return vm_throw_continue(ec, throwobj);
1918 return vm_throw(ec, reg_cfp, throw_state, throwobj);
1924 int is_splat = flag & 0x01;
1927 const VALUE obj = ary;
1939 if (num + is_splat == 0) {
1942 else if (flag & 0x02) {
1947 for (i = 0; i < num -
len; i++) {
1952 for (j = 0; i < num; i++, j++) {
1974 for (; i < num -
len; i++) {
1978 for (rb_num_t j = 0; i < num; i++, j++) {
1979 *cfp->sp++ = ptr[
len - j - 1];
1983 for (rb_num_t j = 0; j < num; j++) {
1984 *cfp->sp++ = ptr[num - j - 1];
1999 int initial_capa = 2;
2001#if VM_CHECK_MODE > 0
2002 ccs->debug_sig = ~(
VALUE)ccs;
2004 ccs->capa = initial_capa;
2009 rb_managed_id_table_insert(cc_tbl, mid, (
VALUE)ccs);
2017 if (! vm_cc_markable(cc)) {
2021 if (UNLIKELY(ccs->len == ccs->capa)) {
2024 ccs = ruby_xrealloc(ccs, vm_ccs_alloc_size(ccs->capa));
2025#if VM_CHECK_MODE > 0
2026 ccs->debug_sig = ~(
VALUE)ccs;
2029 rb_managed_id_table_insert(cc_tbl, mid, (
VALUE)ccs);
2031 VM_ASSERT(ccs->len < ccs->capa);
2033 const int pos = ccs->len++;
2034 ccs->entries[pos].argc = vm_ci_argc(ci);
2035 ccs->entries[pos].flag = vm_ci_flag(ci);
2038 if (RB_DEBUG_COUNTER_SETMAX(ccs_maxlen, ccs->len)) {
2044#if VM_CHECK_MODE > 0
2048 ruby_debug_printf(
"ccs:%p (%d,%d)\n", (
void *)ccs, ccs->len, ccs->capa);
2049 for (
int i=0; i<ccs->len; i++) {
2050 ruby_debug_printf(
"CCS CI ID:flag:%x argc:%u\n",
2051 ccs->entries[i].flag,
2052 ccs->entries[i].argc);
2053 rp(ccs->entries[i].cc);
2060 VM_ASSERT(vm_ccs_p(ccs));
2061 VM_ASSERT(ccs->len <= ccs->capa);
2063 for (
int i=0; i<ccs->len; i++) {
2066 VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
2067 VM_ASSERT(vm_cc_class_check(cc, klass));
2068 VM_ASSERT(vm_cc_check_cme(cc, ccs->cme));
2069 VM_ASSERT(!vm_cc_super_p(cc));
2070 VM_ASSERT(!vm_cc_refinement_p(cc));
2081 ASSERT_vm_locking();
2083 if (rb_multi_ractor_p()) {
2084 if (RCLASS_WRITABLE_CC_TBL(klass) != cc_tbl) {
2091 rb_managed_id_table_lookup(cc_tbl, mid, (
VALUE *)&ccs);
2093 if (!ccs || !METHOD_ENTRY_INVALIDATED(ccs->cme)) {
2098 VALUE new_table = rb_vm_cc_table_dup(cc_tbl);
2099 rb_vm_cc_table_delete(new_table, mid);
2100 RB_OBJ_ATOMIC_WRITE(klass, &RCLASS_WRITABLE_CC_TBL(klass), new_table);
2103 rb_vm_cc_table_delete(cc_tbl, mid);
2110 ASSERT_vm_locking();
2112 VALUE cc_tbl = RCLASS_WRITABLE_CC_TBL(klass);
2113 const VALUE original_cc_table = cc_tbl;
2117 cc_tbl = rb_vm_cc_table_create(1);
2119 else if (rb_multi_ractor_p()) {
2120 cc_tbl = rb_vm_cc_table_dup(cc_tbl);
2123 RB_DEBUG_COUNTER_INC(cc_not_found_in_ccs);
2129 cme = UNDEFINED_METHOD_ENTRY_P(cme) ? NULL : cme;
2131 VM_ASSERT(cme == rb_callable_method_entry(klass, mid));
2134 cme = rb_callable_method_entry(klass, mid);
2137 VM_ASSERT(cme == NULL || IMEMO_TYPE_P(cme, imemo_ment));
2141 VM_ASSERT(vm_cc_cme(&vm_empty_cc) == NULL);
2142 return &vm_empty_cc;
2145 VM_ASSERT(cme == rb_callable_method_entry(klass, mid));
2152 if (!LIKELY(rb_managed_id_table_lookup(cc_tbl, mid, (
VALUE *)&ccs))) {
2154 ccs = vm_ccs_create(klass, cc_tbl, mid, cme);
2158 cme = rb_check_overloaded_cme(cme, ci);
2160 const struct rb_callcache *cc = vm_cc_new(klass, cme, vm_call_general, cc_type_normal);
2161 vm_ccs_push(cc_tbl, mid, ccs, ci, cc);
2163 VM_ASSERT(vm_cc_cme(cc) != NULL);
2164 VM_ASSERT(cme->called_id == mid);
2165 VM_ASSERT(vm_cc_cme(cc)->called_id == mid);
2167 if (original_cc_table != cc_tbl) {
2168 RB_OBJ_ATOMIC_WRITE(klass, &RCLASS_WRITABLE_CC_TBL(klass), cc_tbl);
2180 cc_tbl = RUBY_ATOMIC_VALUE_LOAD(RCLASS_WRITABLE_CC_TBL(klass));
2187 if (rb_managed_id_table_lookup(cc_tbl, mid, (
VALUE *)&ccs)) {
2188 const int ccs_len = ccs->len;
2190 if (UNLIKELY(METHOD_ENTRY_INVALIDATED(ccs->cme))) {
2192 vm_evict_cc(klass, cc_tbl, mid);
2197 VM_ASSERT(vm_ccs_verify(ccs, mid, klass));
2202 unsigned int argc = vm_ci_argc(ci);
2203 unsigned int flag = vm_ci_flag(ci);
2205 for (
int i=0; i<ccs_len; i++) {
2206 unsigned int ccs_ci_argc = ccs->entries[i].argc;
2207 unsigned int ccs_ci_flag = ccs->entries[i].flag;
2208 const struct rb_callcache *ccs_cc = ccs->entries[i].cc;
2210 VM_ASSERT(IMEMO_TYPE_P(ccs_cc, imemo_callcache));
2212 if (ccs_ci_argc == argc && ccs_ci_flag == flag) {
2213 RB_DEBUG_COUNTER_INC(cc_found_in_ccs);
2215 VM_ASSERT(vm_cc_cme(ccs_cc)->called_id == mid);
2216 VM_ASSERT(ccs_cc->klass == klass);
2217 VM_ASSERT(!METHOD_ENTRY_INVALIDATED(vm_cc_cme(ccs_cc)));
2233 const ID mid = vm_ci_mid(ci);
2235 const struct rb_callcache *cc = vm_lookup_cc(klass, ci, mid);
2241 if (rb_multi_ractor_p()) {
2244 cc = vm_lookup_cc(klass, ci, mid);
2248 cc = vm_populate_cc(klass, ci, mid);
2262 cc = vm_search_cc(klass, ci);
2265 VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
2266 VM_ASSERT(cc == vm_cc_empty() || cc->klass == klass);
2267 VM_ASSERT(cc == vm_cc_empty() || callable_method_entry_p(vm_cc_cme(cc)));
2268 VM_ASSERT(cc == vm_cc_empty() || !METHOD_ENTRY_INVALIDATED(vm_cc_cme(cc)));
2269 VM_ASSERT(cc == vm_cc_empty() || vm_cc_cme(cc)->called_id == vm_ci_mid(ci));
2277#if USE_DEBUG_COUNTER
2281 const struct rb_callcache *cc = rb_vm_search_method_slowpath(cd->ci, klass);
2283#if OPT_INLINE_METHOD_CACHE
2287 if (cd_owner && cc != empty_cc) {
2291#if USE_DEBUG_COUNTER
2292 if (!old_cc || old_cc == empty_cc) {
2294 RB_DEBUG_COUNTER_INC(mc_inline_miss_empty);
2296 else if (old_cc == cc) {
2297 RB_DEBUG_COUNTER_INC(mc_inline_miss_same_cc);
2299 else if (vm_cc_cme(old_cc) == vm_cc_cme(cc)) {
2300 RB_DEBUG_COUNTER_INC(mc_inline_miss_same_cme);
2302 else if (vm_cc_cme(old_cc) && vm_cc_cme(cc) &&
2303 vm_cc_cme(old_cc)->def == vm_cc_cme(cc)->def) {
2304 RB_DEBUG_COUNTER_INC(mc_inline_miss_same_def);
2307 RB_DEBUG_COUNTER_INC(mc_inline_miss_diff);
2312 VM_ASSERT(vm_cc_cme(cc) == NULL ||
2313 vm_cc_cme(cc)->called_id == vm_ci_mid(cd->ci));
2324#if OPT_INLINE_METHOD_CACHE
2325 if (LIKELY(vm_cc_class_check(cc, klass))) {
2326 if (LIKELY(!METHOD_ENTRY_INVALIDATED(vm_cc_cme(cc)))) {
2327 VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));
2328 RB_DEBUG_COUNTER_INC(mc_inline_hit);
2329 VM_ASSERT(vm_cc_cme(cc) == NULL ||
2330 (vm_ci_flag(cd->ci) & VM_CALL_SUPER) ||
2331 vm_cc_cme(cc)->called_id == vm_ci_mid(cd->ci));
2335 RB_DEBUG_COUNTER_INC(mc_inline_miss_invalidated);
2338 RB_DEBUG_COUNTER_INC(mc_inline_miss_klass);
2342 return vm_search_method_slowpath0(cd_owner, cd, klass);
2349 VM_ASSERT(klass !=
Qfalse);
2352 const struct rb_callcache *cc = vm_search_method_fastpath(cd_owner, cd, klass);
2353 return vm_cc_cme(cc);
2356#if __has_attribute(transparent_union)
2369 VALUE (*f10)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE);
2370 VALUE (*f11)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE);
2371 VALUE (*f12)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE);
2372 VALUE (*f13)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE);
2373 VALUE (*f14)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE);
2374 VALUE (*f15)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE);
2377# define make_cfunc_type(f) (cfunc_type){.anyargs = (VALUE (*)(ANYARGS))(f)}
2380# define make_cfunc_type(f) (cfunc_type)(f)
2390 VM_ASSERT(IMEMO_TYPE_P(me, imemo_ment));
2391 VM_ASSERT(callable_method_entry_p(me));
2393 if (me->def->type != VM_METHOD_TYPE_CFUNC) {
2397#if __has_attribute(transparent_union)
2398 return me->def->body.cfunc.func == func.anyargs;
2400 return me->def->body.cfunc.func == func;
2409 return me && METHOD_ENTRY_BASIC(me);
2415 VM_ASSERT(iseq != NULL);
2417 return check_cfunc(cme, func);
2423 return vm_method_cfunc_is(iseq, cd, recv, func);
2426#define check_cfunc(me, func) check_cfunc(me, make_cfunc_type(func))
2427#define vm_method_cfunc_is(iseq, cd, recv, func) vm_method_cfunc_is(iseq, cd, recv, make_cfunc_type(func))
2429#define EQ_UNREDEFINED_P(t) BASIC_OP_UNREDEFINED_P(BOP_EQ, t##_REDEFINED_OP_FLAG)
2461opt_equality_specialized(
VALUE recv,
VALUE obj)
2463 if (FIXNUM_2_P(recv, obj) && EQ_UNREDEFINED_P(INTEGER)) {
2464 goto compare_by_identity;
2466 else if (FLONUM_2_P(recv, obj) && EQ_UNREDEFINED_P(FLOAT)) {
2467 goto compare_by_identity;
2470 goto compare_by_identity;
2479#if MSC_VERSION_BEFORE(1300)
2483 else if (isnan(b)) {
2488 return RBOOL(a == b);
2495 return rb_str_eql_internal(obj, recv);
2500 compare_by_identity:
2501 return RBOOL(recv == obj);
2507 VM_ASSERT(cd_owner != NULL);
2509 VALUE val = opt_equality_specialized(recv, obj);
2510 if (!UNDEF_P(val))
return val;
2512 if (!vm_method_cfunc_is(cd_owner, cd, recv, rb_obj_equal)) {
2516 return RBOOL(recv == obj);
2520#undef EQ_UNREDEFINED_P
2523NOINLINE(
static VALUE opt_equality_by_mid_slowpath(
VALUE recv,
VALUE obj,
ID mid));
2526opt_equality_by_mid_slowpath(
VALUE recv,
VALUE obj,
ID mid)
2528 const struct rb_callcache *cc = gccct_method_search(GET_EC(), recv, mid, &VM_CI_ON_STACK(mid, 0, 1, NULL));
2530 if (cc && check_cfunc(vm_cc_cme(cc), rb_obj_equal)) {
2531 return RBOOL(recv == obj);
2541 VALUE val = opt_equality_specialized(recv, obj);
2542 if (!UNDEF_P(val)) {
2546 return opt_equality_by_mid_slowpath(recv, obj, mid);
2553 return opt_equality_by_mid(obj1, obj2, idEq);
2559 return opt_equality_by_mid(obj1, obj2, idEqlP);
2569 case VM_CHECKMATCH_TYPE_WHEN:
2571 case VM_CHECKMATCH_TYPE_RESCUE:
2573 rb_raise(
rb_eTypeError,
"class or module required for rescue clause");
2576 case VM_CHECKMATCH_TYPE_CASE: {
2577 return rb_vm_call_with_refinements(ec, pattern, idEqq, 1, &target,
RB_NO_KEYWORDS);
2580 rb_bug(
"check_match: unreachable");
2585#if MSC_VERSION_BEFORE(1300)
2586#define CHECK_CMP_NAN(a, b) if (isnan(a) || isnan(b)) return Qfalse;
2588#define CHECK_CMP_NAN(a, b)
2592double_cmp_lt(
double a,
double b)
2594 CHECK_CMP_NAN(a, b);
2595 return RBOOL(a < b);
2599double_cmp_le(
double a,
double b)
2601 CHECK_CMP_NAN(a, b);
2602 return RBOOL(a <= b);
2606double_cmp_gt(
double a,
double b)
2608 CHECK_CMP_NAN(a, b);
2609 return RBOOL(a > b);
2613double_cmp_ge(
double a,
double b)
2615 CHECK_CMP_NAN(a, b);
2616 return RBOOL(a >= b);
2620static inline VALUE *
2625 if (cfp->iseq && VM_FRAME_RUBYFRAME_P(cfp)) {
2626 VALUE *bp = prev_cfp->sp + ISEQ_BODY(cfp->iseq)->local_table_size + VM_ENV_DATA_SIZE;
2628 if (ISEQ_BODY(cfp->iseq)->param.flags.forwardable && VM_ENV_LOCAL_P(cfp->ep)) {
2629 int lts = ISEQ_BODY(cfp->iseq)->local_table_size;
2630 int params = ISEQ_BODY(cfp->iseq)->param.size;
2633 bp += vm_ci_argc(ci);
2636 if (ISEQ_BODY(cfp->iseq)->type == ISEQ_TYPE_METHOD || VM_FRAME_BMETHOD_P(cfp)) {
2640#if VM_DEBUG_BP_CHECK
2641 if (bp != cfp->bp_check) {
2642 ruby_debug_printf(
"bp_check: %ld, bp: %ld\n",
2643 (
long)(cfp->bp_check - GET_EC()->vm_stack),
2644 (
long)(bp - GET_EC()->vm_stack));
2645 rb_bug(
"vm_base_ptr: unreachable");
2658 return vm_base_ptr(cfp);
2673static vm_call_handler vm_call_iseq_setup_func(
const struct rb_callinfo *ci,
const int param_size,
const int local_size);
2678 RB_DEBUG_COUNTER_INC(ccf_iseq_setup_tailcall_0start);
2680 return vm_call_iseq_setup_tailcall(ec, cfp, calling, 0);
2686 RB_DEBUG_COUNTER_INC(ccf_iseq_setup_0start);
2689 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2690 int param = ISEQ_BODY(iseq)->param.size;
2691 int local = ISEQ_BODY(iseq)->local_table_size;
2692 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);
2698 return ISEQ_BODY(iseq)->param.flags.has_opt == FALSE &&
2699 ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
2700 ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
2701 ISEQ_BODY(iseq)->param.flags.has_kw == FALSE &&
2702 ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
2703 ISEQ_BODY(iseq)->param.flags.accepts_no_kwarg == FALSE &&
2704 ISEQ_BODY(iseq)->param.flags.forwardable == FALSE &&
2705 ISEQ_BODY(iseq)->param.flags.has_block == FALSE;
2709rb_iseq_only_optparam_p(
const rb_iseq_t *iseq)
2711 return ISEQ_BODY(iseq)->param.flags.has_opt == TRUE &&
2712 ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
2713 ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
2714 ISEQ_BODY(iseq)->param.flags.has_kw == FALSE &&
2715 ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
2716 ISEQ_BODY(iseq)->param.flags.accepts_no_kwarg == FALSE &&
2717 ISEQ_BODY(iseq)->param.flags.forwardable == FALSE &&
2718 ISEQ_BODY(iseq)->param.flags.has_block == FALSE;
2722rb_iseq_only_kwparam_p(
const rb_iseq_t *iseq)
2724 return ISEQ_BODY(iseq)->param.flags.has_opt == FALSE &&
2725 ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
2726 ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
2727 ISEQ_BODY(iseq)->param.flags.has_kw == TRUE &&
2728 ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
2729 ISEQ_BODY(iseq)->param.flags.forwardable == FALSE &&
2730 ISEQ_BODY(iseq)->param.flags.has_block == FALSE;
2733#define ALLOW_HEAP_ARGV (-2)
2734#define ALLOW_HEAP_ARGV_KEEP_KWSPLAT (-3)
2739 vm_check_canary(GET_EC(), cfp->sp);
2745 int argc = calling->argc;
2747 if (UNLIKELY(max_args <= ALLOW_HEAP_ARGV && len + argc > VM_ARGC_STACK_MAX)) {
2751 VALUE *argv = cfp->sp - argc;
2755 cfp->sp -= argc - 1;
2756 cfp->sp[-1] = argv_ary;
2758 calling->heap_argv = argv_ary;
2764 if (max_args >= 0 &&
len + argc > max_args) {
2772 calling->argc +=
len - (max_args - argc + 1);
2773 len = max_args - argc + 1;
2782 calling->heap_argv = 0;
2784 CHECK_VM_STACK_OVERFLOW(cfp,
len);
2786 for (i = 0; i <
len; i++) {
2787 *cfp->sp++ = ptr[i];
2799 const VALUE *
const passed_keywords = vm_ci_kwarg(ci)->keywords;
2800 const int kw_len = vm_ci_kwarg(ci)->keyword_len;
2801 const VALUE h = rb_hash_new_with_size(kw_len);
2802 VALUE *sp = cfp->sp;
2805 for (i=0; i<kw_len; i++) {
2806 rb_hash_aset(h, passed_keywords[i], (sp - kw_len)[i]);
2810 cfp->sp -= kw_len - 1;
2811 calling->argc -= kw_len - 1;
2812 calling->kw_splat = 1;
2816vm_caller_setup_keyword_hash(
const struct rb_callinfo *ci,
VALUE keyword_hash)
2819 if (keyword_hash !=
Qnil) {
2821 keyword_hash = rb_hash_dup(rb_to_hash_type(keyword_hash));
2824 else if (!IS_ARGS_KW_SPLAT_MUT(ci) && !
RHASH_EMPTY_P(keyword_hash)) {
2830 keyword_hash = rb_hash_dup(keyword_hash);
2832 return keyword_hash;
2838 const struct rb_callinfo *restrict ci,
int max_args)
2840 if (UNLIKELY(IS_ARGS_SPLAT(ci))) {
2841 if (IS_ARGS_KW_SPLAT(ci)) {
2843 VM_ASSERT(calling->kw_splat == 1);
2847 VALUE ary = cfp->sp[0];
2848 VALUE kwh = vm_caller_setup_keyword_hash(ci, cfp->sp[1]);
2851 if (vm_caller_setup_arg_splat(cfp, calling, ary, max_args))
return;
2855 if (UNLIKELY(calling->heap_argv)) {
2857 ((
struct RHash *)kwh)->basic.flags |= RHASH_PASS_AS_KEYWORDS;
2858 if (max_args != ALLOW_HEAP_ARGV_KEEP_KWSPLAT) {
2859 calling->kw_splat = 0;
2867 VM_ASSERT(calling->kw_splat == 1);
2871 calling->kw_splat = 0;
2876 VM_ASSERT(calling->kw_splat == 0);
2880 VALUE ary = cfp->sp[0];
2882 if (vm_caller_setup_arg_splat(cfp, calling, ary, max_args)) {
2887 VALUE last_hash, argv_ary;
2888 if (UNLIKELY(argv_ary = calling->heap_argv)) {
2889 if (!IS_ARGS_KEYWORD(ci) &&
2892 (((
struct RHash *)last_hash)->basic.flags & RHASH_PASS_AS_KEYWORDS)) {
2897 calling->kw_splat = 1;
2903 if (!IS_ARGS_KEYWORD(ci) &&
2904 calling->argc > 0 &&
2906 (((
struct RHash *)last_hash)->basic.flags & RHASH_PASS_AS_KEYWORDS)) {
2913 cfp->sp[-1] = rb_hash_dup(last_hash);
2914 calling->kw_splat = 1;
2920 else if (UNLIKELY(IS_ARGS_KW_SPLAT(ci))) {
2922 VM_ASSERT(calling->kw_splat == 1);
2923 VALUE kwh = vm_caller_setup_keyword_hash(ci, cfp->sp[-1]);
2928 calling->kw_splat = 0;
2934 else if (UNLIKELY(IS_ARGS_KEYWORD(ci))) {
2936 VM_ASSERT(calling->kw_splat == 0);
2942 vm_caller_setup_arg_kw(cfp, calling, ci);
2946#define USE_OPT_HIST 0
2949#define OPT_HIST_MAX 64
2950static int opt_hist[OPT_HIST_MAX+1];
2954opt_hist_show_results_at_exit(
void)
2956 for (
int i=0; i<OPT_HIST_MAX; i++) {
2957 ruby_debug_printf(
"opt_hist\t%d\t%d\n", i, opt_hist[i]);
2967 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2968 const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
2969 const int opt = calling->argc - lead_num;
2970 const int opt_num = ISEQ_BODY(iseq)->param.opt_num;
2971 const int opt_pc = (int)ISEQ_BODY(iseq)->param.opt_table[opt];
2972 const int param = ISEQ_BODY(iseq)->param.size;
2973 const int local = ISEQ_BODY(iseq)->local_table_size;
2974 const int delta = opt_num - opt;
2976 RB_DEBUG_COUNTER_INC(ccf_iseq_opt);
2979 if (opt_pc < OPT_HIST_MAX) {
2983 opt_hist[OPT_HIST_MAX]++;
2987 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), opt_pc, param - delta, local);
2995 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2996 const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
2997 const int opt = calling->argc - lead_num;
2998 const int opt_pc = (int)ISEQ_BODY(iseq)->param.opt_table[opt];
3000 RB_DEBUG_COUNTER_INC(ccf_iseq_opt);
3003 if (opt_pc < OPT_HIST_MAX) {
3007 opt_hist[OPT_HIST_MAX]++;
3011 return vm_call_iseq_setup_tailcall(ec, cfp, calling, opt_pc);
3016 VALUE *
const passed_values,
const int passed_keyword_len,
const VALUE *
const passed_keywords,
3017 VALUE *
const locals);
3024 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
3025 int param_size = ISEQ_BODY(iseq)->param.size;
3026 int local_size = ISEQ_BODY(iseq)->local_table_size;
3029 VM_ASSERT(ISEQ_BODY(iseq)->param.flags.forwardable);
3031 local_size = local_size + vm_ci_argc(calling->cd->ci);
3032 param_size = param_size + vm_ci_argc(calling->cd->ci);
3034 cfp->sp[0] = (
VALUE)calling->cd->ci;
3036 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param_size, local_size);
3046 VM_ASSERT(vm_ci_flag(ci) & VM_CALL_KWARG);
3047 RB_DEBUG_COUNTER_INC(ccf_iseq_kw1);
3049 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
3050 const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;
3052 const int ci_kw_len = kw_arg->keyword_len;
3053 const VALUE *
const ci_keywords = kw_arg->keywords;
3054 VALUE *argv = cfp->sp - calling->argc;
3055 VALUE *
const klocals = argv + kw_param->bits_start - kw_param->num;
3056 const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
3058 MEMCPY(ci_kws, argv + lead_num,
VALUE, ci_kw_len);
3059 args_setup_kw_parameters(ec, iseq, vm_cc_cme(cc), ci_kws, ci_kw_len, ci_keywords, klocals);
3061 int param = ISEQ_BODY(iseq)->param.size;
3062 int local = ISEQ_BODY(iseq)->local_table_size;
3063 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);
3070 const struct rb_callinfo *MAYBE_UNUSED(ci) = calling->cd->ci;
3073 VM_ASSERT((vm_ci_flag(ci) & VM_CALL_KWARG) == 0);
3074 RB_DEBUG_COUNTER_INC(ccf_iseq_kw2);
3076 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
3077 const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;
3078 VALUE *
const argv = cfp->sp - calling->argc;
3079 VALUE *
const klocals = argv + kw_param->bits_start - kw_param->num;
3082 for (i=0; i<kw_param->num; i++) {
3083 klocals[i] = kw_param->default_values[i];
3090 int param = ISEQ_BODY(iseq)->param.size;
3091 int local = ISEQ_BODY(iseq)->local_table_size;
3092 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);
3102 cfp->sp -= (calling->argc + 1);
3103 rb_insn_func_t func_ptr = (rb_insn_func_t)(uintptr_t)bf->func_ptr;
3104 return builtin_invoker0(ec, calling->recv, NULL, func_ptr);
3113 set_table *dup_check_table = vm->unused_block_warning_table;
3123 .v = (
VALUE)cme->def,
3127 if (!strict_unused_block) {
3128 key = (st_data_t)cme->def->original_id;
3130 if (set_table_lookup(dup_check_table, key)) {
3140 key |= (st_data_t)(k1.b[i] ^ k2.b[
SIZEOF_VALUE-1-i]) << (8 * i);
3145 fprintf(stderr,
"pc:%p def:%p\n", pc, (
void *)cme->def);
3146 fprintf(stderr,
"key:%p\n", (
void *)key);
3150 if (set_insert(dup_check_table, key)) {
3155 VALUE name = rb_gen_method_name(cme->defined_class, ISEQ_BODY(iseq)->location.base_label);
3157 if (!
NIL_P(m_loc)) {
3158 rb_warn(
"the block passed to '%"PRIsVALUE
"' defined at %"PRIsVALUE
":%"PRIsVALUE
" may be ignored",
3162 rb_warn(
"the block may be ignored because '%"PRIsVALUE
"' does not use a block", name);
3169 const rb_iseq_t *iseq,
VALUE *argv,
int param_size,
int local_size)
3174 VM_ASSERT((vm_ci_argc(ci), 1));
3175 VM_ASSERT(vm_cc_cme(cc) != NULL);
3177 if (UNLIKELY(!ISEQ_BODY(iseq)->param.flags.use_block &&
3178 calling->block_handler != VM_BLOCK_HANDLER_NONE &&
3179 !(vm_ci_flag(calling->cd->ci) & (VM_CALL_OPT_SEND | VM_CALL_SUPER)))) {
3180 warn_unused_block(vm_cc_cme(cc), iseq, (
void *)ec->cfp->pc);
3183 if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_KW_SPLAT))) {
3184 if (LIKELY(rb_simple_iseq_p(iseq))) {
3186 int lead_num = ISEQ_BODY(iseq)->param.lead_num;
3187 CALLER_SETUP_ARG(cfp, calling, ci, lead_num);
3189 if (calling->argc != lead_num) {
3190 argument_arity_error(ec, iseq, vm_cc_cme(cc), calling->argc, lead_num, lead_num);
3194 VM_ASSERT(cc == calling->cc);
3196 if (vm_call_iseq_optimizable_p(ci, cc)) {
3197 if ((iseq->body->builtin_attrs & BUILTIN_ATTR_SINGLE_NOARG_LEAF) &&
3199 VM_ASSERT(iseq->body->builtin_attrs & BUILTIN_ATTR_LEAF);
3200 vm_cc_bf_set(cc, (
void *)iseq->body->iseq_encoded[1]);
3201 CC_SET_FASTPATH(cc, vm_call_single_noarg_leaf_builtin,
true);
3204 CC_SET_FASTPATH(cc, vm_call_iseq_setup_func(ci, param_size, local_size),
true);
3209 else if (rb_iseq_only_optparam_p(iseq)) {
3212 const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
3213 const int opt_num = ISEQ_BODY(iseq)->param.opt_num;
3215 CALLER_SETUP_ARG(cfp, calling, ci, lead_num + opt_num);
3216 const int argc = calling->argc;
3217 const int opt = argc - lead_num;
3219 if (opt < 0 || opt > opt_num) {
3220 argument_arity_error(ec, iseq, vm_cc_cme(cc), argc, lead_num, lead_num + opt_num);
3223 if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_TAILCALL))) {
3224 CC_SET_FASTPATH(cc, vm_call_iseq_setup_normal_opt_start,
3225 !IS_ARGS_SPLAT(ci) && !IS_ARGS_KEYWORD(ci) &&
3226 vm_call_cacheable(ci, cc));
3229 CC_SET_FASTPATH(cc, vm_call_iseq_setup_tailcall_opt_start,
3230 !IS_ARGS_SPLAT(ci) && !IS_ARGS_KEYWORD(ci) &&
3231 vm_call_cacheable(ci, cc));
3235 VM_ASSERT((
int)ISEQ_BODY(iseq)->param.size == lead_num + opt_num);
3236 for (
int i=argc; i<lead_num + opt_num; i++) {
3239 return (
int)ISEQ_BODY(iseq)->param.opt_table[opt];
3241 else if (rb_iseq_only_kwparam_p(iseq) && !IS_ARGS_SPLAT(ci)) {
3242 const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
3243 const int argc = calling->argc;
3244 const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;
3246 if (vm_ci_flag(ci) & VM_CALL_KWARG) {
3249 if (argc - kw_arg->keyword_len == lead_num) {
3250 const int ci_kw_len = kw_arg->keyword_len;
3251 const VALUE *
const ci_keywords = kw_arg->keywords;
3253 MEMCPY(ci_kws, argv + lead_num,
VALUE, ci_kw_len);
3255 VALUE *
const klocals = argv + kw_param->bits_start - kw_param->num;
3256 args_setup_kw_parameters(ec, iseq, vm_cc_cme(cc), ci_kws, ci_kw_len, ci_keywords, klocals);
3258 CC_SET_FASTPATH(cc, vm_call_iseq_setup_kwparm_kwarg,
3259 vm_call_cacheable(ci, cc));
3264 else if (argc == lead_num) {
3266 VALUE *
const klocals = argv + kw_param->bits_start - kw_param->num;
3267 args_setup_kw_parameters(ec, iseq, vm_cc_cme(cc), NULL, 0, NULL, klocals);
3269 if (klocals[kw_param->num] ==
INT2FIX(0)) {
3271 CC_SET_FASTPATH(cc, vm_call_iseq_setup_kwparm_nokwarg,
3272 vm_call_cacheable(ci, cc));
3298 if (ISEQ_BODY(iseq)->param.flags.forwardable) {
3299 bool can_fastpath =
true;
3301 if ((vm_ci_flag(ci) & VM_CALL_FORWARDING)) {
3303 if (vm_ci_argc(ci) != vm_ci_argc(forward_cd->caller_ci)) {
3304 ci = vm_ci_new_runtime(
3311 ci = forward_cd->caller_ci;
3313 can_fastpath =
false;
3317 if (!vm_ci_markable(ci)) {
3318 ci = vm_ci_new_runtime(
3323 can_fastpath =
false;
3325 argv[param_size - 1] = (
VALUE)ci;
3326 CC_SET_FASTPATH(cc, vm_call_iseq_forwardable, can_fastpath);
3330 return setup_parameters_complex(ec, iseq, calling, ci, argv, arg_setup_method);
3357 const VALUE * lep = VM_CF_LEP(cfp);
3363 if (VM_ENV_FLAGS(lep, VM_ENV_FLAG_ESCAPED)) {
3368 iseq = rb_vm_search_cf_from_ep(ec, cfp, lep)->iseq;
3372 int local_size = ISEQ_BODY(iseq)->local_table_size + argc;
3374 const VALUE * from = lep - (local_size + VM_ENV_DATA_SIZE - 1);
3375 VALUE * to = cfp->sp - 1;
3379 CHECK_VM_STACK_OVERFLOW0(cfp, to,
RARRAY_LEN(splat));
3384 CHECK_VM_STACK_OVERFLOW0(cfp, to, argc);
3386 cfp->sp = to + argc;
3405 RB_DEBUG_COUNTER_INC(ccf_iseq_setup);
3408 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
3409 int param_size = ISEQ_BODY(iseq)->param.size;
3410 int local_size = ISEQ_BODY(iseq)->local_table_size;
3412 RUBY_ASSERT(!ISEQ_BODY(iseq)->param.flags.forwardable);
3414 const int opt_pc = vm_callee_setup_arg(ec, calling, iseq, cfp->sp - calling->argc, param_size, local_size);
3415 return vm_call_iseq_setup_2(ec, cfp, calling, opt_pc, param_size, local_size);
3421 RB_DEBUG_COUNTER_INC(ccf_iseq_setup);
3424 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
3425 int param_size = ISEQ_BODY(iseq)->param.size;
3426 int local_size = ISEQ_BODY(iseq)->local_table_size;
3428 RUBY_ASSERT(ISEQ_BODY(iseq)->param.flags.forwardable);
3431 local_size = local_size + vm_ci_argc(calling->cd->ci);
3432 param_size = param_size + vm_ci_argc(calling->cd->ci);
3434 const int opt_pc = vm_callee_setup_arg(ec, calling, iseq, cfp->sp - calling->argc, param_size, local_size);
3435 return vm_call_iseq_setup_2(ec, cfp, calling, opt_pc, param_size, local_size);
3440 int opt_pc,
int param_size,
int local_size)
3445 if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_TAILCALL))) {
3446 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), opt_pc, param_size, local_size);
3449 return vm_call_iseq_setup_tailcall(ec, cfp, calling, opt_pc);
3455 int opt_pc,
int param_size,
int local_size)
3457 const rb_iseq_t *iseq = def_iseq_ptr(me->def);
3458 VALUE *argv = cfp->sp - calling->argc;
3459 VALUE *sp = argv + param_size;
3460 cfp->sp = argv - 1 ;
3462 vm_push_frame(ec, iseq, VM_FRAME_MAGIC_METHOD | VM_ENV_FLAG_LOCAL, calling->recv,
3463 calling->block_handler, (
VALUE)me,
3464 ISEQ_BODY(iseq)->iseq_encoded + opt_pc, sp,
3465 local_size - param_size,
3466 ISEQ_BODY(iseq)->stack_max);
3475 VALUE *argv = cfp->sp - calling->argc;
3477 const rb_iseq_t *iseq = def_iseq_ptr(me->def);
3478 VALUE *src_argv = argv;
3479 VALUE *sp_orig, *sp;
3480 VALUE finish_flag = VM_FRAME_FINISHED_P(cfp) ? VM_FRAME_FLAG_FINISH : 0;
3482 if (VM_BH_FROM_CFP_P(calling->block_handler, cfp)) {
3483 struct rb_captured_block *dst_captured = VM_CFP_TO_CAPTURED_BLOCK(RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp));
3484 const struct rb_captured_block *src_captured = VM_BH_TO_CAPT_BLOCK(calling->block_handler);
3485 dst_captured->code.val = src_captured->code.val;
3486 if (VM_BH_ISEQ_BLOCK_P(calling->block_handler)) {
3487 calling->block_handler = VM_BH_FROM_ISEQ_BLOCK(dst_captured);
3490 calling->block_handler = VM_BH_FROM_IFUNC_BLOCK(dst_captured);
3494 vm_pop_frame(ec, cfp, cfp->ep);
3497 sp_orig = sp = cfp->sp;
3500 sp[0] = calling->recv;
3504 for (i=0; i < ISEQ_BODY(iseq)->param.size; i++) {
3505 *sp++ = src_argv[i];
3508 vm_push_frame(ec, iseq, VM_FRAME_MAGIC_METHOD | VM_ENV_FLAG_LOCAL | finish_flag,
3509 calling->recv, calling->block_handler, (
VALUE)me,
3510 ISEQ_BODY(iseq)->iseq_encoded + opt_pc, sp,
3511 ISEQ_BODY(iseq)->local_table_size - ISEQ_BODY(iseq)->param.size,
3512 ISEQ_BODY(iseq)->stack_max);
3520ractor_unsafe_check(
void)
3522 if (!rb_ractor_main_p()) {
3523 rb_raise(rb_eRactorUnsafeError,
"ractor unsafe method called from not main ractor");
3530 ractor_unsafe_check();
3538 ractor_unsafe_check();
3540 return (*f)(argc, argv, recv);
3546 ractor_unsafe_check();
3554 ractor_unsafe_check();
3556 return (*f)(recv, argv[0]);
3562 ractor_unsafe_check();
3564 return (*f)(recv, argv[0], argv[1]);
3570 ractor_unsafe_check();
3572 return (*f)(recv, argv[0], argv[1], argv[2]);
3578 ractor_unsafe_check();
3580 return (*f)(recv, argv[0], argv[1], argv[2], argv[3]);
3586 ractor_unsafe_check();
3587 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3588 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4]);
3594 ractor_unsafe_check();
3595 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3596 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
3602 ractor_unsafe_check();
3603 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3604 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
3610 ractor_unsafe_check();
3611 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3612 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
3618 ractor_unsafe_check();
3619 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3620 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
3626 ractor_unsafe_check();
3627 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3628 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
3634 ractor_unsafe_check();
3635 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3636 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
3642 ractor_unsafe_check();
3643 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3644 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
3650 ractor_unsafe_check();
3651 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3652 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
3658 ractor_unsafe_check();
3659 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3660 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
3666 ractor_unsafe_check();
3667 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3668 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
3682 return (*f)(argc, argv, recv);
3696 return (*f)(recv, argv[0]);
3703 return (*f)(recv, argv[0], argv[1]);
3710 return (*f)(recv, argv[0], argv[1], argv[2]);
3717 return (*f)(recv, argv[0], argv[1], argv[2], argv[3]);
3723 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3724 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4]);
3730 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3731 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
3737 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3738 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
3744 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3745 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
3751 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3752 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
3758 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3759 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
3765 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3766 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
3772 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3773 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
3779 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3780 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
3786 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3787 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
3793 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3794 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
3800 const int ov_flags = RAISED_STACKOVERFLOW;
3801 if (LIKELY(reg_cfp == ec->cfp + 1))
return TRUE;
3802 if (rb_ec_raised_p(ec, ov_flags)) {
3803 rb_ec_raised_reset(ec, ov_flags);
3809#define CHECK_CFP_CONSISTENCY(func) \
3810 (LIKELY(vm_cfp_consistent_p(ec, reg_cfp)) ? (void)0 : \
3811 rb_bug(func ": cfp consistency error (%p, %p)", (void *)reg_cfp, (void *)(ec->cfp+1)))
3817#if VM_DEBUG_VERIFY_METHOD_CACHE
3818 switch (me->def->type) {
3819 case VM_METHOD_TYPE_CFUNC:
3820 case VM_METHOD_TYPE_NOTIMPLEMENTED:
3822# define METHOD_BUG(t) case VM_METHOD_TYPE_##t: rb_bug("wrong method type: " #t)
3824 METHOD_BUG(ATTRSET);
3826 METHOD_BUG(BMETHOD);
3829 METHOD_BUG(OPTIMIZED);
3830 METHOD_BUG(MISSING);
3831 METHOD_BUG(REFINED);
3835 rb_bug(
"wrong method type: %d", me->def->type);
3838 return UNALIGNED_MEMBER_PTR(me->def, body.cfunc);
3845 RB_DEBUG_COUNTER_INC(ccf_cfunc_with_frame);
3852 VALUE recv = calling->recv;
3853 VALUE block_handler = calling->block_handler;
3854 VALUE frame_type = VM_FRAME_MAGIC_CFUNC | VM_FRAME_FLAG_CFRAME | VM_ENV_FLAG_LOCAL;
3856 if (UNLIKELY(calling->kw_splat)) {
3857 frame_type |= VM_FRAME_FLAG_CFRAME_KW;
3860 VM_ASSERT(reg_cfp == ec->cfp);
3862 RUBY_DTRACE_CMETHOD_ENTRY_HOOK(ec, me->owner, me->def->original_id);
3865 vm_push_frame(ec, NULL, frame_type, recv,
3866 block_handler, (
VALUE)me,
3867 0, ec->cfp->sp, 0, 0);
3869 int len = cfunc->argc;
3872 reg_cfp->sp = stack_bottom;
3873 val = (*cfunc->invoker)(recv, argc, argv, cfunc->func);
3875 CHECK_CFP_CONSISTENCY(
"vm_call_cfunc");
3877 rb_vm_pop_frame(ec);
3879 VM_ASSERT(ec->cfp->sp == stack_bottom);
3881 EXEC_EVENT_HOOK(ec,
RUBY_EVENT_C_RETURN, recv, me->def->original_id, vm_ci_mid(ci), me->owner, val);
3882 RUBY_DTRACE_CMETHOD_RETURN_HOOK(ec, me->owner, me->def->original_id);
3892 VM_ASSERT(cme->def->type == VM_METHOD_TYPE_CFUNC);
3894 VALUE *sp = ec->cfp->sp;
3895 VALUE recv = *(sp - recv_idx - 1);
3896 VALUE frame_type = VM_FRAME_MAGIC_CFUNC | VM_FRAME_FLAG_CFRAME | VM_ENV_FLAG_LOCAL;
3897 VALUE block_handler = VM_BLOCK_HANDLER_NONE;
3898#if VM_CHECK_MODE > 0
3900 *(GET_EC()->cfp->sp) =
Qfalse;
3902 vm_push_frame(ec, NULL, frame_type, recv, block_handler, (
VALUE)cme, 0, ec->cfp->sp, 0, 0);
/* Nonzero when the call site passes a positional splat (*args) or any
 * form of keyword argument (literal kwargs or a **splat); such calls
 * cannot take the plain vm_call_cfunc_with_frame fast path. */
static inline int
rb_splat_or_kwargs_p(const struct rb_callinfo *restrict ci)
{
    if (IS_ARGS_SPLAT(ci)) {
        return 1;
    }
    return IS_ARGS_KW_OR_KW_SPLAT(ci) ? 1 : 0;
}
3915 int argc = calling->argc;
3916 VALUE *stack_bottom = reg_cfp->sp - argc - 1;
3917 VALUE *argv = &stack_bottom[1];
3919 return vm_call_cfunc_with_frame_(ec, reg_cfp, calling, argc, argv, stack_bottom);
3926 RB_DEBUG_COUNTER_INC(ccf_cfunc_other);
3928 CALLER_SETUP_ARG(reg_cfp, calling, ci, ALLOW_HEAP_ARGV_KEEP_KWSPLAT);
3930 if (UNLIKELY(argv_ary = calling->heap_argv)) {
3931 VM_ASSERT(!IS_ARGS_KEYWORD(ci));
3934 VALUE *stack_bottom = reg_cfp->sp - 2;
3936 VM_ASSERT(calling->argc == 1);
3940 return vm_call_cfunc_with_frame_(ec, reg_cfp, calling, argc, argv, stack_bottom);
3943 CC_SET_FASTPATH(calling->cc, vm_call_cfunc_with_frame, !rb_splat_or_kwargs_p(ci) && !calling->kw_splat && !(vm_ci_flag(ci) & VM_CALL_FORWARDING));
3945 return vm_call_cfunc_with_frame(ec, reg_cfp, calling);
3952 VALUE argv_ary = reg_cfp->sp[-1 - stack_offset];
3955 if (UNLIKELY(argc > VM_ARGC_STACK_MAX)) {
3956 return vm_call_cfunc_other(ec, reg_cfp, calling);
3960 calling->kw_splat = 0;
3962 VALUE *stack_bottom = reg_cfp->sp - 2 - stack_offset;
3963 VALUE *sp = stack_bottom;
3964 CHECK_VM_STACK_OVERFLOW(reg_cfp, argc);
3965 for(i = 0; i < argc; i++) {
3970 return vm_call_cfunc_with_frame_(ec, reg_cfp, calling, argc, stack_bottom+1, stack_bottom);
3976 RB_DEBUG_COUNTER_INC(ccf_cfunc_only_splat);
3977 VALUE argv_ary = reg_cfp->sp[-1];
3981 int argc_offset = 0;
3983 if (UNLIKELY(argc > 0 &&
3985 (((
struct RHash *)last_hash)->basic.flags & RHASH_PASS_AS_KEYWORDS))) {
3987 return vm_call_cfunc_other(ec, reg_cfp, calling);
3991 return vm_call_cfunc_array_argv(ec, reg_cfp, calling, 0, argc_offset);
3997 RB_DEBUG_COUNTER_INC(ccf_cfunc_only_splat_kw);
3998 VALUE keyword_hash = reg_cfp->sp[-1];
4001 return vm_call_cfunc_array_argv(ec, reg_cfp, calling, 1, 0);
4004 return vm_call_cfunc_other(ec, reg_cfp, calling);
4011 RB_DEBUG_COUNTER_INC(ccf_cfunc);
4013 if (IS_ARGS_SPLAT(ci) && !(vm_ci_flag(ci) & VM_CALL_FORWARDING)) {
4014 if (!IS_ARGS_KW_SPLAT(ci) && vm_ci_argc(ci) == 1) {
4016 CC_SET_FASTPATH(calling->cc, vm_call_cfunc_only_splat, TRUE);
4017 return vm_call_cfunc_only_splat(ec, reg_cfp, calling);
4019 if (IS_ARGS_KW_SPLAT(ci) && vm_ci_argc(ci) == 2) {
4021 CC_SET_FASTPATH(calling->cc, vm_call_cfunc_only_splat_kw, TRUE);
4022 return vm_call_cfunc_only_splat_kw(ec, reg_cfp, calling);
4026 CC_SET_FASTPATH(calling->cc, vm_call_cfunc_other, TRUE);
4027 return vm_call_cfunc_other(ec, reg_cfp, calling);
4034 RB_DEBUG_COUNTER_INC(ccf_ivar);
4036 VALUE ivar = vm_getivar(calling->recv, vm_cc_cme(cc)->def->body.attr.id, NULL, NULL, cc, TRUE,
Qnil);
4043 RB_DEBUG_COUNTER_INC(ccf_attrset);
4044 VALUE val = *(cfp->sp - 1);
4047 shape_id_t dest_shape_id;
4048 vm_cc_atomic_shape_and_index(cc, &dest_shape_id, &index);
4049 ID id = vm_cc_cme(cc)->def->body.attr.id;
4050 rb_check_frozen(obj);
4051 VALUE res = vm_setivar(obj,
id, val, dest_shape_id, index);
4059 res = vm_setivar_class(obj,
id, val, dest_shape_id, index);
4060 if (!UNDEF_P(res)) {
4067 res = vm_setivar_default(obj,
id, val, dest_shape_id, index);
4068 if (!UNDEF_P(res)) {
4073 res = vm_setivar_slowpath_attr(obj,
id, val, cc);
4081 return vm_call_attrset_direct(ec, cfp, calling->cc, calling->recv);
4091 VALUE procv = cme->def->body.bmethod.proc;
4094 cme->def->body.bmethod.defined_ractor != rb_ractor_self(rb_ec_ractor_ptr(ec))) {
4095 rb_raise(
rb_eRuntimeError,
"defined with an un-shareable Proc in a different Ractor");
4099 GetProcPtr(procv, proc);
4100 val = vm_invoke_bmethod(ec, proc, calling->recv, CALLING_ARGC(calling), argv, calling->kw_splat, calling->block_handler, vm_cc_cme(cc));
4110 RB_DEBUG_COUNTER_INC(ccf_iseq_bmethod);
4114 VALUE procv = cme->def->body.bmethod.proc;
4117 cme->def->body.bmethod.defined_ractor != rb_ractor_self(rb_ec_ractor_ptr(ec))) {
4118 rb_raise(
rb_eRuntimeError,
"defined with an un-shareable Proc in a different Ractor");
4122 GetProcPtr(procv, proc);
4123 const struct rb_block *block = &proc->block;
4125 while (vm_block_type(block) == block_type_proc) {
4126 block = vm_proc_block(block->as.proc);
4128 VM_ASSERT(vm_block_type(block) == block_type_iseq);
4131 const rb_iseq_t *iseq = rb_iseq_check(captured->code.iseq);
4132 VALUE *
const argv = cfp->sp - calling->argc;
4133 const int arg_size = ISEQ_BODY(iseq)->param.size;
4136 if (vm_ci_flag(calling->cd->ci) & VM_CALL_ARGS_SIMPLE) {
4137 opt_pc = vm_callee_setup_block_arg(ec, calling, calling->cd->ci, iseq, argv, arg_setup_method);
4140 opt_pc = setup_parameters_complex(ec, iseq, calling, calling->cd->ci, argv, arg_setup_method);
4145 vm_push_frame(ec, iseq,
4146 VM_FRAME_MAGIC_BLOCK | VM_FRAME_FLAG_BMETHOD | VM_FRAME_FLAG_LAMBDA,
4148 VM_GUARDED_PREV_EP(captured->ep),
4150 ISEQ_BODY(iseq)->iseq_encoded + opt_pc,
4152 ISEQ_BODY(iseq)->local_table_size - arg_size,
4153 ISEQ_BODY(iseq)->stack_max);
4161 RB_DEBUG_COUNTER_INC(ccf_noniseq_bmethod);
4165 CALLER_SETUP_ARG(cfp, calling, calling->cd->ci, ALLOW_HEAP_ARGV);
4166 if (UNLIKELY(calling->heap_argv)) {
4171 argc = calling->argc;
4174 cfp->sp += - argc - 1;
4177 return vm_call_bmethod_body(ec, calling, argv);
4183 RB_DEBUG_COUNTER_INC(ccf_bmethod);
4187 VALUE procv = cme->def->body.bmethod.proc;
4189 GetProcPtr(procv, proc);
4190 const struct rb_block *block = &proc->block;
4192 while (vm_block_type(block) == block_type_proc) {
4193 block = vm_proc_block(block->as.proc);
4195 if (vm_block_type(block) == block_type_iseq) {
4196 CC_SET_FASTPATH(cc, vm_call_iseq_bmethod, TRUE);
4197 return vm_call_iseq_bmethod(ec, cfp, calling);
4200 CC_SET_FASTPATH(cc, vm_call_noniseq_bmethod, TRUE);
4201 return vm_call_noniseq_bmethod(ec, cfp, calling);
4205rb_find_defined_class_by_owner(
VALUE current_class,
VALUE target_owner)
4207 VALUE klass = current_class;
4215 while (
RTEST(klass)) {
4217 if (owner == target_owner) {
4223 return current_class;
4232 if (orig_me->defined_class == 0) {
4233 VALUE defined_class = rb_find_defined_class_by_owner(me->defined_class, orig_me->owner);
4234 VM_ASSERT_TYPE(orig_me->owner,
T_MODULE);
4235 cme = rb_method_entry_complement_defined_class(orig_me, me->called_id, defined_class);
4237 if (me->def->reference_count == 1) {
4238 RB_OBJ_WRITE(me, &me->def->body.alias.original_me, cme);
4242 rb_method_definition_create(VM_METHOD_TYPE_ALIAS, me->def->original_id);
4250 VM_ASSERT(callable_method_entry_p(cme));
4257 return aliased_callable_method_entry(me);
4263 calling->cc = &VM_CC_ON_STACK(
Qundef,
4266 aliased_callable_method_entry(vm_cc_cme(calling->cc)));
4268 return vm_call_method_each_type(ec, cfp, calling);
4271static enum method_missing_reason
4274 enum method_missing_reason stat = MISSING_NOENTRY;
4275 if (vm_ci_flag(ci) & VM_CALL_VCALL && !(vm_ci_flag(ci) & VM_CALL_FORWARDING)) stat |= MISSING_VCALL;
4276 if (vm_ci_flag(ci) & VM_CALL_FCALL) stat |= MISSING_FCALL;
4277 if (vm_ci_flag(ci) & VM_CALL_SUPER) stat |= MISSING_SUPER;
4287 ASSUME(calling->argc >= 0);
4289 enum method_missing_reason missing_reason = MISSING_NOENTRY;
4290 int argc = calling->argc;
4291 VALUE recv = calling->recv;
4294 flags |= VM_CALL_OPT_SEND;
4296 if (UNLIKELY(! mid)) {
4297 mid = idMethodMissing;
4298 missing_reason = ci_missing_reason(ci);
4299 ec->method_missing_reason = missing_reason;
4302 if (UNLIKELY(argv_ary = calling->heap_argv)) {
4303 if (rb_method_basic_definition_p(klass, idMethodMissing)) {
4304 rb_ary_unshift(argv_ary, symbol);
4307 int priv = vm_ci_flag(ci) & (VM_CALL_FCALL | VM_CALL_VCALL);
4308 VALUE exc = rb_make_no_method_exception(
4330 CHECK_VM_STACK_OVERFLOW(reg_cfp, 1);
4333 argc = ++calling->argc;
4335 if (rb_method_basic_definition_p(klass, idMethodMissing)) {
4338 int priv = vm_ci_flag(ci) & (VM_CALL_FCALL | VM_CALL_VCALL);
4339 const VALUE *argv = STACK_ADDR_FROM_TOP(argc);
4340 VALUE exc = rb_make_no_method_exception(
4353 .ci = &VM_CI_ON_STACK(mid, flags, argc, vm_ci_kwarg(ci)),
4359 if (!(vm_ci_flag(ci) & VM_CALL_FORWARDING)) {
4360 calling->cd = &new_fcd.cd;
4364 VM_ASSERT((vm_ci_argc(caller_ci), 1));
4365 new_fcd.caller_ci = caller_ci;
4368 calling->cc = &VM_CC_ON_STACK(klass,
4370 { .method_missing_reason = missing_reason },
4371 rb_callable_method_entry_with_refinements(klass, mid, NULL));
4373 if (flags & VM_CALL_FCALL) {
4374 return vm_call_method(ec, reg_cfp, calling);
4378 VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));
4380 if (vm_cc_cme(cc) != NULL) {
4381 switch (METHOD_ENTRY_VISI(vm_cc_cme(cc))) {
4382 case METHOD_VISI_PUBLIC:
4383 return vm_call_method_each_type(ec, reg_cfp, calling);
4384 case METHOD_VISI_PRIVATE:
4385 vm_cc_method_missing_reason_set(cc, MISSING_PRIVATE);
4387 case METHOD_VISI_PROTECTED:
4388 vm_cc_method_missing_reason_set(cc, MISSING_PROTECTED);
4391 VM_UNREACHABLE(vm_call_method);
4393 return vm_call_method_missing(ec, reg_cfp, calling);
4396 return vm_call_method_nome(ec, reg_cfp, calling);
4406 i = calling->argc - 1;
4408 if (calling->argc == 0) {
4409 rb_raise(rb_eArgError,
"no method name given");
4433 return vm_call_symbol(ec, reg_cfp, calling, ci, sym, flags);
4439 RB_DEBUG_COUNTER_INC(ccf_opt_send_complex);
4441 int flags = VM_CALL_FCALL;
4445 CALLER_SETUP_ARG(reg_cfp, calling, ci, ALLOW_HEAP_ARGV);
4446 if (UNLIKELY(argv_ary = calling->heap_argv)) {
4448 flags |= VM_CALL_ARGS_SPLAT;
4449 if (calling->kw_splat) {
4450 VALUE last_hash = rb_ary_last(0, NULL, argv_ary);
4451 ((
struct RHash *)last_hash)->basic.flags |= RHASH_PASS_AS_KEYWORDS;
4452 calling->kw_splat = 0;
4454 return vm_call_symbol(ec, reg_cfp, calling, ci, sym, flags);
4457 if (calling->kw_splat) flags |= VM_CALL_KW_SPLAT;
4458 return vm_call_opt_send0(ec, reg_cfp, calling, flags);
4464 RB_DEBUG_COUNTER_INC(ccf_opt_send_simple);
4465 return vm_call_opt_send0(ec, reg_cfp, calling, vm_ci_flag(calling->cd->ci) | VM_CALL_FCALL);
4471 RB_DEBUG_COUNTER_INC(ccf_opt_send);
4474 int flags = vm_ci_flag(ci);
4476 if (UNLIKELY((flags & VM_CALL_FORWARDING) || (!(flags & VM_CALL_ARGS_SIMPLE) &&
4477 ((calling->argc == 1 && (flags & (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT))) ||
4478 (calling->argc == 2 && (flags & VM_CALL_ARGS_SPLAT) && (flags & VM_CALL_KW_SPLAT)) ||
4479 ((flags & VM_CALL_KWARG) && (vm_ci_kwarg(ci)->keyword_len == calling->argc)))))) {
4480 CC_SET_FASTPATH(calling->cc, vm_call_opt_send_complex, TRUE);
4481 return vm_call_opt_send_complex(ec, reg_cfp, calling);
4484 CC_SET_FASTPATH(calling->cc, vm_call_opt_send_simple, TRUE);
4485 return vm_call_opt_send_simple(ec, reg_cfp, calling);
4490 const struct rb_callinfo *orig_ci,
enum method_missing_reason reason)
4492 RB_DEBUG_COUNTER_INC(ccf_method_missing);
4494 VALUE *argv = STACK_ADDR_FROM_TOP(calling->argc);
4495 unsigned int argc, flag;
4497 flag = VM_CALL_FCALL | VM_CALL_OPT_SEND | vm_ci_flag(orig_ci);
4498 argc = ++calling->argc;
4501 CHECK_VM_STACK_OVERFLOW(reg_cfp, 1);
4502 vm_check_canary(ec, reg_cfp->sp);
4506 argv[0] =
ID2SYM(vm_ci_mid(orig_ci));
4509 ec->method_missing_reason = reason;
4513 .ci = &VM_CI_ON_STACK(idMethodMissing, flag, argc, vm_ci_kwarg(orig_ci)),
4519 if (!(flag & VM_CALL_FORWARDING)) {
4520 calling->cd = &new_fcd.cd;
4524 VM_ASSERT((vm_ci_argc(caller_ci), 1));
4525 new_fcd.caller_ci = caller_ci;
4529 calling->cc = &VM_CC_ON_STACK(
Qundef, vm_call_general, {{ 0 }},
4530 rb_callable_method_entry_without_refinements(
CLASS_OF(calling->recv), idMethodMissing, NULL));
4531 return vm_call_method(ec, reg_cfp, calling);
4537 return vm_call_method_missing_body(ec, reg_cfp, calling, calling->cd->ci, vm_cc_cmethod_missing_reason(calling->cc));
4548 return vm_call_method_nome(ec, cfp, calling);
4550 if (cme->def->type == VM_METHOD_TYPE_REFINED &&
4551 cme->def->body.refined.orig_me) {
4552 cme = refined_method_callable_without_refinement(cme);
4555 calling->cc = &VM_CC_ON_STACK(
Qundef, vm_call_general, {{ 0 }}, cme);
4557 return vm_call_method_each_type(ec, cfp, calling);
4561find_refinement(
VALUE refinements,
VALUE klass)
4563 if (
NIL_P(refinements)) {
4566 return rb_hash_lookup(refinements, klass);
4575 if (cfp->iseq && ISEQ_BODY(cfp->iseq)->type == ISEQ_TYPE_BLOCK) {
4576 const rb_iseq_t *local_iseq = ISEQ_BODY(cfp->iseq)->local_iseq;
4579 cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
4580 if (RUBY_VM_CONTROL_FRAME_STACK_OVERFLOW_P(ec, cfp)) {
4584 }
while (cfp->iseq != local_iseq);
4595 if (orig_me->defined_class == 0) {
4603 VM_ASSERT(callable_method_entry_p(cme));
4605 if (UNDEFINED_METHOD_ENTRY_P(cme)) {
4615 ID mid = vm_ci_mid(calling->cd->ci);
4616 const rb_cref_t *cref = vm_get_cref(cfp->ep);
4620 for (; cref; cref = CREF_NEXT(cref)) {
4621 const VALUE refinement = find_refinement(CREF_REFINEMENTS(cref), vm_cc_cme(cc)->owner);
4622 if (
NIL_P(refinement))
continue;
4625 rb_callable_method_entry(refinement, mid);
4628 if (vm_cc_call(cc) == vm_call_super_method) {
4631 if (top_me && rb_method_definition_eq(ref_me->def, top_me->def)) {
4636 if (cme->def->type != VM_METHOD_TYPE_REFINED ||
4637 cme->def != ref_me->def) {
4640 if (ref_me->def->type != VM_METHOD_TYPE_REFINED) {
4649 if (vm_cc_cme(cc)->def->body.refined.orig_me) {
4650 return refined_method_callable_without_refinement(vm_cc_cme(cc));
4665 if (calling->cd->cc) {
4666 const struct rb_callcache *cc = calling->cc = vm_cc_new(vm_cc_cme(calling->cc)->defined_class, ref_cme, vm_call_general, cc_type_refinement);
4668 return vm_call_method(ec, cfp, calling);
4671 struct rb_callcache *ref_cc = &VM_CC_ON_STACK(
Qundef, vm_call_general, {{ 0 }}, ref_cme);
4672 calling->cc= ref_cc;
4673 return vm_call_method(ec, cfp, calling);
4677 return vm_call_method_nome(ec, cfp, calling);
4683NOINLINE(
static VALUE
4691 int argc = calling->argc;
4694 if (argc > 0)
MEMMOVE(&TOPN(argc), &TOPN(argc-1),
VALUE, argc);
4697 return vm_invoke_block(ec, reg_cfp, calling, ci,
false, block_handler);
4703 RB_DEBUG_COUNTER_INC(ccf_opt_call);
4706 VALUE procval = calling->recv;
4707 return vm_invoke_block_opt_call(ec, reg_cfp, calling, ci, VM_BH_FROM_PROC(procval));
4713 RB_DEBUG_COUNTER_INC(ccf_opt_block_call);
4715 VALUE block_handler = VM_ENV_BLOCK_HANDLER(VM_CF_LEP(reg_cfp));
4718 if (BASIC_OP_UNREDEFINED_P(BOP_CALL, PROC_REDEFINED_OP_FLAG)) {
4719 return vm_invoke_block_opt_call(ec, reg_cfp, calling, ci, block_handler);
4722 calling->recv = rb_vm_bh_to_procval(ec, block_handler);
4723 calling->cc = rb_vm_search_method_slowpath(ci,
CLASS_OF(calling->recv));
4724 return vm_call_general(ec, reg_cfp, calling);
4731 VALUE recv = calling->recv;
4734 VM_ASSERT(vm_cc_cme(calling->cc)->def->type == VM_METHOD_TYPE_OPTIMIZED);
4735 VM_ASSERT(vm_cc_cme(calling->cc)->def->body.optimized.type == OPTIMIZED_METHOD_TYPE_STRUCT_AREF);
4737 const unsigned int off = vm_cc_cme(calling->cc)->def->body.optimized.index;
4738 return internal_RSTRUCT_GET(recv,
off);
4744 RB_DEBUG_COUNTER_INC(ccf_opt_struct_aref);
4746 VALUE ret = vm_call_opt_struct_aref0(ec, calling);
4754 VALUE recv = calling->recv;
4757 VM_ASSERT(vm_cc_cme(calling->cc)->def->type == VM_METHOD_TYPE_OPTIMIZED);
4758 VM_ASSERT(vm_cc_cme(calling->cc)->def->body.optimized.type == OPTIMIZED_METHOD_TYPE_STRUCT_ASET);
4760 rb_check_frozen(recv);
4762 const unsigned int off = vm_cc_cme(calling->cc)->def->body.optimized.index;
4763 internal_RSTRUCT_SET(recv,
off, val);
4771 RB_DEBUG_COUNTER_INC(ccf_opt_struct_aset);
4773 VALUE ret = vm_call_opt_struct_aset0(ec, calling, *(reg_cfp->sp - 1));
4781#define VM_CALL_METHOD_ATTR(var, func, nohook) \
4782 if (UNLIKELY(ruby_vm_event_flags & (RUBY_EVENT_C_CALL | RUBY_EVENT_C_RETURN))) { \
4783 EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_CALL, calling->recv, vm_cc_cme(cc)->def->original_id, \
4784 vm_ci_mid(ci), vm_cc_cme(cc)->owner, Qundef); \
4786 EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_RETURN, calling->recv, vm_cc_cme(cc)->def->original_id, \
4787 vm_ci_mid(ci), vm_cc_cme(cc)->owner, (var)); \
4798 switch (vm_cc_cme(cc)->def->body.optimized.type) {
4799 case OPTIMIZED_METHOD_TYPE_SEND:
4800 CC_SET_FASTPATH(cc, vm_call_opt_send, TRUE);
4801 return vm_call_opt_send(ec, cfp, calling);
4802 case OPTIMIZED_METHOD_TYPE_CALL:
4803 CC_SET_FASTPATH(cc, vm_call_opt_call, TRUE);
4804 return vm_call_opt_call(ec, cfp, calling);
4805 case OPTIMIZED_METHOD_TYPE_BLOCK_CALL:
4806 CC_SET_FASTPATH(cc, vm_call_opt_block_call, TRUE);
4807 return vm_call_opt_block_call(ec, cfp, calling);
4808 case OPTIMIZED_METHOD_TYPE_STRUCT_AREF: {
4809 CALLER_SETUP_ARG(cfp, calling, ci, 0);
4813 VM_CALL_METHOD_ATTR(v,
4814 vm_call_opt_struct_aref(ec, cfp, calling),
4815 set_vm_cc_ivar(cc); \
4816 CC_SET_FASTPATH(cc, vm_call_opt_struct_aref, (vm_ci_flag(ci) & VM_CALL_ARGS_SIMPLE)))
4819 case OPTIMIZED_METHOD_TYPE_STRUCT_ASET: {
4820 CALLER_SETUP_ARG(cfp, calling, ci, 1);
4824 VM_CALL_METHOD_ATTR(v,
4825 vm_call_opt_struct_aset(ec, cfp, calling),
4826 set_vm_cc_ivar(cc); \
4827 CC_SET_FASTPATH(cc, vm_call_opt_struct_aset, (vm_ci_flag(ci) & VM_CALL_ARGS_SIMPLE)))
4831 rb_bug(
"vm_call_method: unsupported optimized method type (%d)", vm_cc_cme(cc)->def->body.optimized.type);
4843 VM_ASSERT(! METHOD_ENTRY_INVALIDATED(cme));
4845 switch (cme->def->type) {
4846 case VM_METHOD_TYPE_ISEQ:
4847 if (ISEQ_BODY(def_iseq_ptr(cme->def))->param.flags.forwardable) {
4848 CC_SET_FASTPATH(cc, vm_call_iseq_fwd_setup, TRUE);
4849 return vm_call_iseq_fwd_setup(ec, cfp, calling);
4852 CC_SET_FASTPATH(cc, vm_call_iseq_setup, TRUE);
4853 return vm_call_iseq_setup(ec, cfp, calling);
4856 case VM_METHOD_TYPE_NOTIMPLEMENTED:
4857 case VM_METHOD_TYPE_CFUNC:
4858 CC_SET_FASTPATH(cc, vm_call_cfunc, TRUE);
4859 return vm_call_cfunc(ec, cfp, calling);
4861 case VM_METHOD_TYPE_ATTRSET:
4862 CALLER_SETUP_ARG(cfp, calling, ci, 1);
4866 const unsigned int aset_mask = (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT | VM_CALL_KWARG | VM_CALL_FORWARDING);
4868 if (vm_cc_markable(cc)) {
4869 vm_cc_attr_index_initialize(cc, INVALID_SHAPE_ID);
4870 VM_CALL_METHOD_ATTR(v,
4871 vm_call_attrset_direct(ec, cfp, cc, calling->recv),
4872 CC_SET_FASTPATH(cc, vm_call_attrset, !(vm_ci_flag(ci) & aset_mask)));
4878 VM_CALLCACHE_UNMARKABLE |
4879 VM_CALLCACHE_ON_STACK,
4885 .value = vm_pack_shape_and_index(INVALID_SHAPE_ID, ATTR_INDEX_NOT_SET),
4890 VM_CALL_METHOD_ATTR(v,
4891 vm_call_attrset_direct(ec, cfp, cc, calling->recv),
4892 CC_SET_FASTPATH(cc, vm_call_attrset, !(vm_ci_flag(ci) & aset_mask)));
4896 case VM_METHOD_TYPE_IVAR:
4897 CALLER_SETUP_ARG(cfp, calling, ci, 0);
4899 vm_cc_attr_index_initialize(cc, INVALID_SHAPE_ID);
4900 const unsigned int ivar_mask = (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT | VM_CALL_FORWARDING);
4901 VM_CALL_METHOD_ATTR(v,
4902 vm_call_ivar(ec, cfp, calling),
4903 CC_SET_FASTPATH(cc, vm_call_ivar, !(vm_ci_flag(ci) & ivar_mask)));
4906 case VM_METHOD_TYPE_MISSING:
4907 vm_cc_method_missing_reason_set(cc, 0);
4908 CC_SET_FASTPATH(cc, vm_call_method_missing, TRUE);
4909 return vm_call_method_missing(ec, cfp, calling);
4911 case VM_METHOD_TYPE_BMETHOD:
4912 CC_SET_FASTPATH(cc, vm_call_bmethod, TRUE);
4913 return vm_call_bmethod(ec, cfp, calling);
4915 case VM_METHOD_TYPE_ALIAS:
4916 CC_SET_FASTPATH(cc, vm_call_alias, TRUE);
4917 return vm_call_alias(ec, cfp, calling);
4919 case VM_METHOD_TYPE_OPTIMIZED:
4920 return vm_call_optimized(ec, cfp, calling, ci, cc);
4922 case VM_METHOD_TYPE_UNDEF:
4925 case VM_METHOD_TYPE_ZSUPER:
4926 return vm_call_zsuper(ec, cfp, calling, RCLASS_ORIGIN(vm_cc_cme(cc)->defined_class));
4928 case VM_METHOD_TYPE_REFINED:
4931 return vm_call_refined(ec, cfp, calling);
4934 rb_bug(
"vm_call_method: unsupported method type (%d)", vm_cc_cme(cc)->def->type);
4944 const int stat = ci_missing_reason(ci);
4946 if (vm_ci_mid(ci) == idMethodMissing) {
4947 if (UNLIKELY(calling->heap_argv)) {
4952 VALUE *argv = STACK_ADDR_FROM_TOP(calling->argc);
4953 vm_raise_method_missing(ec, calling->argc, argv, calling->recv, stat);
4957 return vm_call_method_missing_body(ec, cfp, calling, ci, stat);
4969 VALUE defined_class = me->defined_class;
4970 VALUE refined_class = RCLASS_REFINED_CLASS(defined_class);
4971 return NIL_P(refined_class) ? defined_class : refined_class;
4980 VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));
4982 if (vm_cc_cme(cc) != NULL) {
4983 switch (METHOD_ENTRY_VISI(vm_cc_cme(cc))) {
4984 case METHOD_VISI_PUBLIC:
4985 return vm_call_method_each_type(ec, cfp, calling);
4987 case METHOD_VISI_PRIVATE:
4988 if (!(vm_ci_flag(ci) & VM_CALL_FCALL)) {
4989 enum method_missing_reason stat = MISSING_PRIVATE;
4990 if (vm_ci_flag(ci) & VM_CALL_VCALL) stat |= MISSING_VCALL;
4992 vm_cc_method_missing_reason_set(cc, stat);
4993 CC_SET_FASTPATH(cc, vm_call_method_missing, TRUE);
4994 return vm_call_method_missing(ec, cfp, calling);
4996 return vm_call_method_each_type(ec, cfp, calling);
4998 case METHOD_VISI_PROTECTED:
4999 if (!(vm_ci_flag(ci) & (VM_CALL_OPT_SEND | VM_CALL_FCALL))) {
5000 VALUE defined_class = vm_defined_class_for_protected_call(vm_cc_cme(cc));
5002 vm_cc_method_missing_reason_set(cc, MISSING_PROTECTED);
5003 return vm_call_method_missing(ec, cfp, calling);
5007 VM_ASSERT(vm_cc_cme(cc) != NULL);
5010 calling->cc = &cc_on_stack;
5011 return vm_call_method_each_type(ec, cfp, calling);
5014 return vm_call_method_each_type(ec, cfp, calling);
5017 rb_bug(
"unreachable");
5021 return vm_call_method_nome(ec, cfp, calling);
5028 RB_DEBUG_COUNTER_INC(ccf_general);
5029 return vm_call_method(ec, reg_cfp, calling);
5035 VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
5036 VM_ASSERT(cc != vm_cc_empty());
5038 *(vm_call_handler *)&cc->call_ = vm_call_general;
5044 RB_DEBUG_COUNTER_INC(ccf_super_method);
5049 if (ec == NULL) rb_bug(
"unreachable");
5052 VM_ASSERT(vm_cc_call(calling->cc) == vm_call_super_method);
5053 return vm_call_method(ec, reg_cfp, calling);
5059vm_search_normal_superclass(
VALUE klass)
5064 klass =
RBASIC(klass)->klass;
5066 klass = RCLASS_ORIGIN(klass);
5070NORETURN(
static void vm_super_outside(
void));
5073vm_super_outside(
void)
5079empty_cc_for_super(
void)
5081 return &vm_empty_cc_for_super;
5087 VALUE current_defined_class;
5094 current_defined_class = vm_defined_class_for_protected_call(me);
5097 reg_cfp->iseq != method_entry_iseqptr(me) &&
5100 RCLASS_INCLUDER(current_defined_class) : current_defined_class;
5104 "self has wrong type to call super in this context: "
5105 "%"PRIsVALUE
" (expected %"PRIsVALUE
")",
5110 if (me->def->type == VM_METHOD_TYPE_BMETHOD && (vm_ci_flag(cd->ci) & VM_CALL_ZSUPER)) {
5112 "implicit argument passing of super from method defined"
5113 " by define_method() is not supported."
5114 " Specify all arguments explicitly.");
5117 ID mid = me->def->original_id;
5119 if (!vm_ci_markable(cd->ci)) {
5120 VM_FORCE_WRITE((
const VALUE *)&cd->ci->mid, (
VALUE)mid);
5124 cd->ci = vm_ci_new_runtime(mid,
5127 vm_ci_kwarg(cd->ci));
5134 VALUE klass = vm_search_normal_superclass(me->defined_class);
5138 cc = vm_cc_new(klass, NULL, vm_call_method_missing, cc_type_super);
5142 cc = vm_search_method_fastpath((
VALUE)reg_cfp->iseq, cd, klass);
5146 if (cached_cme == NULL) {
5148 cd->cc = empty_cc_for_super();
5150 else if (cached_cme->called_id != mid) {
5153 cc = vm_cc_new(klass, cme, vm_call_super_method, cc_type_super);
5157 cd->cc = cc = empty_cc_for_super();
5161 switch (cached_cme->def->type) {
5163 case VM_METHOD_TYPE_REFINED:
5165 case VM_METHOD_TYPE_ATTRSET:
5166 case VM_METHOD_TYPE_IVAR:
5167 vm_cc_call_set(cc, vm_call_super_method);
5175 VM_ASSERT((vm_cc_cme(cc),
true));
5183block_proc_is_lambda(
const VALUE procval)
5188 GetProcPtr(procval, proc);
5189 return proc->is_lambda;
5197block_proc_namespace(
const VALUE procval)
5202 GetProcPtr(procval, proc);
5213 VALUE self,
int argc,
const VALUE *argv,
int kw_splat,
VALUE block_handler,
5216 int is_lambda = FALSE;
5217 VALUE val, arg, blockarg;
5219 const struct vm_ifunc *ifunc = captured->code.ifunc;
5224 else if (argc == 0) {
5231 blockarg = rb_vm_bh_to_procval(ec, block_handler);
5233 frame_flag = VM_FRAME_MAGIC_IFUNC | VM_FRAME_FLAG_CFRAME | (me ? VM_FRAME_FLAG_BMETHOD : 0);
5235 frame_flag |= VM_FRAME_FLAG_CFRAME_KW;
5238 vm_push_frame(ec, (
const rb_iseq_t *)captured->code.ifunc,
5241 VM_GUARDED_PREV_EP(captured->ep),
5243 0, ec->cfp->sp, 0, 0);
5244 val = (*ifunc->func)(arg, (
VALUE)ifunc->data, argc, argv, blockarg);
5245 rb_vm_pop_frame(ec);
5253 return vm_yield_with_cfunc(ec, captured, captured->self, argc, argv, 0, VM_BLOCK_HANDLER_NONE, NULL);
5259 return rb_sym_proc_call(
SYM2ID(symbol), argc, argv, kw_splat, rb_vm_bh_to_procval(ec, block_handler));
5268 CHECK_VM_STACK_OVERFLOW(cfp, ISEQ_BODY(iseq)->param.lead_num);
5270 for (i=0; i<
len && i<ISEQ_BODY(iseq)->param.lead_num; i++) {
5278vm_callee_setup_block_arg_arg0_check(
VALUE *argv)
5280 VALUE ary, arg0 = argv[0];
5285 VM_ASSERT(argv[0] == arg0);
5293 if (rb_simple_iseq_p(iseq)) {
5297 CALLER_SETUP_ARG(cfp, calling, ci, ISEQ_BODY(iseq)->param.lead_num);
5299 if (arg_setup_type == arg_setup_block &&
5300 calling->argc == 1 &&
5301 ISEQ_BODY(iseq)->param.flags.has_lead &&
5302 !ISEQ_BODY(iseq)->param.flags.ambiguous_param0 &&
5303 !
NIL_P(arg0 = vm_callee_setup_block_arg_arg0_check(argv))) {
5304 calling->argc = vm_callee_setup_block_arg_arg0_splat(cfp, iseq, argv, arg0);
5307 if (calling->argc != ISEQ_BODY(iseq)->param.lead_num) {
5308 if (arg_setup_type == arg_setup_block) {
5309 if (calling->argc < ISEQ_BODY(iseq)->param.lead_num) {
5311 CHECK_VM_STACK_OVERFLOW(cfp, ISEQ_BODY(iseq)->param.lead_num);
5312 for (i=calling->argc; i<ISEQ_BODY(iseq)->param.lead_num; i++) argv[i] =
Qnil;
5313 calling->argc = ISEQ_BODY(iseq)->param.lead_num;
5315 else if (calling->argc > ISEQ_BODY(iseq)->param.lead_num) {
5316 calling->argc = ISEQ_BODY(iseq)->param.lead_num;
5320 argument_arity_error(ec, iseq, NULL, calling->argc, ISEQ_BODY(iseq)->param.lead_num, ISEQ_BODY(iseq)->param.lead_num);
5327 return setup_parameters_complex(ec, iseq, calling, ci, argv, arg_setup_type);
5336 calling = &calling_entry;
5337 calling->argc = argc;
5338 calling->block_handler = block_handler;
5339 calling->kw_splat = (flags & VM_CALL_KW_SPLAT) ? 1 : 0;
5341 calling->heap_argv = 0;
5343 struct rb_callinfo dummy_ci = VM_CI_ON_STACK(0, flags, 0, 0);
5345 return vm_callee_setup_block_arg(ec, calling, &dummy_ci, iseq, argv, arg_setup_type);
5353 bool is_lambda,
VALUE block_handler)
5356 const rb_iseq_t *iseq = rb_iseq_check(captured->code.iseq);
5357 const int arg_size = ISEQ_BODY(iseq)->param.size;
5358 VALUE *
const rsp = GET_SP() - calling->argc;
5359 VALUE *
const argv = rsp;
5360 int opt_pc = vm_callee_setup_block_arg(ec, calling, ci, iseq, argv, is_lambda ? arg_setup_method : arg_setup_block);
5361 int frame_flag = VM_FRAME_MAGIC_BLOCK | (is_lambda ? VM_FRAME_FLAG_LAMBDA : 0);
5365 if (calling->proc_ns) {
5366 frame_flag |= VM_FRAME_FLAG_NS_SWITCH;
5369 vm_push_frame(ec, iseq,
5372 VM_GUARDED_PREV_EP(captured->ep), 0,
5373 ISEQ_BODY(iseq)->iseq_encoded + opt_pc,
5375 ISEQ_BODY(iseq)->local_table_size - arg_size, ISEQ_BODY(iseq)->stack_max);
5383 MAYBE_UNUSED(
bool is_lambda),
VALUE block_handler)
5385 VALUE symbol = VM_BH_TO_SYMBOL(block_handler);
5386 int flags = vm_ci_flag(ci);
5388 if (UNLIKELY(!(flags & VM_CALL_ARGS_SIMPLE) &&
5389 ((calling->argc == 0) ||
5390 (calling->argc == 1 && (flags & (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT))) ||
5391 (calling->argc == 2 && (flags & VM_CALL_ARGS_SPLAT) && (flags & VM_CALL_KW_SPLAT)) ||
5392 ((flags & VM_CALL_KWARG) && (vm_ci_kwarg(ci)->keyword_len == calling->argc))))) {
5393 CALLER_SETUP_ARG(reg_cfp, calling, ci, ALLOW_HEAP_ARGV);
5395 if (UNLIKELY(calling->heap_argv)) {
5396#if VM_ARGC_STACK_MAX < 0
5398 rb_raise(rb_eArgError,
"no receiver given");
5404 reg_cfp->sp[-1] = reg_cfp->sp[-2];
5405 reg_cfp->sp[-2] = calling->recv;
5406 flags |= VM_CALL_ARGS_SPLAT;
5409 if (calling->argc < 1) {
5410 rb_raise(rb_eArgError,
"no receiver given");
5412 calling->recv = TOPN(--calling->argc);
5414 if (calling->kw_splat) {
5415 flags |= VM_CALL_KW_SPLAT;
5419 if (calling->argc < 1) {
5420 rb_raise(rb_eArgError,
"no receiver given");
5422 calling->recv = TOPN(--calling->argc);
5425 return vm_call_symbol(ec, reg_cfp, calling, ci, symbol, flags);
5431 MAYBE_UNUSED(
bool is_lambda),
VALUE block_handler)
5436 CALLER_SETUP_ARG(ec->cfp, calling, ci, ALLOW_HEAP_ARGV_KEEP_KWSPLAT);
5437 argc = calling->argc;
5438 val = vm_yield_with_cfunc(ec, captured, captured->self, CALLING_ARGC(calling), calling->heap_argv ?
RARRAY_CONST_PTR(calling->heap_argv) : STACK_ADDR_FROM_TOP(argc), calling->kw_splat, calling->block_handler, NULL);
5444vm_proc_to_block_handler(
VALUE procval)
5446 const struct rb_block *block = vm_proc_block(procval);
5448 switch (vm_block_type(block)) {
5449 case block_type_iseq:
5450 return VM_BH_FROM_ISEQ_BLOCK(&block->as.captured);
5451 case block_type_ifunc:
5452 return VM_BH_FROM_IFUNC_BLOCK(&block->as.captured);
5453 case block_type_symbol:
5454 return VM_BH_FROM_SYMBOL(block->as.symbol);
5455 case block_type_proc:
5456 return VM_BH_FROM_PROC(block->as.proc);
5458 VM_UNREACHABLE(vm_yield_with_proc);
5465 bool is_lambda,
VALUE block_handler)
5467 while (vm_block_handler_type(block_handler) == block_handler_type_proc) {
5468 VALUE proc = VM_BH_TO_PROC(block_handler);
5469 if (!calling->proc_ns) {
5470 calling->proc_ns = block_proc_namespace(proc);
5472 is_lambda = block_proc_is_lambda(proc);
5473 block_handler = vm_proc_to_block_handler(proc);
5476 return vm_invoke_block(ec, reg_cfp, calling, ci, is_lambda, block_handler);
5482 bool is_lambda,
VALUE block_handler)
5486 bool is_lambda,
VALUE block_handler);
5488 switch (vm_block_handler_type(block_handler)) {
5489 case block_handler_type_iseq: func = vm_invoke_iseq_block;
break;
5490 case block_handler_type_ifunc: func = vm_invoke_ifunc_block;
break;
5491 case block_handler_type_proc: func = vm_invoke_proc_block;
break;
5492 case block_handler_type_symbol: func = vm_invoke_symbol_block;
break;
5493 default: rb_bug(
"vm_invoke_block: unreachable");
5496 return func(ec, reg_cfp, calling, ci, is_lambda, block_handler);
5500vm_make_proc_with_iseq(
const rb_iseq_t *blockiseq)
5507 rb_bug(
"vm_make_proc_with_iseq: unreachable");
5510 captured = VM_CFP_TO_CAPTURED_BLOCK(cfp);
5511 captured->code.iseq = blockiseq;
5513 return rb_vm_make_proc(ec, captured,
rb_cProc);
5517vm_once_exec(
VALUE iseq)
5524vm_once_clear(
VALUE data)
5527 is->once.running_thread = NULL;
5539 args[0] = obj; args[1] =
Qfalse;
5541 if (!UNDEF_P(r) &&
RTEST(r)) {
5553 enum defined_type
type = (
enum defined_type)op_type;
5560 return rb_gvar_defined(
SYM2ID(obj));
5562 case DEFINED_CVAR: {
5563 const rb_cref_t *cref = vm_get_cref(GET_EP());
5564 klass = vm_get_cvar_base(cref, GET_CFP(), 0);
5569 case DEFINED_CONST_FROM: {
5570 bool allow_nil =
type == DEFINED_CONST;
5572 return vm_get_ev_const(ec, klass,
SYM2ID(obj), allow_nil,
true);
5577 return rb_ec_obj_respond_to(ec, v,
SYM2ID(obj), TRUE);
5579 case DEFINED_METHOD:{
5584 switch (METHOD_ENTRY_VISI(me)) {
5585 case METHOD_VISI_PRIVATE:
5587 case METHOD_VISI_PROTECTED:
5591 case METHOD_VISI_PUBLIC:
5595 rb_bug(
"vm_defined: unreachable: %u", (
unsigned int)METHOD_ENTRY_VISI(me));
5599 return check_respond_to_missing(obj, v);
5604 if (GET_BLOCK_HANDLER() != VM_BLOCK_HANDLER_NONE) {
5608 case DEFINED_ZSUPER:
5613 VALUE klass = vm_search_normal_superclass(me->defined_class);
5614 if (!klass)
return false;
5616 ID id = me->def->original_id;
5623 return RTEST(vm_backref_defined(ec, GET_LEP(),
FIX2INT(obj)));
5625 rb_bug(
"unimplemented defined? type (VM)");
5635 return vm_defined(ec, reg_cfp, op_type, obj, v);
5639vm_get_ep(
const VALUE *
const reg_ep, rb_num_t lv)
5642 const VALUE *ep = reg_ep;
5643 for (i = 0; i < lv; i++) {
5644 ep = GET_PREV_EP(ep);
5650vm_get_special_object(
const VALUE *
const reg_ep,
5651 enum vm_special_object_type
type)
5654 case VM_SPECIAL_OBJECT_VMCORE:
5655 return rb_mRubyVMFrozenCore;
5656 case VM_SPECIAL_OBJECT_CBASE:
5657 return vm_get_cbase(reg_ep);
5658 case VM_SPECIAL_OBJECT_CONST_BASE:
5659 return vm_get_const_base(reg_ep);
5661 rb_bug(
"putspecialobject insn: unknown value_type %d",
type);
5668rb_vm_get_special_object(
const VALUE *reg_ep,
enum vm_special_object_type
type)
5670 return vm_get_special_object(reg_ep,
type);
5676 const VALUE ary2 = ary2st;
5677 VALUE tmp1 = rb_check_to_array(ary1);
5678 VALUE tmp2 = rb_check_to_array(ary2);
5699 const VALUE ary2 = ary2st;
5701 if (
NIL_P(ary2))
return ary1;
5703 VALUE tmp2 = rb_check_to_array(ary2);
5718 return vm_concat_array(ary1, ary2st);
5722rb_vm_concat_to_array(
VALUE ary1,
VALUE ary2st)
5724 return vm_concat_to_array(ary1, ary2st);
5733 VALUE tmp = rb_check_to_array(ary);
5737 else if (
RTEST(flag)) {
5750 return vm_splat_array(flag, ary);
5756 enum vm_check_match_type
type = ((int)flag) & VM_CHECKMATCH_TYPE_MASK;
5758 if (flag & VM_CHECKMATCH_ARRAY) {
5762 for (i = 0; i < n; i++) {
5764 VALUE c = check_match(ec, v, target,
type);
5773 return check_match(ec, pattern, target,
type);
5780 return vm_check_match(ec, target, pattern, flag);
5784vm_check_keyword(lindex_t bits, lindex_t idx,
const VALUE *ep)
5786 const VALUE kw_bits = *(ep - bits);
5789 unsigned int b = (
unsigned int)
FIX2ULONG(kw_bits);
5790 if ((idx < KW_SPECIFIED_BITS_MAX) && (b & (0x01 << idx)))
5803 if (RUBY_DTRACE_METHOD_ENTRY_ENABLED() ||
5804 RUBY_DTRACE_METHOD_RETURN_ENABLED() ||
5805 RUBY_DTRACE_CMETHOD_ENTRY_ENABLED() ||
5806 RUBY_DTRACE_CMETHOD_RETURN_ENABLED()) {
5810 RUBY_DTRACE_METHOD_ENTRY_HOOK(ec, 0, 0);
5813 RUBY_DTRACE_CMETHOD_ENTRY_HOOK(ec, 0, 0);
5816 RUBY_DTRACE_METHOD_RETURN_HOOK(ec, 0, 0);
5819 RUBY_DTRACE_CMETHOD_RETURN_HOOK(ec, 0, 0);
5826vm_const_get_under(
ID id, rb_num_t flags,
VALUE cbase)
5831 else if (VM_DEFINECLASS_SCOPED_P(flags)) {
5832 return rb_public_const_get_at(cbase,
id);
5840vm_check_if_class(
ID id, rb_num_t flags,
VALUE super,
VALUE klass)
5845 else if (VM_DEFINECLASS_HAS_SUPERCLASS_P(flags)) {
5850 "superclass mismatch for class %"PRIsVALUE
"",
5863vm_check_if_module(
ID id,
VALUE mod)
5882vm_declare_class(
ID id, rb_num_t flags,
VALUE cbase,
VALUE super)
5885 VALUE s = VM_DEFINECLASS_HAS_SUPERCLASS_P(flags) ? super : rb_cObject;
5893vm_declare_module(
ID id,
VALUE cbase)
5899NORETURN(
static void unmatched_redefinition(
const char *
type,
VALUE cbase,
ID id,
VALUE old));
5903 VALUE name = rb_id2str(
id);
5904 VALUE message = rb_sprintf(
"%"PRIsVALUE
" is not a %s",
5906 VALUE location = rb_const_source_location_at(cbase,
id);
5907 if (!
NIL_P(location)) {
5908 rb_str_catf(message,
"\n%"PRIsVALUE
":%"PRIsVALUE
":"
5909 " previous definition of %"PRIsVALUE
" was here",
5916vm_define_class(
ID id, rb_num_t flags,
VALUE cbase,
VALUE super)
5920 if (VM_DEFINECLASS_HAS_SUPERCLASS_P(flags) && !
RB_TYPE_P(super,
T_CLASS)) {
5922 "superclass must be an instance of Class (given an instance of %"PRIsVALUE
")",
5926 vm_check_if_namespace(cbase);
5931 if ((klass = vm_const_get_under(
id, flags, cbase)) != 0) {
5932 if (!vm_check_if_class(
id, flags, super, klass))
5933 unmatched_redefinition(
"class", cbase,
id, klass);
5937 return vm_declare_class(
id, flags, cbase, super);
5942vm_define_module(
ID id, rb_num_t flags,
VALUE cbase)
5946 vm_check_if_namespace(cbase);
5947 if ((mod = vm_const_get_under(
id, flags, cbase)) != 0) {
5948 if (!vm_check_if_module(
id, mod))
5949 unmatched_redefinition(
"module", cbase,
id, mod);
5953 return vm_declare_module(
id, cbase);
5958vm_find_or_create_class_by_id(
ID id,
5963 rb_vm_defineclass_type_t
type = VM_DEFINECLASS_TYPE(flags);
5966 case VM_DEFINECLASS_TYPE_CLASS:
5968 return vm_define_class(
id, flags, cbase, super);
5970 case VM_DEFINECLASS_TYPE_SINGLETON_CLASS:
5974 case VM_DEFINECLASS_TYPE_MODULE:
5976 return vm_define_module(
id, flags, cbase);
5979 rb_bug(
"unknown defineclass type: %d", (
int)
type);
5983static rb_method_visibility_t
5988 if (!vm_env_cref_by_cref(cfp->ep)) {
5989 return METHOD_VISI_PUBLIC;
5992 return CREF_SCOPE_VISI(vm_ec_cref(ec))->method_visi;
6001 if (!vm_env_cref_by_cref(cfp->ep)) {
6005 return CREF_SCOPE_VISI(vm_ec_cref(ec))->module_func;
6013 rb_method_visibility_t visi;
6018 visi = METHOD_VISI_PUBLIC;
6021 klass = CREF_CLASS_FOR_DEFINITION(cref);
6022 visi = vm_scope_visibility_get(ec);
6029 rb_add_method_iseq(klass,
id, (
const rb_iseq_t *)iseqval, cref, visi);
6032 RCLASS_SET_MAX_IV_COUNT(klass, rb_estimate_iv_count(klass, (
const rb_iseq_t *)iseqval));
6035 if (!is_singleton && vm_scope_module_func_check(ec)) {
6037 rb_add_method_iseq(klass,
id, (
const rb_iseq_t *)iseqval, cref, METHOD_VISI_PUBLIC);
6047 VALUE block_handler = VM_CF_BLOCK_HANDLER(GET_CFP());
6049 if (block_handler == VM_BLOCK_HANDLER_NONE) {
6050 rb_vm_localjump_error(
"no block given (yield)",
Qnil, 0);
6053 return vm_invoke_block(ec, GET_CFP(), calling, ci,
false, block_handler);
6057enum method_explorer_type {
6059 mexp_search_invokeblock,
6068 VALUE block_handler,
6069 enum method_explorer_type method_explorer
6074 int argc = vm_ci_argc(ci);
6075 VALUE recv = TOPN(argc);
6077 .block_handler = block_handler,
6078 .kw_splat = IS_ARGS_KW_SPLAT(ci) > 0,
6084 switch (method_explorer) {
6085 case mexp_search_method:
6086 calling.cc = cc = vm_search_method_fastpath((
VALUE)reg_cfp->iseq, cd,
CLASS_OF(recv));
6087 val = vm_cc_call(cc)(ec, GET_CFP(), &calling);
6089 case mexp_search_super:
6090 calling.cc = cc = vm_search_super_method(reg_cfp, cd, recv);
6091 val = vm_cc_call(cc)(ec, GET_CFP(), &calling);
6093 case mexp_search_invokeblock:
6094 val = vm_invokeblock_i(ec, GET_CFP(), &calling);
6104 VALUE bh = vm_caller_setup_arg_block(ec, GET_CFP(), cd->ci, blockiseq,
false);
6105 VALUE val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_method);
6118 VALUE bh = vm_caller_setup_fwd_args(GET_EC(), GET_CFP(), cd, blockiseq,
false, &adjusted_cd, &adjusted_ci);
6120 VALUE val = vm_sendish(ec, GET_CFP(), &adjusted_cd.cd, bh, mexp_search_method);
6122 if (cd->cc != adjusted_cd.cd.cc && vm_cc_markable(adjusted_cd.cd.cc)) {
6134 VALUE bh = VM_BLOCK_HANDLER_NONE;
6135 VALUE val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_method);
6145 VALUE bh = vm_caller_setup_arg_block(ec, GET_CFP(), cd->ci, blockiseq,
true);
6146 VALUE val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_super);
6159 VALUE bh = vm_caller_setup_fwd_args(GET_EC(), GET_CFP(), cd, blockiseq,
true, &adjusted_cd, &adjusted_ci);
6161 VALUE val = vm_sendish(ec, GET_CFP(), &adjusted_cd.cd, bh, mexp_search_super);
6163 if (cd->cc != adjusted_cd.cd.cc && vm_cc_markable(adjusted_cd.cd.cc)) {
6175 VALUE bh = VM_BLOCK_HANDLER_NONE;
6176 VALUE val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_invokeblock);
6204 if (check_method_basic_definition(cme)) {
6213 if (check_cfunc(cme, rb_mod_to_s)) {
6219 val = rb_mod_to_s(recv);
6225 if (check_cfunc(cme, rb_nil_to_s)) {
6226 return rb_nil_to_s(recv);
6230 if (check_cfunc(cme, rb_true_to_s)) {
6231 return rb_true_to_s(recv);
6235 if (check_cfunc(cme, rb_false_to_s)) {
6236 return rb_false_to_s(recv);
6240 if (check_cfunc(cme, rb_int_to_s)) {
6241 return rb_fix_to_s(recv);
6253 return vm_objtostring(iseq, recv, cd);
6257vm_opt_ary_freeze(
VALUE ary,
int bop,
ID id)
6259 if (BASIC_OP_UNREDEFINED_P(bop, ARRAY_REDEFINED_OP_FLAG)) {
6268vm_opt_hash_freeze(
VALUE hash,
int bop,
ID id)
6270 if (BASIC_OP_UNREDEFINED_P(bop, HASH_REDEFINED_OP_FLAG)) {
6279vm_opt_str_freeze(
VALUE str,
int bop,
ID id)
6281 if (BASIC_OP_UNREDEFINED_P(bop, STRING_REDEFINED_OP_FLAG)) {
6295 if (BASIC_OP_UNREDEFINED_P(BOP_INCLUDE_P, ARRAY_REDEFINED_OP_FLAG)) {
6299 VALUE args[1] = {target};
6302 RUBY_DTRACE_CREATE_HOOK(ARRAY,
RARRAY_LEN(ary));
6305 return rb_vm_call_with_refinements(ec, dupary, idIncludeP, 1, args,
RB_NO_KEYWORDS);
6312 return vm_opt_duparray_include_p(ec, ary, target);
6318 if (BASIC_OP_UNREDEFINED_P(BOP_MAX, ARRAY_REDEFINED_OP_FLAG)) {
6323 VALUE result = *ptr;
6324 rb_snum_t i = num - 1;
6326 const VALUE v = *++ptr;
6327 if (OPTIMIZED_CMP(v, result) > 0) {
6342 return vm_opt_newarray_max(ec, num, ptr);
6348 if (BASIC_OP_UNREDEFINED_P(BOP_MIN, ARRAY_REDEFINED_OP_FLAG)) {
6353 VALUE result = *ptr;
6354 rb_snum_t i = num - 1;
6356 const VALUE v = *++ptr;
6357 if (OPTIMIZED_CMP(v, result) < 0) {
6372 return vm_opt_newarray_min(ec, num, ptr);
6379 if (BASIC_OP_UNREDEFINED_P(BOP_HASH, ARRAY_REDEFINED_OP_FLAG)) {
6380 return rb_ary_hash_values(num, ptr);
6390 return vm_opt_newarray_hash(ec, num, ptr);
6399 if (BASIC_OP_UNREDEFINED_P(BOP_INCLUDE_P, ARRAY_REDEFINED_OP_FLAG)) {
6401 VALUE ary = rb_setup_fake_ary(&fake_ary,
ptr, num);
6405 VALUE args[1] = {target};
6413 return vm_opt_newarray_include_p(ec, num,
ptr, target);
6419 if (BASIC_OP_UNREDEFINED_P(BOP_PACK, ARRAY_REDEFINED_OP_FLAG)) {
6421 VALUE ary = rb_setup_fake_ary(&fake_ary,
ptr, num);
6422 return rb_ec_pack_ary(ec,
ary, fmt, (UNDEF_P(buffer) ?
Qnil : buffer));
6432 if (!UNDEF_P(buffer)) {
6433 args[1] = rb_hash_new_with_size(1);
6434 rb_hash_aset(args[1],
ID2SYM(idBuffer), buffer);
6439 return rb_vm_call_with_refinements(ec,
rb_ary_new4(num,
ptr), idPack, argc, args, kw_splat);
6446 return vm_opt_newarray_pack_buffer(ec, num,
ptr, fmt, buffer);
6452 return vm_opt_newarray_pack_buffer(ec, num,
ptr, fmt,
Qundef);
6458vm_track_constant_cache(
ID id,
void *ic)
6461 struct rb_id_table *const_cache = vm->constant_cache;
6462 VALUE lookup_result;
6465 if (rb_id_table_lookup(const_cache,
id, &lookup_result)) {
6469 ics = set_init_numtable();
6470 rb_id_table_insert(const_cache,
id, (
VALUE)ics);
6485 vm->inserting_constant_cache_id = id;
6487 set_insert(ics, (st_data_t)ic);
6489 vm->inserting_constant_cache_id = (
ID)0;
6496 for (
int i = 0; segments[i]; i++) {
6497 ID id = segments[i];
6498 if (
id == idNULL)
continue;
6499 vm_track_constant_cache(
id, ic);
6508 if ((flags & IMEMO_CONST_CACHE_SHAREABLE) || rb_ractor_main_p()) {
6509 VM_ASSERT(ractor_incidental_shareable_p(flags & IMEMO_CONST_CACHE_SHAREABLE, value));
6511 return (ic_cref == NULL ||
6512 ic_cref == vm_get_cref(reg_ep));
6520 VM_ASSERT(IMEMO_TYPE_P(ice, imemo_constcache));
6521 return vm_inlined_ic_hit_p(ice->flags, ice->value, ice->ic_cref, reg_ep);
6526rb_vm_ic_hit_p(
IC ic,
const VALUE *reg_ep)
6528 return ic->entry && vm_ic_hit_p(ic->entry, reg_ep);
6534 if (ruby_vm_const_missing_count > 0) {
6535 ruby_vm_const_missing_count = 0;
6542 ice->ic_cref = vm_get_const_key_cref(reg_ep);
6547 unsigned pos = (unsigned)(pc - ISEQ_BODY(iseq)->iseq_encoded);
6548 rb_yjit_constant_ic_update(iseq, ic, pos);
6557 if (ice && vm_ic_hit_p(ice, GET_EP())) {
6560 VM_ASSERT(val == vm_get_ev_const_chain(ec, segments));
6563 ruby_vm_constant_cache_misses++;
6564 val = vm_get_ev_const_chain(ec, segments);
6565 vm_ic_track_const_chain(GET_CFP(), ic, segments);
6568 vm_ic_update(GET_ISEQ(), ic, val, GET_EP(), GET_PC() - 2);
6580 if (is->once.running_thread == RUNNING_THREAD_ONCE_DONE) {
6581 return is->once.value;
6583 else if (is->once.running_thread == NULL) {
6585 is->once.running_thread = th;
6589 is->once.running_thread = RUNNING_THREAD_ONCE_DONE;
6592 else if (is->once.running_thread == th) {
6594 return vm_once_exec((
VALUE)iseq);
6598 RUBY_VM_CHECK_INTS(ec);
6605vm_case_dispatch(CDHASH hash, OFFSET else_offset,
VALUE key)
6607 switch (OBJ_BUILTIN_TYPE(key)) {
6613 if (BASIC_OP_UNREDEFINED_P(BOP_EQQ,
6614 SYMBOL_REDEFINED_OP_FLAG |
6615 INTEGER_REDEFINED_OP_FLAG |
6616 FLOAT_REDEFINED_OP_FLAG |
6617 NIL_REDEFINED_OP_FLAG |
6618 TRUE_REDEFINED_OP_FLAG |
6619 FALSE_REDEFINED_OP_FLAG |
6620 STRING_REDEFINED_OP_FLAG)) {
6624 if (!isinf(kval) && modf(kval, &kval) == 0.0) {
6628 if (rb_hash_stlike_lookup(hash, key, &val)) {
6648 const ptrdiff_t nsp = VM_SP_CNT(ec, cfp->sp);
6649 const ptrdiff_t nbp = VM_SP_CNT(ec, bp);
6650 static const char stack_consistency_error[] =
6651 "Stack consistency error (sp: %"PRIdPTRDIFF
", bp: %"PRIdPTRDIFF
")";
6652#if defined RUBY_DEVEL
6653 VALUE mesg = rb_sprintf(stack_consistency_error, nsp, nbp);
6658 rb_bug(stack_consistency_error, nsp, nbp);
6665 if (FIXNUM_2_P(recv, obj) &&
6666 BASIC_OP_UNREDEFINED_P(BOP_PLUS, INTEGER_REDEFINED_OP_FLAG)) {
6667 return rb_fix_plus_fix(recv, obj);
6669 else if (FLONUM_2_P(recv, obj) &&
6670 BASIC_OP_UNREDEFINED_P(BOP_PLUS, FLOAT_REDEFINED_OP_FLAG)) {
6678 BASIC_OP_UNREDEFINED_P(BOP_PLUS, FLOAT_REDEFINED_OP_FLAG)) {
6683 BASIC_OP_UNREDEFINED_P(BOP_PLUS, STRING_REDEFINED_OP_FLAG)) {
6684 return rb_str_opt_plus(recv, obj);
6688 BASIC_OP_UNREDEFINED_P(BOP_PLUS, ARRAY_REDEFINED_OP_FLAG)) {
6699 if (FIXNUM_2_P(recv, obj) &&
6700 BASIC_OP_UNREDEFINED_P(BOP_MINUS, INTEGER_REDEFINED_OP_FLAG)) {
6701 return rb_fix_minus_fix(recv, obj);
6703 else if (FLONUM_2_P(recv, obj) &&
6704 BASIC_OP_UNREDEFINED_P(BOP_MINUS, FLOAT_REDEFINED_OP_FLAG)) {
6712 BASIC_OP_UNREDEFINED_P(BOP_MINUS, FLOAT_REDEFINED_OP_FLAG)) {
6723 if (FIXNUM_2_P(recv, obj) &&
6724 BASIC_OP_UNREDEFINED_P(BOP_MULT, INTEGER_REDEFINED_OP_FLAG)) {
6725 return rb_fix_mul_fix(recv, obj);
6727 else if (FLONUM_2_P(recv, obj) &&
6728 BASIC_OP_UNREDEFINED_P(BOP_MULT, FLOAT_REDEFINED_OP_FLAG)) {
6736 BASIC_OP_UNREDEFINED_P(BOP_MULT, FLOAT_REDEFINED_OP_FLAG)) {
6747 if (FIXNUM_2_P(recv, obj) &&
6748 BASIC_OP_UNREDEFINED_P(BOP_DIV, INTEGER_REDEFINED_OP_FLAG)) {
6749 return (
FIX2LONG(obj) == 0) ?
Qundef : rb_fix_div_fix(recv, obj);
6751 else if (FLONUM_2_P(recv, obj) &&
6752 BASIC_OP_UNREDEFINED_P(BOP_DIV, FLOAT_REDEFINED_OP_FLAG)) {
6753 return rb_flo_div_flo(recv, obj);
6760 BASIC_OP_UNREDEFINED_P(BOP_DIV, FLOAT_REDEFINED_OP_FLAG)) {
6761 return rb_flo_div_flo(recv, obj);
6771 if (FIXNUM_2_P(recv, obj) &&
6772 BASIC_OP_UNREDEFINED_P(BOP_MOD, INTEGER_REDEFINED_OP_FLAG)) {
6773 return (
FIX2LONG(obj) == 0) ?
Qundef : rb_fix_mod_fix(recv, obj);
6775 else if (FLONUM_2_P(recv, obj) &&
6776 BASIC_OP_UNREDEFINED_P(BOP_MOD, FLOAT_REDEFINED_OP_FLAG)) {
6784 BASIC_OP_UNREDEFINED_P(BOP_MOD, FLOAT_REDEFINED_OP_FLAG)) {
6795 if (vm_method_cfunc_is(iseq, cd, recv, rb_obj_not_equal)) {
6796 VALUE val = opt_equality(iseq, recv, obj, cd_eq);
6798 if (!UNDEF_P(val)) {
6799 return RBOOL(!
RTEST(val));
6809 if (FIXNUM_2_P(recv, obj) &&
6810 BASIC_OP_UNREDEFINED_P(BOP_LT, INTEGER_REDEFINED_OP_FLAG)) {
6813 else if (FLONUM_2_P(recv, obj) &&
6814 BASIC_OP_UNREDEFINED_P(BOP_LT, FLOAT_REDEFINED_OP_FLAG)) {
6822 BASIC_OP_UNREDEFINED_P(BOP_LT, FLOAT_REDEFINED_OP_FLAG)) {
6834 if (FIXNUM_2_P(recv, obj) &&
6835 BASIC_OP_UNREDEFINED_P(BOP_LE, INTEGER_REDEFINED_OP_FLAG)) {
6838 else if (FLONUM_2_P(recv, obj) &&
6839 BASIC_OP_UNREDEFINED_P(BOP_LE, FLOAT_REDEFINED_OP_FLAG)) {
6847 BASIC_OP_UNREDEFINED_P(BOP_LE, FLOAT_REDEFINED_OP_FLAG)) {
6859 if (FIXNUM_2_P(recv, obj) &&
6860 BASIC_OP_UNREDEFINED_P(BOP_GT, INTEGER_REDEFINED_OP_FLAG)) {
6863 else if (FLONUM_2_P(recv, obj) &&
6864 BASIC_OP_UNREDEFINED_P(BOP_GT, FLOAT_REDEFINED_OP_FLAG)) {
6872 BASIC_OP_UNREDEFINED_P(BOP_GT, FLOAT_REDEFINED_OP_FLAG)) {
6884 if (FIXNUM_2_P(recv, obj) &&
6885 BASIC_OP_UNREDEFINED_P(BOP_GE, INTEGER_REDEFINED_OP_FLAG)) {
6888 else if (FLONUM_2_P(recv, obj) &&
6889 BASIC_OP_UNREDEFINED_P(BOP_GE, FLOAT_REDEFINED_OP_FLAG)) {
6897 BASIC_OP_UNREDEFINED_P(BOP_GE, FLOAT_REDEFINED_OP_FLAG)) {
6914 BASIC_OP_UNREDEFINED_P(BOP_LTLT, STRING_REDEFINED_OP_FLAG)) {
6923 BASIC_OP_UNREDEFINED_P(BOP_LTLT, ARRAY_REDEFINED_OP_FLAG)) {
6941 BASIC_OP_UNREDEFINED_P(BOP_AND, INTEGER_REDEFINED_OP_FLAG)) {
6952 if (FIXNUM_2_P(recv, obj) &&
6953 BASIC_OP_UNREDEFINED_P(BOP_OR, INTEGER_REDEFINED_OP_FLAG)) {
6965 if (FIXNUM_2_P(recv, obj) &&
6966 BASIC_OP_UNREDEFINED_P(BOP_AREF, INTEGER_REDEFINED_OP_FLAG)) {
6967 return rb_fix_aref(recv, obj);
6972 BASIC_OP_UNREDEFINED_P(BOP_AREF, ARRAY_REDEFINED_OP_FLAG)) {
6974 return rb_ary_entry_internal(recv,
FIX2LONG(obj));
6977 return rb_ary_aref1(recv, obj);
6981 BASIC_OP_UNREDEFINED_P(BOP_AREF, HASH_REDEFINED_OP_FLAG)) {
6982 return rb_hash_aref(recv, obj);
6996 BASIC_OP_UNREDEFINED_P(BOP_ASET, ARRAY_REDEFINED_OP_FLAG) &&
7002 BASIC_OP_UNREDEFINED_P(BOP_ASET, HASH_REDEFINED_OP_FLAG)) {
7003 rb_hash_aset(recv, obj, set);
7012vm_opt_length(
VALUE recv,
int bop)
7018 BASIC_OP_UNREDEFINED_P(bop, STRING_REDEFINED_OP_FLAG)) {
7019 if (bop == BOP_EMPTY_P) {
7020 return LONG2NUM(RSTRING_LEN(recv));
7027 BASIC_OP_UNREDEFINED_P(bop, ARRAY_REDEFINED_OP_FLAG)) {
7031 BASIC_OP_UNREDEFINED_P(bop, HASH_REDEFINED_OP_FLAG)) {
7040vm_opt_empty_p(
VALUE recv)
7042 switch (vm_opt_length(recv, BOP_EMPTY_P)) {
7055 BASIC_OP_UNREDEFINED_P(BOP_NIL_P, NIL_REDEFINED_OP_FLAG)) {
7058 else if (vm_method_cfunc_is(iseq, cd, recv, rb_false)) {
7074 case RSHIFT(~0UL, 1):
7077 return rb_uint2big(1UL << (SIZEOF_LONG * CHAR_BIT - 2));
7095vm_opt_succ(
VALUE recv)
7098 BASIC_OP_UNREDEFINED_P(BOP_SUCC, INTEGER_REDEFINED_OP_FLAG)) {
7099 return fix_succ(recv);
7105 BASIC_OP_UNREDEFINED_P(BOP_SUCC, STRING_REDEFINED_OP_FLAG)) {
7116 if (vm_method_cfunc_is(iseq, cd, recv, rb_obj_not)) {
7117 return RBOOL(!
RTEST(recv));
7132 BASIC_OP_UNREDEFINED_P(BOP_MATCH, STRING_REDEFINED_OP_FLAG)) {
7136 BASIC_OP_UNREDEFINED_P(BOP_MATCH, REGEXP_REDEFINED_OP_FLAG)) {
7154 VALUE self = GET_SELF();
7156 VM_ASSERT(rb_popcount64((uint64_t)event) == 1);
7158 if (event & global_hooks->events) {
7161 vm_dtrace(event, ec);
7162 rb_exec_event_hook_orig(ec, global_hooks, event, self, 0, 0, 0 , val, 0);
7168 if (local_hooks != NULL) {
7169 if (event & local_hooks->events) {
7172 rb_exec_event_hook_orig(ec, local_hooks, event, self, 0, 0, 0 , val, 0);
7178#define VM_TRACE_HOOK(target_event, val) do { \
7179 if ((pc_events & (target_event)) & enabled_flags) { \
7180 vm_trace_hook(ec, reg_cfp, pc, pc_events, (target_event), global_hooks, local_hooks_ptr, (val)); \
7187 VM_ASSERT(VM_FRAME_RUBYFRAME_P(cfp));
7188 VM_ASSERT(ISEQ_BODY(cfp->iseq)->type == ISEQ_TYPE_RESCUE);
7189 return cfp->ep[VM_ENV_INDEX_LAST_LVAR];
7195 const VALUE *pc = reg_cfp->pc;
7196 rb_event_flag_t enabled_flags = ruby_vm_event_flags & ISEQ_TRACE_EVENTS;
7199 if (enabled_flags == 0 && ruby_vm_event_local_num == 0) {
7205 size_t pos = pc - ISEQ_BODY(iseq)->iseq_encoded;
7208 rb_hook_list_t *
const *local_hooks_ptr = &iseq->aux.exec.local_hooks;
7209 rb_event_flag_t iseq_local_events = local_hooks != NULL ? local_hooks->events : 0;
7213 const bool bmethod_frame = VM_FRAME_BMETHOD_P(reg_cfp);
7214 enabled_flags |= iseq_local_events;
7216 VM_ASSERT((iseq_local_events & ~ISEQ_TRACE_EVENTS) == 0);
7218 if (bmethod_frame) {
7220 VM_ASSERT(me->def->type == VM_METHOD_TYPE_BMETHOD);
7221 bmethod_local_hooks = me->def->body.bmethod.hooks;
7222 bmethod_local_hooks_ptr = &me->def->body.bmethod.hooks;
7223 if (bmethod_local_hooks) {
7224 bmethod_local_events = bmethod_local_hooks->events;
7229 if ((pc_events & enabled_flags) == 0 && !bmethod_frame) {
7233 rb_iseq_trace_set(iseq, vm_event_flags & ISEQ_TRACE_EVENTS);
7241 else if (ec->trace_arg != NULL) {
7249 rb_event_flag_t bmethod_events = global_events | bmethod_local_events;
7252 ruby_debug_printf(
"vm_trace>>%4d (%4x) - %s:%d %s\n",
7255 RSTRING_PTR(rb_iseq_path(iseq)),
7256 (
int)rb_iseq_line_no(iseq, pos),
7257 RSTRING_PTR(rb_iseq_label(iseq)));
7259 VM_ASSERT(reg_cfp->pc == pc);
7260 VM_ASSERT(pc_events != 0);
7270 VM_TRACE_HOOK(RUBY_EVENT_COVERAGE_LINE,
Qundef);
7271 VM_TRACE_HOOK(RUBY_EVENT_COVERAGE_BRANCH,
Qundef);
7289#if VM_CHECK_MODE > 0
7290NORETURN( NOINLINE( COLDFUNC
7291void rb_vm_canary_is_found_dead(
enum ruby_vminsn_type i,
VALUE c)));
7294Init_vm_stack_canary(
void)
7297 int n = ruby_fill_random_bytes(&vm_stack_canary,
sizeof vm_stack_canary,
false);
7298 vm_stack_canary |= 0x01;
7300 vm_stack_canary_was_born =
true;
7305rb_vm_canary_is_found_dead(
enum ruby_vminsn_type i,
VALUE c)
7309 const char *insn = rb_insns_name(i);
7313 rb_bug(
"dead canary found at %s: %s", insn, str);
7317void Init_vm_stack_canary(
void) { }
7349 return (*(rb_invoke_funcptr0_t)funcptr)(ec, self);
7356 return (*(rb_invoke_funcptr1_t)funcptr)(ec, self, argv[0]);
7363 return (*(rb_invoke_funcptr2_t)funcptr)(ec, self, argv[0], argv[1]);
7370 return (*(rb_invoke_funcptr3_t)funcptr)(ec, self, argv[0], argv[1], argv[2]);
7377 return (*(rb_invoke_funcptr4_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3]);
7384 return (*(rb_invoke_funcptr5_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4]);
7391 return (*(rb_invoke_funcptr6_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
7398 return (*(rb_invoke_funcptr7_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
7405 return (*(rb_invoke_funcptr8_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
7411 typedef VALUE (*rb_invoke_funcptr9_t)(
rb_execution_context_t *ec,
VALUE self,
VALUE v1,
VALUE v2,
VALUE v3,
VALUE v4,
VALUE v5,
VALUE v6,
VALUE v7,
VALUE v8,
VALUE v9);
7412 return (*(rb_invoke_funcptr9_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
7418 typedef VALUE (*rb_invoke_funcptr10_t)(
rb_execution_context_t *ec,
VALUE self,
VALUE v1,
VALUE v2,
VALUE v3,
VALUE v4,
VALUE v5,
VALUE v6,
VALUE v7,
VALUE v8,
VALUE v9,
VALUE v10);
7419 return (*(rb_invoke_funcptr10_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
7425 typedef VALUE (*rb_invoke_funcptr11_t)(
rb_execution_context_t *ec,
VALUE self,
VALUE v1,
VALUE v2,
VALUE v3,
VALUE v4,
VALUE v5,
VALUE v6,
VALUE v7,
VALUE v8,
VALUE v9,
VALUE v10,
VALUE v11);
7426 return (*(rb_invoke_funcptr11_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
7432 typedef VALUE (*rb_invoke_funcptr12_t)(
rb_execution_context_t *ec,
VALUE self,
VALUE v1,
VALUE v2,
VALUE v3,
VALUE v4,
VALUE v5,
VALUE v6,
VALUE v7,
VALUE v8,
VALUE v9,
VALUE v10,
VALUE v11,
VALUE v12);
7433 return (*(rb_invoke_funcptr12_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
7439 typedef VALUE (*rb_invoke_funcptr13_t)(
rb_execution_context_t *ec,
VALUE self,
VALUE v1,
VALUE v2,
VALUE v3,
VALUE v4,
VALUE v5,
VALUE v6,
VALUE v7,
VALUE v8,
VALUE v9,
VALUE v10,
VALUE v11,
VALUE v12,
VALUE v13);
7440 return (*(rb_invoke_funcptr13_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
7446 typedef VALUE (*rb_invoke_funcptr14_t)(
rb_execution_context_t *ec,
VALUE self,
VALUE v1,
VALUE v2,
VALUE v3,
VALUE v4,
VALUE v5,
VALUE v6,
VALUE v7,
VALUE v8,
VALUE v9,
VALUE v10,
VALUE v11,
VALUE v12,
VALUE v13,
VALUE v14);
7447 return (*(rb_invoke_funcptr14_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
7453 typedef VALUE (*rb_invoke_funcptr15_t)(
rb_execution_context_t *ec,
VALUE self,
VALUE v1,
VALUE v2,
VALUE v3,
VALUE v4,
VALUE v5,
VALUE v6,
VALUE v7,
VALUE v8,
VALUE v9,
VALUE v10,
VALUE v11,
VALUE v12,
VALUE v13,
VALUE v14,
VALUE v15);
7454 return (*(rb_invoke_funcptr15_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
7459static builtin_invoker
7460lookup_builtin_invoker(
int argc)
7462 static const builtin_invoker invokers[] = {
7481 return invokers[argc];
7487 const bool canary_p = ISEQ_BODY(reg_cfp->iseq)->builtin_attrs & BUILTIN_ATTR_LEAF;
7488 SETUP_CANARY(canary_p);
7489 rb_insn_func_t func_ptr = (rb_insn_func_t)(uintptr_t)bf->func_ptr;
7490 VALUE ret = (*lookup_builtin_invoker(bf->argc))(ec, reg_cfp->self, argv, func_ptr);
7491 CHECK_CANARY(canary_p, BIN(invokebuiltin));
7498 return invoke_bf(ec, cfp, bf, argv);
7505 fputs(
"vm_invoke_builtin_delegate: passing -> ", stderr);
7506 for (
int i=0; i<bf->argc; i++) {
7507 ruby_debug_printf(
":%s ", rb_id2name(ISEQ_BODY(cfp->iseq)->local_table[i+start_index]));
7509 ruby_debug_printf(
"\n" "%s %s(%d):%p\n", RUBY_FUNCTION_NAME_STRING, bf->name, bf->argc,
7510 (
void *)(uintptr_t)bf->func_ptr);
7513 if (bf->argc == 0) {
7514 return invoke_bf(ec, cfp, bf, NULL);
7517 const VALUE *argv = cfp->ep - ISEQ_BODY(cfp->iseq)->local_table_size - VM_ENV_DATA_SIZE + 1 + start_index;
7518 return invoke_bf(ec, cfp, bf, argv);
7528 return cfp->ep[index];
#define RUBY_ASSERT(...)
Asserts that the given expression is truthy if and only if RUBY_DEBUG is truthy.
#define RUBY_EVENT_END
Encountered an end of a class clause.
#define RUBY_EVENT_C_CALL
A method, written in C, is called.
#define RUBY_EVENT_B_RETURN
Encountered a next statement.
#define RUBY_EVENT_CLASS
Encountered a new class.
#define RUBY_EVENT_LINE
Encountered a new line.
#define RUBY_EVENT_RETURN
Encountered a return statement.
#define RUBY_EVENT_C_RETURN
Return from a method, written in C.
#define RUBY_EVENT_B_CALL
Encountered a yield statement.
uint32_t rb_event_flag_t
Represents event(s).
#define RUBY_EVENT_CALL
A method, written in Ruby, is called.
#define RUBY_EVENT_RESCUE
Encountered a rescue statement.
VALUE rb_singleton_class(VALUE obj)
Finds or creates the singleton class of the passed object.
VALUE rb_module_new(void)
Creates a new, anonymous module.
VALUE rb_class_inherited(VALUE super, VALUE klass)
Calls Class::inherited.
VALUE rb_define_class_id(ID id, VALUE super)
This is a very badly designed API that creates an anonymous class.
#define TYPE(_)
Old name of rb_type.
#define RFLOAT_VALUE
Old name of rb_float_value.
#define T_STRING
Old name of RUBY_T_STRING.
#define Qundef
Old name of RUBY_Qundef.
#define INT2FIX
Old name of RB_INT2FIX.
#define T_NIL
Old name of RUBY_T_NIL.
#define T_FLOAT
Old name of RUBY_T_FLOAT.
#define T_IMEMO
Old name of RUBY_T_IMEMO.
#define ID2SYM
Old name of RB_ID2SYM.
#define T_BIGNUM
Old name of RUBY_T_BIGNUM.
#define SPECIAL_CONST_P
Old name of RB_SPECIAL_CONST_P.
#define T_STRUCT
Old name of RUBY_T_STRUCT.
#define T_FIXNUM
Old name of RUBY_T_FIXNUM.
#define SYM2ID
Old name of RB_SYM2ID.
#define CLASS_OF
Old name of rb_class_of.
#define rb_ary_new4
Old name of rb_ary_new_from_values.
#define FIXABLE
Old name of RB_FIXABLE.
#define LONG2FIX
Old name of RB_INT2FIX.
#define FIX2INT
Old name of RB_FIX2INT.
#define T_MODULE
Old name of RUBY_T_MODULE.
#define STATIC_SYM_P
Old name of RB_STATIC_SYM_P.
#define ASSUME
Old name of RBIMPL_ASSUME.
#define FIX2ULONG
Old name of RB_FIX2ULONG.
#define T_TRUE
Old name of RUBY_T_TRUE.
#define T_ICLASS
Old name of RUBY_T_ICLASS.
#define T_HASH
Old name of RUBY_T_HASH.
#define FL_TEST_RAW
Old name of RB_FL_TEST_RAW.
#define rb_ary_new3
Old name of rb_ary_new_from_args.
#define LONG2NUM
Old name of RB_LONG2NUM.
#define rb_exc_new3
Old name of rb_exc_new_str.
#define T_FALSE
Old name of RUBY_T_FALSE.
#define Qtrue
Old name of RUBY_Qtrue.
#define Qnil
Old name of RUBY_Qnil.
#define Qfalse
Old name of RUBY_Qfalse.
#define FIX2LONG
Old name of RB_FIX2LONG.
#define T_ARRAY
Old name of RUBY_T_ARRAY.
#define T_OBJECT
Old name of RUBY_T_OBJECT.
#define NIL_P
Old name of RB_NIL_P.
#define T_SYMBOL
Old name of RUBY_T_SYMBOL.
#define DBL2NUM
Old name of rb_float_new.
#define T_CLASS
Old name of RUBY_T_CLASS.
#define BUILTIN_TYPE
Old name of RB_BUILTIN_TYPE.
#define FL_TEST
Old name of RB_FL_TEST.
#define FIXNUM_P
Old name of RB_FIXNUM_P.
#define FL_USHIFT
Old name of RUBY_FL_USHIFT.
#define FL_SET_RAW
Old name of RB_FL_SET_RAW.
#define SYMBOL_P
Old name of RB_SYMBOL_P.
void rb_notimplement(void)
void rb_exc_raise(VALUE mesg)
Raises an exception in the current thread.
#define ruby_verbose
This variable controls whether the interpreter is in verbose mode (it backs Ruby's $VERBOSE).
VALUE rb_eTypeError
TypeError exception.
VALUE rb_eFatal
fatal exception.
VALUE rb_eNoMethodError
NoMethodError exception.
void rb_exc_fatal(VALUE mesg)
Raises a fatal error in the current thread.
VALUE rb_eRuntimeError
RuntimeError exception.
void rb_warn(const char *fmt,...)
Identical to rb_warning(), except it reports unless $VERBOSE is nil.
void rb_error_frozen_object(VALUE frozen_obj)
Identical to rb_error_frozen(), except it takes arbitrary Ruby object instead of C's string.
VALUE rb_exc_new_str(VALUE etype, VALUE str)
Identical to rb_exc_new_cstr(), except it takes a Ruby's string instead of C's.
@ RB_WARN_CATEGORY_STRICT_UNUSED_BLOCK
Warning is for checking unused block strictly.
VALUE rb_cClass
Class class.
VALUE rb_cArray
Array class.
VALUE rb_obj_alloc(VALUE klass)
Allocates an instance of the given class.
VALUE rb_cRegexp
Regexp class.
VALUE rb_obj_frozen_p(VALUE obj)
Just calls RB_OBJ_FROZEN() inside.
VALUE rb_cHash
Hash class.
VALUE rb_obj_class(VALUE obj)
Queries the class of an object.
VALUE rb_inspect(VALUE obj)
Generates a human-readable textual representation of the given object.
VALUE rb_cBasicObject
BasicObject class.
VALUE rb_cModule
Module class.
VALUE rb_class_real(VALUE klass)
Finds a "real" class.
VALUE rb_obj_is_kind_of(VALUE obj, VALUE klass)
Queries if the given object is an instance (of possibly descendants) of the given class.
VALUE rb_cFloat
Float class.
VALUE rb_cProc
Proc class.
VALUE rb_cString
String class.
#define RB_OBJ_WRITTEN(old, oldv, young)
Identical to RB_OBJ_WRITE(), except it doesn't write any values, but only a WB declaration.
#define RB_OBJ_WRITE(old, slot, young)
Declaration of a "back" pointer.
VALUE rb_ary_concat(VALUE lhs, VALUE rhs)
Destructively appends the contents of latter into the end of former.
VALUE rb_ary_shift(VALUE ary)
Destructively deletes an element from the beginning of the passed array and returns what was deleted.
VALUE rb_ary_resurrect(VALUE ary)
I guess there is no use case of this function in extension libraries, but this is a routine identical...
VALUE rb_ary_dup(VALUE ary)
Duplicates an array.
VALUE rb_ary_includes(VALUE ary, VALUE elem)
Queries if the passed array has the passed entry.
VALUE rb_ary_plus(VALUE lhs, VALUE rhs)
Creates a new array, concatenating the former to the latter.
VALUE rb_ary_cat(VALUE ary, const VALUE *train, long len)
Destructively appends multiple elements at the end of the array.
VALUE rb_check_array_type(VALUE obj)
Try converting an object to its array representation using its to_ary method, if any.
VALUE rb_ary_new(void)
Allocates a new, empty array.
VALUE rb_ary_pop(VALUE ary)
Destructively deletes an element from the end of the passed array and returns what was deleted.
VALUE rb_ary_hidden_new(long capa)
Allocates a hidden (no class) empty array.
VALUE rb_ary_push(VALUE ary, VALUE elem)
Special case of rb_ary_cat() that it adds only one element.
VALUE rb_ary_entry(VALUE ary, long off)
Queries an element of an array.
void rb_ary_store(VALUE ary, long key, VALUE val)
Destructively stores the passed value to the passed array's passed index.
#define UNLIMITED_ARGUMENTS
This macro is used in conjunction with rb_check_arity().
static int rb_check_arity(int argc, int min, int max)
Ensures that the passed integer is in the passed range.
VALUE rb_proc_call_with_block(VALUE recv, int argc, const VALUE *argv, VALUE proc)
Identical to rb_proc_call(), except you can additionally pass another proc object,...
VALUE rb_reg_last_match(VALUE md)
This just returns the argument, stringified.
VALUE rb_reg_match(VALUE re, VALUE str)
This is the match operator.
VALUE rb_reg_nth_match(int n, VALUE md)
Queries the nth captured substring.
VALUE rb_reg_match_post(VALUE md)
The portion of the original string after the given match.
VALUE rb_reg_nth_defined(int n, VALUE md)
Identical to rb_reg_nth_match(), except it just returns Boolean.
VALUE rb_reg_match_pre(VALUE md)
The portion of the original string before the given match.
VALUE rb_reg_match_last(VALUE md)
The portion of the original string that captured at the very last.
VALUE rb_str_append(VALUE dst, VALUE src)
Identical to rb_str_buf_append(), except it converts the right hand side before concatenating.
VALUE rb_str_succ(VALUE orig)
Searches for the "successor" of a string.
VALUE rb_str_buf_append(VALUE dst, VALUE src)
Identical to rb_str_cat_cstr(), except it takes Ruby's string instead of C's.
VALUE rb_str_concat(VALUE dst, VALUE src)
Identical to rb_str_append(), except it also accepts an integer as a codepoint.
#define rb_str_cat_cstr(buf, str)
Identical to rb_str_cat(), except it assumes the passed pointer is a pointer to a C string.
VALUE rb_str_length(VALUE)
Identical to rb_str_strlen(), except it returns the value in rb_cInteger.
VALUE rb_str_intern(VALUE str)
Identical to rb_to_symbol(), except it assumes the receiver being an instance of RString.
void rb_thread_schedule(void)
Tries to switch to another thread.
VALUE rb_const_get(VALUE space, ID name)
Identical to rb_const_defined(), except it returns the actual defined value.
VALUE rb_ivar_set(VALUE obj, ID name, VALUE val)
Identical to rb_iv_set(), except it accepts the name as an ID instead of a C string.
void rb_cvar_set(VALUE klass, ID name, VALUE val)
Assigns a value to a class variable.
VALUE rb_cvar_find(VALUE klass, ID name, VALUE *front)
Identical to rb_cvar_get(), except it takes additional "front" pointer.
VALUE rb_ivar_get(VALUE obj, ID name)
Identical to rb_iv_get(), except it accepts the name as an ID instead of a C string.
void rb_const_set(VALUE space, ID name, VALUE val)
Names a constant.
VALUE rb_autoload_load(VALUE space, ID name)
Kicks the autoload procedure as if it was "touched".
VALUE rb_mod_name(VALUE mod)
Queries the name of a module.
VALUE rb_const_get_at(VALUE space, ID name)
Identical to rb_const_defined_at(), except it returns the actual defined value.
void rb_set_class_path_string(VALUE klass, VALUE space, VALUE name)
Identical to rb_set_class_path(), except it accepts the name as Ruby's string instead of C's.
VALUE rb_ivar_defined(VALUE obj, ID name)
Queries if the instance variable is defined at the object.
int rb_const_defined_at(VALUE space, ID name)
Identical to rb_const_defined(), except it doesn't look for parent classes.
VALUE rb_cvar_defined(VALUE klass, ID name)
Queries if the given class has the given class variable.
VALUE rb_class_path(VALUE mod)
Identical to rb_mod_name(), except it returns #<Class: ...> style inspection for anonymous modules.
int rb_const_defined(VALUE space, ID name)
Queries if the constant is defined at the namespace.
VALUE rb_check_funcall(VALUE recv, ID mid, int argc, const VALUE *argv)
Identical to rb_funcallv(), except it returns RUBY_Qundef instead of raising rb_eNoMethodError.
rb_alloc_func_t rb_get_alloc_func(VALUE klass)
Queries the allocator function of a class.
void rb_define_alloc_func(VALUE klass, rb_alloc_func_t func)
Sets the allocator function of a class.
int rb_method_boundp(VALUE klass, ID id, int ex)
Queries if the klass has this method.
ID rb_check_id(volatile VALUE *namep)
Detects if the given name is already interned or not.
VALUE rb_sym2str(VALUE symbol)
Obtain a frozen string representation of a symbol (not including the leading colon).
int off
Offset inside of ptr.
int len
Length of the buffer.
static bool rb_ractor_shareable_p(VALUE obj)
Queries if multiple Ractors can share the passed object or not.
#define RB_OBJ_SHAREABLE_P(obj)
Queries if the passed object has previously classified as shareable or not.
#define MEMCPY(p1, p2, type, n)
Handy macro to call memcpy.
#define ALLOCA_N(type, n)
#define RB_GC_GUARD(v)
Prevents premature destruction of local objects.
#define MEMMOVE(p1, p2, type, n)
Handy macro to call memmove.
VALUE type(ANYARGS)
ANYARGS-ed function type.
VALUE rb_ensure(type *q, VALUE w, type *e, VALUE r)
An equivalent of ensure clause.
#define RARRAY_LEN
Just another name of rb_array_len.
static int RARRAY_LENINT(VALUE ary)
Identical to rb_array_len(), except it differs for the return type.
static VALUE * RARRAY_PTR(VALUE ary)
Wild use of a C pointer.
#define RARRAY_AREF(a, i)
#define RARRAY_CONST_PTR
Just another name of rb_array_const_ptr.
static VALUE RBASIC_CLASS(VALUE obj)
Queries the class of an object.
#define RBASIC(obj)
Convenient casting macro.
#define RCLASS_SUPER
Just another name of rb_class_get_superclass.
#define RHASH_SIZE(h)
Queries the size of the hash.
#define RHASH_EMPTY_P(h)
Checks if the hash is empty.
static VALUE * ROBJECT_FIELDS(VALUE obj)
Queries the instance variables.
#define StringValueCStr(v)
Identical to StringValuePtr, except it additionally checks the contents for viability as a C string.
#define RB_PASS_KEYWORDS
Pass keywords, final argument should be a hash of keywords.
#define RB_NO_KEYWORDS
Do not pass keywords.
static bool RB_SPECIAL_CONST_P(VALUE obj)
Checks if the given object is of enum ruby_special_consts.
#define RTEST
This is an old name of RB_TEST.
#define ANYARGS
Functions declared using this macro take arbitrary arguments, including void.
const VALUE ary[1]
Embedded elements.
const VALUE * ptr
Pointer to the C array that holds the elements of the array.
const ID * segments
A null-terminated list of ids, used to represent a constant's path; idNULL is used to represent the :: prefix.
rb_cref_t * cref
class reference, should be marked
const rb_iseq_t * iseqptr
iseq pointer, should be separated from iseqval
Internal header for Namespace.
IFUNC (Internal FUNCtion)
const VALUE cref_or_me
class reference or rb_method_entry_t
intptr_t SIGNED_VALUE
A signed integer type that has the same width with VALUE.
uintptr_t ID
Type that represents a Ruby identifier such as a variable name.
#define SIZEOF_VALUE
Identical to sizeof(VALUE), except it is a macro that can also be used inside of preprocessor directi...
uintptr_t VALUE
Type that represents a Ruby object.
static enum ruby_value_type RB_BUILTIN_TYPE(VALUE obj)
Queries the type of the object.
static bool RB_FLOAT_TYPE_P(VALUE obj)
Queries if the object is an instance of rb_cFloat.
static bool RB_TYPE_P(VALUE obj, enum ruby_value_type t)
Queries if the given object is of given type.