11#include "ruby/internal/config.h"
15#ifdef HAVE_STDATOMIC_H
16 #include <stdatomic.h>
20#include "debug_counter.h"
22#include "internal/class.h"
23#include "internal/compar.h"
24#include "internal/hash.h"
25#include "internal/numeric.h"
26#include "internal/proc.h"
27#include "internal/random.h"
28#include "internal/variable.h"
29#include "internal/set_table.h"
30#include "internal/struct.h"
35#include "insns_info.inc"
                                         int argc, const VALUE *argv, int priv);
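/* The VM's special exceptions (e.g. the SystemStackError raised on stack
 * overflow) are preallocated and shared, so they are shallow-copied here
 * before per-raise state such as a backtrace is attached to them. */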
static VALUE
ruby_vm_special_exception_copy(VALUE exc)
{
    VALUE e = rb_obj_alloc(rb_class_real(RBASIC_CLASS(exc)));
    rb_obj_copy_ivar(e, exc);
    return e;
}
    VALUE mesg = rb_ec_vm_ptr(ec)->special_exceptions[ruby_error_sysstack];
    ec->raised_flag = RAISED_STACKOVERFLOW;

    VALUE at = rb_ec_backtrace_object(ec);
    mesg = ruby_vm_special_exception_copy(mesg);

    EC_JUMP_TAG(ec, TAG_RAISE);
NORETURN(static void vm_stackoverflow(void));
static void
vm_stackoverflow(void)
{
    ec_stack_overflow(GET_EC(), TRUE);
}
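/* How much recovery work is safe depends on crit: at or above
 * rb_stack_overflow_fatal only the preallocated error is set before
 * jumping, while lower levels may still collect a backtrace first. */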
        rb_bug("system stack overflow during GC. Faulty native extension?");

    if (crit >= rb_stack_overflow_fatal) {
        ec->raised_flag = RAISED_STACKOVERFLOW;
        ec->errinfo = rb_ec_vm_ptr(ec)->special_exceptions[ruby_error_stackfatal];
        EC_JUMP_TAG(ec, TAG_RAISE);
    }
    ec_stack_overflow(ec, crit < rb_stack_overflow_signal);
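/* The helpers from here down to the vm_check_frame()/vm_check_canary()
 * macros are sanity checks compiled in only under VM_CHECK_MODE; release
 * builds define them away as empty macros. */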
callable_class_p(VALUE klass)
{
#if VM_CHECK_MODE >= 2
    if (!klass) return FALSE;
        VM_ASSERT(IMEMO_TYPE_P((VALUE)cme, imemo_ment),
                  "imemo_type:%s", rb_imemo_name(imemo_type((VALUE)cme)));

        if (callable_class_p(cme->defined_class)) {
vm_check_frame_detail(VALUE type, int req_block, int req_me, int req_cref,
                      VALUE specval, VALUE cref_or_me, int is_cframe, const rb_iseq_t *iseq)
{
    unsigned int magic = (unsigned int)(type & VM_FRAME_MAGIC_MASK);
    enum imemo_type cref_or_me_type = imemo_env;

        cref_or_me_type = imemo_type(cref_or_me);
    if (type & VM_FRAME_FLAG_BMETHOD) {
        req_me = TRUE;
    }

    if (req_block && (type & VM_ENV_FLAG_LOCAL) == 0) {
        rb_bug("vm_push_frame: specval (%p) should be a block_ptr on %x frame", (void *)specval, magic);
    }
    if (!req_block && (type & VM_ENV_FLAG_LOCAL) != 0) {
        rb_bug("vm_push_frame: specval (%p) should not be a block_ptr on %x frame", (void *)specval, magic);
    }

    if (cref_or_me_type != imemo_ment) {
        rb_bug("vm_push_frame: (%s) should be method entry on %x frame", rb_obj_info(cref_or_me), magic);
    }

    if (req_cref && cref_or_me_type != imemo_cref) {
        rb_bug("vm_push_frame: (%s) should be CREF on %x frame", rb_obj_info(cref_or_me), magic);
    }

    if (cref_or_me != Qfalse && cref_or_me_type != imemo_cref) {
        if (((type & VM_FRAME_FLAG_LAMBDA) || magic == VM_FRAME_MAGIC_IFUNC || magic == VM_FRAME_MAGIC_DUMMY) && (cref_or_me_type == imemo_ment)) {
            /* ignore */
        }
        else {
            rb_bug("vm_push_frame: (%s) should be false or cref on %x frame", rb_obj_info(cref_or_me), magic);
        }
    }

    if (cref_or_me_type == imemo_ment) {
        const rb_callable_method_entry_t *me = (const rb_callable_method_entry_t *)cref_or_me;
        if (!callable_method_entry_p(me)) {
            rb_bug("vm_push_frame: ment (%s) should be callable on %x frame.", rb_obj_info(cref_or_me), magic);
        }
    }

    if ((type & VM_FRAME_MAGIC_MASK) == VM_FRAME_MAGIC_DUMMY) {
        VM_ASSERT(iseq == NULL ||
                  RUBY_VM_NORMAL_ISEQ_P(iseq));
    }
    else {
        VM_ASSERT(is_cframe == !RUBY_VM_NORMAL_ISEQ_P(iseq));
    }
    VALUE given_magic = type & VM_FRAME_MAGIC_MASK;

#define CHECK(magic, req_block, req_me, req_cref, is_cframe) \
    case magic: \
      vm_check_frame_detail(type, req_block, req_me, req_cref, \
                            specval, cref_or_me, is_cframe, iseq); \
      break

    switch (given_magic) {
        /*                           BLK    ME     CREF   CFRAME */
        CHECK(VM_FRAME_MAGIC_METHOD, TRUE,  TRUE,  FALSE, FALSE);
        CHECK(VM_FRAME_MAGIC_CLASS,  TRUE,  FALSE, TRUE,  FALSE);
        CHECK(VM_FRAME_MAGIC_TOP,    TRUE,  FALSE, TRUE,  FALSE);
        CHECK(VM_FRAME_MAGIC_CFUNC,  TRUE,  TRUE,  FALSE, TRUE);
        CHECK(VM_FRAME_MAGIC_BLOCK,  FALSE, FALSE, FALSE, FALSE);
        CHECK(VM_FRAME_MAGIC_IFUNC,  FALSE, FALSE, FALSE, TRUE);
        CHECK(VM_FRAME_MAGIC_EVAL,   FALSE, FALSE, FALSE, FALSE);
        CHECK(VM_FRAME_MAGIC_RESCUE, FALSE, FALSE, FALSE, FALSE);
        CHECK(VM_FRAME_MAGIC_DUMMY,  TRUE,  FALSE, FALSE, FALSE);
      default:
        rb_bug("vm_push_frame: unknown type (%x)", (unsigned int)given_magic);
    }
#undef CHECK
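/* The stack canary is a random VALUE planted just above the stack top.
 * A "leaf" instruction that unexpectedly grows the stack overwrites it,
 * and rb_vm_check_canary() below identifies the offending instruction. */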
static VALUE vm_stack_canary;
static bool vm_stack_canary_was_born = false;
    unsigned int pos = 0;
    while (pos < ISEQ_BODY(iseq)->iseq_size) {
        int opcode = rb_vm_insn_addr2opcode((void *)ISEQ_BODY(iseq)->iseq_encoded[pos]);
        unsigned int next_pos = pos + insn_len(opcode);
        if (ISEQ_BODY(iseq)->iseq_encoded + next_pos == pc) {
            return pos;
        }
        pos = next_pos;
    }
    rb_bug("failed to find the previous insn");
    if (! LIKELY(vm_stack_canary_was_born)) {
        return;
    }
    else if ((VALUE *)reg_cfp == ec->vm_stack + ec->vm_stack_size) {
        return;
    }
    else if (! (iseq = GET_ISEQ())) {
        return;
    }
    else if (LIKELY(sp[0] != vm_stack_canary)) {
        return;
    }
    const VALUE *orig = rb_iseq_original_iseq(iseq);
    const VALUE iseqw = rb_iseqw_new(iseq);
    const VALUE inspection = rb_inspect(iseqw);
    const char *stri = rb_str_to_cstr(inspection);
    const VALUE disasm = rb_iseq_disasm(iseq);
    const char *strd = rb_str_to_cstr(disasm);
    const ptrdiff_t pos = previous_insn_index(iseq, GET_PC());
    const enum ruby_vminsn_type insn = (enum ruby_vminsn_type)orig[pos];
    const char *name = insn_name(insn);
292 "We are killing the stack canary set by %s, "
293 "at %s@pc=%"PRIdPTR
"\n"
294 "watch out the C stack trace.\n"
296 name, stri, pos, strd);
297 rb_bug(
"see above.");
#define vm_check_canary(ec, sp) rb_vm_check_canary(ec, sp)

#else
#define vm_check_canary(ec, sp)
#define vm_check_frame(a, b, c, d)
#endif /* VM_CHECK_MODE > 0 */
static void
vm_push_frame_debug_counter_inc(
        const struct rb_execution_context_struct *ec,
        const struct rb_control_frame_struct *reg_cfp,
        VALUE type)
{
    const struct rb_control_frame_struct *prev_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(reg_cfp);

    RB_DEBUG_COUNTER_INC(frame_push);

    if (RUBY_VM_END_CONTROL_FRAME(ec) != prev_cfp) {
        const bool curr = VM_FRAME_RUBYFRAME_P(reg_cfp);
        const bool prev = VM_FRAME_RUBYFRAME_P(prev_cfp);

        if (prev) {
            if (curr) {
                RB_DEBUG_COUNTER_INC(frame_R2R);
            }
            else {
                RB_DEBUG_COUNTER_INC(frame_R2C);
            }
        }
        else {
            if (curr) {
                RB_DEBUG_COUNTER_INC(frame_C2R);
            }
            else {
                RB_DEBUG_COUNTER_INC(frame_C2C);
            }
        }
    }
    switch (type & VM_FRAME_MAGIC_MASK) {
      case VM_FRAME_MAGIC_METHOD: RB_DEBUG_COUNTER_INC(frame_push_method); return;
      case VM_FRAME_MAGIC_BLOCK:  RB_DEBUG_COUNTER_INC(frame_push_block);  return;
      case VM_FRAME_MAGIC_CLASS:  RB_DEBUG_COUNTER_INC(frame_push_class);  return;
      case VM_FRAME_MAGIC_TOP:    RB_DEBUG_COUNTER_INC(frame_push_top);    return;
      case VM_FRAME_MAGIC_CFUNC:  RB_DEBUG_COUNTER_INC(frame_push_cfunc);  return;
      case VM_FRAME_MAGIC_IFUNC:  RB_DEBUG_COUNTER_INC(frame_push_ifunc);  return;
      case VM_FRAME_MAGIC_EVAL:   RB_DEBUG_COUNTER_INC(frame_push_eval);   return;
      case VM_FRAME_MAGIC_RESCUE: RB_DEBUG_COUNTER_INC(frame_push_rescue); return;
      case VM_FRAME_MAGIC_DUMMY:  RB_DEBUG_COUNTER_INC(frame_push_dummy);  return;
    }
    rb_bug("unreachable");
#else
#define vm_push_frame_debug_counter_inc(ec, cfp, t)
#endif
VALUE
rb_vm_stack_canary(void)
{
    return vm_stack_canary;
}
STATIC_ASSERT(VM_ENV_DATA_INDEX_ME_CREF, VM_ENV_DATA_INDEX_ME_CREF == -2);
STATIC_ASSERT(VM_ENV_DATA_INDEX_SPECVAL, VM_ENV_DATA_INDEX_SPECVAL == -1);
STATIC_ASSERT(VM_ENV_DATA_INDEX_FLAGS,   VM_ENV_DATA_INDEX_FLAGS   == -0);
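/* These asserts pin down the env layout relied on throughout this file:
 * ep[-2] holds the method entry or CREF, ep[-1] the special value
 * (block handler or previous EP), and ep[0] the env flags. */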
    vm_check_frame(type, specval, cref_or_me, iseq);
    VM_ASSERT(local_size >= 0);

    /* check stack overflow */
    CHECK_VM_STACK_OVERFLOW0(cfp, sp, local_size + stack_max);
    vm_check_canary(ec, sp);

    /* setup vm value stack */
    for (int i=0; i < local_size; i++) {
        *sp++ = Qnil;
    }

    #if defined HAVE_DECL_ATOMIC_SIGNAL_FENCE && HAVE_DECL_ATOMIC_SIGNAL_FENCE
    atomic_signal_fence(memory_order_seq_cst);
    #endif

    vm_push_frame_debug_counter_inc(ec, cfp, type);
    if (VMDEBUG == 2) SDR();
    ec->cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);

    VALUE flags = ep[VM_ENV_DATA_INDEX_FLAGS];

    if (VMDEBUG == 2) SDR();

    RUBY_VM_CHECK_INTS(ec);
    ec->cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);

    return flags & VM_FRAME_FLAG_FINISH;

    vm_pop_frame(ec, ec->cfp, ec->cfp->ep);
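/* vm_pop_frame() reports whether the popped frame carried
 * VM_FRAME_FLAG_FINISH, i.e. whether the enclosing interpreter loop
 * should return to its C caller. */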
    rb_iseq_t *dmy_iseq = rb_iseq_alloc_with_dummy_path(fname);

                  VM_FRAME_MAGIC_DUMMY | VM_ENV_FLAG_LOCAL | VM_FRAME_FLAG_FINISH,

                  VM_BLOCK_HANDLER_NONE,

    return (VALUE)dmy_iseq;
rb_arity_error_new(int argc, int min, int max)
{
    VALUE err_mess = rb_sprintf("wrong number of arguments (given %d, expected %d", argc, min);

        rb_str_catf(err_mess, "..%d", max);
rb_error_arity(int argc, int min, int max)
NOINLINE(static void vm_env_write_slowpath(const VALUE *ep, int index, VALUE v));
static void
vm_env_write_slowpath(const VALUE *ep, int index, VALUE v)
{
    /* remember env value forcely */
    rb_gc_writebarrier_remember(VM_ENV_ENVVAL(ep));
    VM_FORCE_WRITE(&ep[index], v);
    VM_ENV_FLAGS_UNSET(ep, VM_ENV_FLAG_WB_REQUIRED);
    RB_DEBUG_COUNTER_INC(lvar_set_slowpath);
}
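/* Writes to an env that is still on the VM stack need no write barrier;
 * once the env has escaped to the heap (VM_ENV_FLAG_WB_REQUIRED), the
 * slowpath above re-runs the barrier and clears the flag. */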
static inline void
vm_env_write(const VALUE *ep, int index, VALUE v)
{
    VALUE flags = ep[VM_ENV_DATA_INDEX_FLAGS];
    if (LIKELY((flags & VM_ENV_FLAG_WB_REQUIRED) == 0)) {
        VM_STACK_ENV_WRITE(ep, index, v);
    }
    else {
        vm_env_write_slowpath(ep, index, v);
    }
}
void
rb_vm_env_write(const VALUE *ep, int index, VALUE v)
{
    vm_env_write(ep, index, v);
}
    if (block_handler == VM_BLOCK_HANDLER_NONE) {
        return Qnil;
    }
    else {
        switch (vm_block_handler_type(block_handler)) {
          case block_handler_type_iseq:
          case block_handler_type_ifunc:
            return rb_vm_make_proc(ec, VM_BH_TO_CAPT_BLOCK(block_handler), rb_cProc);
          case block_handler_type_symbol:
            return rb_sym_to_proc(VM_BH_TO_SYMBOL(block_handler));
          case block_handler_type_proc:
            return VM_BH_TO_PROC(block_handler);
          default:
            VM_UNREACHABLE(rb_vm_bh_to_procval);
        }
    }
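/* Special variables ($~, $_) live in a vm_svar imemo reachable from the
 * local EP, or from ec->root_svar at the top level; the lep_svar helpers
 * below locate it and create it lazily on first write. */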
vm_svar_valid_p(VALUE svar)
{
    switch (imemo_type(svar)) {

        rb_bug("vm_svar_valid_p: unknown type: %s", rb_obj_info(svar));
    if (lep && (ec == NULL || ec->root_lep != lep)) {
        svar = lep[VM_ENV_DATA_INDEX_ME_CREF];
    }
    else {
        svar = ec->root_svar;
    }

    VM_ASSERT(svar == Qfalse || vm_svar_valid_p(svar));
    VM_ASSERT(vm_svar_valid_p((VALUE)svar));

    if (lep && (ec == NULL || ec->root_lep != lep)) {
        vm_env_write(lep, VM_ENV_DATA_INDEX_ME_CREF, (VALUE)svar);
    }
    else {
        RB_OBJ_WRITE(rb_ec_thread_ptr(ec)->self, &ec->root_svar, svar);
    }
    const struct vm_svar *svar = lep_svar(ec, lep);

      case VM_SVAR_LASTLINE:
        return svar->lastline;
      case VM_SVAR_BACKREF:
        return svar->backref;

        const VALUE ary = svar->others;
    struct vm_svar *svar = IMEMO_NEW(struct vm_svar, imemo_svar, obj);
    struct vm_svar *svar = lep_svar(ec, lep);

        lep_svar_write(ec, lep, svar = svar_new((VALUE)svar));

      case VM_SVAR_LASTLINE:

      case VM_SVAR_BACKREF:

        VALUE ary = svar->others;
        val = lep_svar_get(ec, lep, key);

        VALUE backref = lep_svar_get(ec, lep, VM_SVAR_BACKREF);

            rb_bug("unexpected back-ref");

    VALUE backref = lep_svar_get(ec, lep, VM_SVAR_BACKREF);

            return rb_reg_last_defined(backref);

            rb_bug("unexpected back-ref");

        nth = (int)(type >> 1);
check_method_entry(VALUE obj, int can_be_svar)
{
    if (obj == Qfalse) return NULL;

    if (!RB_TYPE_P(obj, T_IMEMO)) rb_bug("check_method_entry: unknown type: %s", rb_obj_info(obj));

    switch (imemo_type(obj)) {

        rb_bug("check_method_entry: svar should not be there:");
    const VALUE *ep = cfp->ep;

    while (!VM_ENV_LOCAL_P(ep)) {
        if ((me = check_method_entry(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) != NULL) return me;
        ep = VM_ENV_PREV_EP(ep);
    }

    return check_method_entry(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
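/* The ME/CREF slot can hold a method entry, a CREF, or an svar wrapping
 * either.  check_method_entry() above and check_cref() below decode one
 * slot; callers walk the EP chain outward until a match is found. */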
    switch (me->def->type) {
      case VM_METHOD_TYPE_ISEQ:
        return me->def->body.iseq.iseqptr;

    switch (me->def->type) {
      case VM_METHOD_TYPE_ISEQ:
        return me->def->body.iseq.cref;
#if VM_CHECK_MODE == 0

check_cref(VALUE obj, int can_be_svar)
{
    if (obj == Qfalse) return NULL;

    if (!RB_TYPE_P(obj, T_IMEMO)) rb_bug("check_cref: unknown type: %s", rb_obj_info(obj));

    switch (imemo_type(obj)) {

        rb_bug("check_method_entry: svar should not be there:");
vm_env_cref(const VALUE *ep)
{
    while (!VM_ENV_LOCAL_P(ep)) {
        if ((cref = check_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) != NULL) return cref;
        ep = VM_ENV_PREV_EP(ep);
    }

    return check_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
is_cref(const VALUE v, int can_be_svar)
{
    switch (imemo_type(v)) {
vm_env_cref_by_cref(const VALUE *ep)
{
    while (!VM_ENV_LOCAL_P(ep)) {
        if (is_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) return TRUE;
        ep = VM_ENV_PREV_EP(ep);
    }
    return is_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
cref_replace_with_duplicated_cref_each_frame(const VALUE *vptr, int can_be_svar, VALUE parent)
{
    const VALUE v = *vptr;

    switch (imemo_type(v)) {

        new_cref = vm_cref_dup(cref);

        VM_FORCE_WRITE(vptr, (VALUE)new_cref);

        return cref_replace_with_duplicated_cref_each_frame(&((struct vm_svar *)v)->cref_or_me, FALSE, v);

        rb_bug("cref_replace_with_duplicated_cref_each_frame: unreachable");
vm_cref_replace_with_duplicated_cref(const VALUE *ep)
{
    if (vm_env_cref_by_cref(ep)) {

        while (!VM_ENV_LOCAL_P(ep)) {
            envval = VM_ENV_ESCAPED_P(ep) ? VM_ENV_ENVVAL(ep) : Qfalse;
            if ((cref = cref_replace_with_duplicated_cref_each_frame(&ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE, envval)) != NULL) {
                return cref;
            }
            ep = VM_ENV_PREV_EP(ep);
        }
        envval = VM_ENV_ESCAPED_P(ep) ? VM_ENV_ENVVAL(ep) : Qfalse;
        return cref_replace_with_duplicated_cref_each_frame(&ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE, envval);
    }
    else {
        rb_bug("vm_cref_dup: unreachable");
    }
vm_get_cref(const VALUE *ep)

        rb_bug("vm_get_cref: unreachable");

rb_vm_get_cref(const VALUE *ep)
{
    return vm_get_cref(ep);
}

    return vm_get_cref(cfp->ep);
vm_get_const_key_cref(const VALUE *ep)

        if (RCLASS_SINGLETON_P(CREF_CLASS(cref)) ||
            RCLASS_CLONED_P(CREF_CLASS(cref))) {

        cref = CREF_NEXT(cref);
    #define ADD_NEW_CREF(new_cref) \
    if (new_cref_tail) { \
        RB_OBJ_WRITE(new_cref_tail, &new_cref_tail->next, new_cref); \
    } \
    else { \
        new_cref_head = new_cref; \
    } \
    new_cref_tail = new_cref;

        if (CREF_CLASS(cref) == old_klass) {
            new_cref = vm_cref_new_use_prev(new_klass, METHOD_VISI_UNDEF, FALSE, cref, FALSE);
            ADD_NEW_CREF(new_cref);
            return new_cref_head;
        }
        new_cref = vm_cref_new_use_prev(CREF_CLASS(cref), METHOD_VISI_UNDEF, FALSE, cref, FALSE);
        cref = CREF_NEXT(cref);
        ADD_NEW_CREF(new_cref);

    return new_cref_head;
        prev_cref = vm_env_cref(ep);

        prev_cref = vm_env_cref(cfp->ep);

    return vm_cref_new(klass, METHOD_VISI_PUBLIC, FALSE, prev_cref, pushed_by_eval, singleton);
vm_get_cbase(const VALUE *ep)
{
    const rb_cref_t *cref = vm_get_cref(ep);

    return CREF_CLASS_FOR_DEFINITION(cref);
vm_get_const_base(const VALUE *ep)
{
    const rb_cref_t *cref = vm_get_cref(ep);

        if (!CREF_PUSHED_BY_EVAL(cref)) {
            return CREF_CLASS_FOR_DEFINITION(cref);
        }
        cref = CREF_NEXT(cref);
vm_check_if_namespace(VALUE klass)
{
        rb_raise(rb_eTypeError, "%+"PRIsVALUE" is not a class/module", klass);
vm_ensure_not_refinement_module(VALUE self)
{
        rb_warn("not defined at the refinement, but at the outer class/module");
    if (NIL_P(orig_klass) && allow_nil) {
        const rb_cref_t *root_cref = vm_get_cref(ec->cfp->ep);

        while (root_cref && CREF_PUSHED_BY_EVAL(root_cref)) {
            root_cref = CREF_NEXT(root_cref);
        }
        while (cref && CREF_NEXT(cref)) {
            if (CREF_PUSHED_BY_EVAL(cref)) {

                klass = CREF_CLASS(cref);

            cref = CREF_NEXT(cref);

            if (!NIL_P(klass)) {

                if ((ce = rb_const_lookup(klass, id))) {
                    rb_const_warn_if_deprecated(ce, klass, id);

                        if (am == klass) break;

                        if (is_defined) return 1;
                        if (rb_autoloading_value(klass, id, &av, NULL)) return av;

                        goto search_continue;
        if (UNLIKELY(!rb_ractor_main_p())) {

            rb_raise(rb_eRactorIsolationError,
                     "can not access non-shareable objects in constant %"PRIsVALUE"::%s by non-main ractor.",
                     rb_class_path(klass), rb_id2name(id));
        if (root_cref && !NIL_P(CREF_CLASS(root_cref))) {
            klass = vm_get_iclass(ec->cfp, CREF_CLASS(root_cref));
        vm_check_if_namespace(orig_klass);

            return rb_public_const_defined_from(orig_klass, id);

            return rb_public_const_get_from(orig_klass, id);
    return vm_get_ev_const(ec, orig_klass, id, allow_nil == Qtrue, 0);
    int allow_nil = TRUE;
    if (segments[0] == idNULL) {

    while (segments[idx]) {
        ID id = segments[idx++];
        val = vm_get_ev_const(ec, val, id, allow_nil, 0);
        rb_bug("vm_get_cvar_base: no cref");

    while (CREF_NEXT(cref) &&
           (NIL_P(CREF_CLASS(cref)) || RCLASS_SINGLETON_P(CREF_CLASS(cref)) ||
            CREF_PUSHED_BY_EVAL(cref) || CREF_SINGLETON(cref))) {
        cref = CREF_NEXT(cref);
    }
    if (top_level_raise && !CREF_NEXT(cref)) {

    klass = vm_get_iclass(cfp, CREF_CLASS(cref));
ALWAYS_INLINE(static void fill_ivar_cache(const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, int is_attr, attr_index_t index, shape_id_t shape_id));
static void
fill_ivar_cache(const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, int is_attr, attr_index_t index, shape_id_t shape_id)
{
    if (is_attr) {
        vm_cc_attr_index_set(cc, index, shape_id);
    }
    else {
        vm_ic_attr_index_set(iseq, ic, index, shape_id);
    }
}
#define ractor_incidental_shareable_p(cond, val) \
    (!(cond) || rb_ractor_shareable_p(val))
#define ractor_object_incidental_shareable_p(obj, val) \
    ractor_incidental_shareable_p(rb_ractor_shareable_p(obj), val)
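/* vm_getivar(): the inline cache (ic for getinstancevariable, cc for an
 * attr_reader) pairs a shape_id with an attribute index.  When the
 * receiver's shape still matches the cached shape, the ivar is read
 * directly from the fields array at the cached index; otherwise the
 * shape tree is searched and the cache refilled. */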
        return default_value;

    if (UNLIKELY(!rb_ractor_main_p())) {

    if (default_value == Qundef) {

        fields_obj = RCLASS_WRITABLE_FIELDS_OBJ(obj);

        fields_obj = rb_obj_fields(obj, id);

        return default_value;

    shape_id_t shape_id = RBASIC_SHAPE_ID_FOR_READ(fields_obj);
    VALUE *ivar_list = rb_imemo_fields_ptr(fields_obj);

    shape_id_t cached_id;

        vm_cc_atomic_shape_and_index(cc, &cached_id, &index);

        vm_ic_atomic_shape_and_index(ic, &cached_id, &index);

    if (LIKELY(cached_id == shape_id)) {

        if (index == ATTR_INDEX_NOT_SET) {
            return default_value;
        }

        val = ivar_list[index];
#if USE_DEBUG_COUNTER
        RB_DEBUG_COUNTER_INC(ivar_get_ic_hit);

        RB_DEBUG_COUNTER_INC(ivar_get_obj_hit);
#endif

#if USE_DEBUG_COUNTER
        if (cached_id != INVALID_SHAPE_ID) {
            RB_DEBUG_COUNTER_INC(ivar_get_cc_miss_set);
        }
        else {
            RB_DEBUG_COUNTER_INC(ivar_get_cc_miss_unset);
        }

        if (cached_id != INVALID_SHAPE_ID) {
            RB_DEBUG_COUNTER_INC(ivar_get_ic_miss_set);
        }
        else {
            RB_DEBUG_COUNTER_INC(ivar_get_ic_miss_unset);
        }

        RB_DEBUG_COUNTER_INC(ivar_get_ic_miss);

        RB_DEBUG_COUNTER_INC(ivar_get_obj_miss);
#endif

        if (UNLIKELY(rb_shape_too_complex_p(shape_id))) {

            RUBY_ASSERT(table == rb_imemo_fields_complex_tbl(fields_obj));

            if (!st_lookup(table, id, &val)) {
                val = default_value;
            }

        shape_id_t previous_cached_id = cached_id;
        if (rb_shape_get_iv_index_with_hint(shape_id, id, &index, &cached_id)) {

            if (cached_id != previous_cached_id) {
                fill_ivar_cache(iseq, ic, cc, is_attr, index, cached_id);
            }

            if (index == ATTR_INDEX_NOT_SET) {
                val = default_value;
            }

            val = ivar_list[index];

            vm_cc_attr_index_initialize(cc, shape_id);

            vm_ic_attr_index_initialize(ic, shape_id);

            val = default_value;

    if (!UNDEF_P(default_value)) {

    RB_DEBUG_COUNTER_INC(ivar_get_ic_miss);

    return rb_attr_get(obj, id);
populate_cache(attr_index_t index, shape_id_t next_shape_id, ID id, const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, bool is_attr)
{
    RUBY_ASSERT(!rb_shape_too_complex_p(next_shape_id));

    if (is_attr) {
        vm_cc_attr_index_set(cc, index, next_shape_id);
    }
    else {
        vm_ic_attr_index_set(iseq, ic, index, next_shape_id);
    }
}
    RB_DEBUG_COUNTER_INC(ivar_set_ic_miss);

    rb_check_frozen(obj);

    attr_index_t index = rb_obj_ivar_set(obj, id, val);

    shape_id_t next_shape_id = RBASIC_SHAPE_ID(obj);

    if (!rb_shape_too_complex_p(next_shape_id)) {
        populate_cache(index, next_shape_id, id, iseq, ic, cc, is_attr);
    }

    RB_DEBUG_COUNTER_INC(ivar_set_obj_miss);
    return vm_setivar_slowpath(obj, id, val, iseq, ic, NULL, false);

    return vm_setivar_slowpath(obj, id, val, NULL, NULL, cc, true);
NOINLINE(static VALUE vm_setivar_default(VALUE obj, ID id, VALUE val, shape_id_t dest_shape_id, attr_index_t index));
vm_setivar_default(VALUE obj, ID id, VALUE val, shape_id_t dest_shape_id, attr_index_t index)
{
    shape_id_t shape_id = RBASIC_SHAPE_ID(obj);

    if (shape_id == dest_shape_id) {
        RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
    }
    else if (dest_shape_id != INVALID_SHAPE_ID) {
        if (RSHAPE_DIRECT_CHILD_P(shape_id, dest_shape_id) && RSHAPE_EDGE_NAME(dest_shape_id) == id && RSHAPE_CAPACITY(shape_id) == RSHAPE_CAPACITY(dest_shape_id)) {
            RUBY_ASSERT(index < RSHAPE_CAPACITY(dest_shape_id));

    VALUE fields_obj = rb_obj_fields(obj, id);

    RB_OBJ_WRITE(fields_obj, &rb_imemo_fields_ptr(fields_obj)[index], val);

    if (shape_id != dest_shape_id) {
        RBASIC_SET_SHAPE_ID(obj, dest_shape_id);
        RBASIC_SET_SHAPE_ID(fields_obj, dest_shape_id);
    }

    RB_DEBUG_COUNTER_INC(ivar_set_ic_hit);
vm_setivar(VALUE obj, ID id, VALUE val, shape_id_t dest_shape_id, attr_index_t index)
{
        shape_id_t shape_id = RBASIC_SHAPE_ID(obj);
        RUBY_ASSERT(dest_shape_id == INVALID_SHAPE_ID || !rb_shape_too_complex_p(dest_shape_id));

        if (LIKELY(shape_id == dest_shape_id)) {
            RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
        }
        else if (dest_shape_id != INVALID_SHAPE_ID) {
            if (RSHAPE_DIRECT_CHILD_P(shape_id, dest_shape_id) && RSHAPE_EDGE_NAME(dest_shape_id) == id && RSHAPE_CAPACITY(shape_id) == RSHAPE_CAPACITY(dest_shape_id)) {
                RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);

                RBASIC_SET_SHAPE_ID(obj, dest_shape_id);

                RUBY_ASSERT(index < RSHAPE_CAPACITY(dest_shape_id));

        RB_DEBUG_COUNTER_INC(ivar_set_ic_hit);
        RB_DEBUG_COUNTER_INC(ivar_set_obj_hit);

    RB_DEBUG_COUNTER_INC(ivar_set_ic_miss_noobject);
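/* vm_setivar() is the mirror image of the read path: dest_shape_id
 * caches the shape the object will have after the write, so a cache hit
 * can store the value and transition the object's shape without lookup. */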
    VALUE defined_class = 0;

        defined_class = RBASIC(defined_class)->klass;

    struct rb_id_table *rb_cvc_tbl = RCLASS_CVC_TBL(defined_class);

        rb_bug("the cvc table should be set");

    if (!rb_id_table_lookup(rb_cvc_tbl, id, &ent_data)) {
        rb_bug("should have cvar cache entry");
    }

    ent->global_cvar_state = GET_GLOBAL_CVAR_STATE();
    cref = vm_get_cref(GET_EP());

    if (ic->entry && ic->entry->global_cvar_state == GET_GLOBAL_CVAR_STATE() && ic->entry->cref == cref && LIKELY(rb_ractor_main_p())) {
        RB_DEBUG_COUNTER_INC(cvar_read_inline_hit);

        VALUE v = rb_ivar_lookup(ic->entry->class_value, id, Qundef);

    VALUE klass = vm_get_cvar_base(cref, reg_cfp, 1);

    return update_classvariable_cache(iseq, klass, id, cref, ic);

    return vm_getclassvariable(iseq, cfp, id, ic);
    cref = vm_get_cref(GET_EP());

    if (ic->entry && ic->entry->global_cvar_state == GET_GLOBAL_CVAR_STATE() && ic->entry->cref == cref && LIKELY(rb_ractor_main_p())) {
        RB_DEBUG_COUNTER_INC(cvar_write_inline_hit);

        rb_class_ivar_set(ic->entry->class_value, id, val);

    VALUE klass = vm_get_cvar_base(cref, reg_cfp, 1);

    update_classvariable_cache(iseq, klass, id, cref, ic);

    vm_setclassvariable(iseq, cfp, id, val, ic);
    return vm_getivar(obj, id, iseq, ic, NULL, FALSE, Qnil);
    shape_id_t dest_shape_id;

    vm_ic_atomic_shape_and_index(ic, &dest_shape_id, &index);

    if (UNLIKELY(UNDEF_P(vm_setivar(obj, id, val, dest_shape_id, index)))) {

        if (!UNDEF_P(vm_setivar_default(obj, id, val, dest_shape_id, index))) {

        vm_setivar_slowpath_ivar(obj, id, val, iseq, ic);

    vm_setinstancevariable(iseq, obj, id, val, ic);
        ec->tag->state = RUBY_TAG_FATAL;

        ec->tag->state = TAG_THROW;

    else if (THROW_DATA_P(err)) {
        ec->tag->state = THROW_DATA_STATE((struct vm_throw_data *)err);

        ec->tag->state = TAG_RAISE;
               const int flag, const VALUE throwobj)

    else if (state == TAG_BREAK) {
        const VALUE *ep = GET_EP();
        const rb_iseq_t *base_iseq = GET_ISEQ();
        escape_cfp = reg_cfp;

        while (ISEQ_BODY(base_iseq)->type != ISEQ_TYPE_BLOCK) {
            if (ISEQ_BODY(escape_cfp->iseq)->type == ISEQ_TYPE_CLASS) {
                escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
                ep = escape_cfp->ep;
                base_iseq = escape_cfp->iseq;
            }
            else {
                ep = VM_ENV_PREV_EP(ep);
                base_iseq = ISEQ_BODY(base_iseq)->parent_iseq;
                escape_cfp = rb_vm_search_cf_from_ep(ec, escape_cfp, ep);
                VM_ASSERT(escape_cfp->iseq == base_iseq);
            }
        }

        if (VM_FRAME_LAMBDA_P(escape_cfp)) {

            ep = VM_ENV_PREV_EP(ep);

            while (escape_cfp < eocfp) {
                if (escape_cfp->ep == ep) {
                    const rb_iseq_t *const iseq = escape_cfp->iseq;
                    const VALUE epc = escape_cfp->pc - ISEQ_BODY(iseq)->iseq_encoded;

                    for (i=0; i < ct->size; i++) {
                        const struct iseq_catch_table_entry *const entry =
                            UNALIGNED_MEMBER_PTR(ct, entries[i]);

                        if (entry->type == CATCH_TYPE_BREAK &&
                            entry->iseq == base_iseq &&
                            entry->start < epc && entry->end >= epc) {
                            if (entry->cont == epc) {

                escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);

        rb_vm_localjump_error("break from proc-closure", throwobj, TAG_BREAK);
    else if (state == TAG_RETRY) {
        const VALUE *ep = VM_ENV_PREV_EP(GET_EP());

        escape_cfp = rb_vm_search_cf_from_ep(ec, reg_cfp, ep);
    }
    else if (state == TAG_RETURN) {
        const VALUE *current_ep = GET_EP();
        const VALUE *target_ep = NULL, *target_lep, *ep = current_ep;
        int in_class_frame = 0;

        escape_cfp = reg_cfp;

        while (!VM_ENV_LOCAL_P(ep)) {
            if (VM_ENV_FLAGS(ep, VM_FRAME_FLAG_LAMBDA) && target_ep == NULL) {

            ep = VM_ENV_PREV_EP(ep);
        }

        while (escape_cfp < eocfp) {
            const VALUE *lep = VM_CF_LEP(escape_cfp);

            if (lep == target_lep &&
                VM_FRAME_RUBYFRAME_P(escape_cfp) &&
                ISEQ_BODY(escape_cfp->iseq)->type == ISEQ_TYPE_CLASS) {

            if (lep == target_lep) {
                if (VM_FRAME_LAMBDA_P(escape_cfp)) {

                    if (in_class_frame) {

                        const VALUE *tep = current_ep;

                        while (target_lep != tep) {
                            if (escape_cfp->ep == tep) {

                                if (tep == target_ep) {

                                    goto unexpected_return;

                            tep = VM_ENV_PREV_EP(tep);
                        }

            else if (VM_FRAME_RUBYFRAME_P(escape_cfp)) {
                switch (ISEQ_BODY(escape_cfp->iseq)->type) {

                  case ISEQ_TYPE_MAIN:

                    if (in_class_frame) goto unexpected_return;
                    if (target_ep == NULL) {

                        goto unexpected_return;

                  case ISEQ_TYPE_EVAL: {

                    enum rb_iseq_type t = ISEQ_BODY(is)->type;
                    while (t == ISEQ_TYPE_RESCUE || t == ISEQ_TYPE_ENSURE || t == ISEQ_TYPE_EVAL) {
                        if (!(is = ISEQ_BODY(is)->parent_iseq)) break;
                        t = ISEQ_BODY(is)->type;
                    }
                    toplevel = t == ISEQ_TYPE_TOP || t == ISEQ_TYPE_MAIN;

                  case ISEQ_TYPE_CLASS:

            if (escape_cfp->ep == target_lep && ISEQ_BODY(escape_cfp->iseq)->type == ISEQ_TYPE_METHOD) {
                if (target_ep == NULL) {

                    goto unexpected_return;

            escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
        }

      unexpected_return:
        rb_vm_localjump_error("unexpected return", throwobj, TAG_RETURN);
    }
    else {
        rb_bug("isns(throw): unsupported throw type");
    }

    ec->tag->state = state;
    return (VALUE)THROW_DATA_NEW(throwobj, escape_cfp, state);
         rb_num_t throw_state, VALUE throwobj)
{
    const int state = (int)(throw_state & VM_THROW_STATE_MASK);
    const int flag = (int)(throw_state & VM_THROW_NO_ESCAPE_FLAG);

    if (state != 0) {
        return vm_throw_start(ec, reg_cfp, state, flag, throwobj);
    }
    else {
        return vm_throw_continue(ec, throwobj);
    }
}

    return vm_throw(ec, reg_cfp, throw_state, throwobj);
    int is_splat = flag & 0x01;

    const VALUE obj = ary;

    if (num + is_splat == 0) {

    else if (flag & 0x02) {

        for (i = 0; i < num - len; i++) {

        for (j = 0; i < num; i++, j++) {

        for (; i < num - len; i++) {

        for (rb_num_t j = 0; i < num; i++, j++) {
            *cfp->sp++ = ptr[len - j - 1];
        }

        for (rb_num_t j = 0; j < num; j++) {
            *cfp->sp++ = ptr[num - j - 1];
        }
    int initial_capa = 2;

#if VM_CHECK_MODE > 0
    ccs->debug_sig = ~(VALUE)ccs;
#endif

    ccs->capa = initial_capa;

    rb_managed_id_table_insert(cc_tbl, mid, (VALUE)ccs);
    if (! vm_cc_markable(cc)) {
        return;
    }

    if (UNLIKELY(ccs->len == ccs->capa)) {

        ccs = ruby_xrealloc(ccs, vm_ccs_alloc_size(ccs->capa));
#if VM_CHECK_MODE > 0
        ccs->debug_sig = ~(VALUE)ccs;
#endif

        rb_managed_id_table_insert(cc_tbl, mid, (VALUE)ccs);
    }
    VM_ASSERT(ccs->len < ccs->capa);

    const int pos = ccs->len++;
    ccs->entries[pos].argc = vm_ci_argc(ci);
    ccs->entries[pos].flag = vm_ci_flag(ci);

    if (RB_DEBUG_COUNTER_SETMAX(ccs_maxlen, ccs->len)) {
#if VM_CHECK_MODE > 0
    ruby_debug_printf("ccs:%p (%d,%d)\n", (void *)ccs, ccs->len, ccs->capa);
    for (int i=0; i<ccs->len; i++) {
        ruby_debug_printf("CCS CI ID:flag:%x argc:%u\n",
                          ccs->entries[i].flag,
                          ccs->entries[i].argc);
        rp(ccs->entries[i].cc);
    }
    VM_ASSERT(vm_ccs_p(ccs));
    VM_ASSERT(ccs->len <= ccs->capa);

    for (int i=0; i<ccs->len; i++) {

        VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
        VM_ASSERT(vm_cc_class_check(cc, klass));
        VM_ASSERT(vm_cc_check_cme(cc, ccs->cme));
        VM_ASSERT(!vm_cc_super_p(cc));
        VM_ASSERT(!vm_cc_refinement_p(cc));
    ASSERT_vm_locking();

    if (rb_multi_ractor_p()) {
        if (RCLASS_WRITABLE_CC_TBL(klass) != cc_tbl) {

        rb_managed_id_table_lookup(cc_tbl, mid, (VALUE *)&ccs);

        if (!ccs || !METHOD_ENTRY_INVALIDATED(ccs->cme)) {

        VALUE new_table = rb_vm_cc_table_dup(cc_tbl);
        rb_vm_cc_table_delete(new_table, mid);
        RB_OBJ_ATOMIC_WRITE(klass, &RCLASS_WRITABLE_CC_TBL(klass), new_table);

        rb_vm_cc_table_delete(cc_tbl, mid);
    ASSERT_vm_locking();

    VALUE cc_tbl = RCLASS_WRITABLE_CC_TBL(klass);
    const VALUE original_cc_table = cc_tbl;

        cc_tbl = rb_vm_cc_table_create(1);

    else if (rb_multi_ractor_p()) {
        cc_tbl = rb_vm_cc_table_dup(cc_tbl);

    RB_DEBUG_COUNTER_INC(cc_not_found_in_ccs);

        cme = UNDEFINED_METHOD_ENTRY_P(cme) ? NULL : cme;

        VM_ASSERT(cme == rb_callable_method_entry(klass, mid));

        cme = rb_callable_method_entry(klass, mid);

    VM_ASSERT(cme == NULL || IMEMO_TYPE_P(cme, imemo_ment));

        VM_ASSERT(vm_cc_cme(&vm_empty_cc) == NULL);
        return &vm_empty_cc;

    VM_ASSERT(cme == rb_callable_method_entry(klass, mid));

    if (!LIKELY(rb_managed_id_table_lookup(cc_tbl, mid, (VALUE *)&ccs))) {

        ccs = vm_ccs_create(klass, cc_tbl, mid, cme);

    cme = rb_check_overloaded_cme(cme, ci);

    const struct rb_callcache *cc = vm_cc_new(klass, cme, vm_call_general, cc_type_normal);
    vm_ccs_push(cc_tbl, mid, ccs, ci, cc);

    VM_ASSERT(vm_cc_cme(cc) != NULL);
    VM_ASSERT(cme->called_id == mid);
    VM_ASSERT(vm_cc_cme(cc)->called_id == mid);

    if (original_cc_table != cc_tbl) {
        RB_OBJ_ATOMIC_WRITE(klass, &RCLASS_WRITABLE_CC_TBL(klass), cc_tbl);
    }
    cc_tbl = RUBY_ATOMIC_VALUE_LOAD(RCLASS_WRITABLE_CC_TBL(klass));

    if (rb_managed_id_table_lookup(cc_tbl, mid, (VALUE *)&ccs)) {
        const int ccs_len = ccs->len;

        if (UNLIKELY(METHOD_ENTRY_INVALIDATED(ccs->cme))) {

            vm_evict_cc(klass, cc_tbl, mid);

            VM_ASSERT(vm_ccs_verify(ccs, mid, klass));
    unsigned int argc = vm_ci_argc(ci);
    unsigned int flag = vm_ci_flag(ci);

    for (int i=0; i<ccs_len; i++) {
        unsigned int ccs_ci_argc = ccs->entries[i].argc;
        unsigned int ccs_ci_flag = ccs->entries[i].flag;
        const struct rb_callcache *ccs_cc = ccs->entries[i].cc;

        VM_ASSERT(IMEMO_TYPE_P(ccs_cc, imemo_callcache));

        if (ccs_ci_argc == argc && ccs_ci_flag == flag) {
            RB_DEBUG_COUNTER_INC(cc_found_in_ccs);

            VM_ASSERT(vm_cc_cme(ccs_cc)->called_id == mid);
            VM_ASSERT(ccs_cc->klass == klass);
            VM_ASSERT(!METHOD_ENTRY_INVALIDATED(vm_cc_cme(ccs_cc)));
    const ID mid = vm_ci_mid(ci);

    const struct rb_callcache *cc = vm_lookup_cc(klass, ci, mid);

    if (rb_multi_ractor_p()) {

        cc = vm_lookup_cc(klass, ci, mid);

        cc = vm_populate_cc(klass, ci, mid);

        cc = vm_search_cc(klass, ci);

    VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
    VM_ASSERT(cc == vm_cc_empty() || cc->klass == klass);
    VM_ASSERT(cc == vm_cc_empty() || callable_method_entry_p(vm_cc_cme(cc)));
    VM_ASSERT(cc == vm_cc_empty() || !METHOD_ENTRY_INVALIDATED(vm_cc_cme(cc)));
    VM_ASSERT(cc == vm_cc_empty() || vm_cc_cme(cc)->called_id == vm_ci_mid(ci));
#if USE_DEBUG_COUNTER
    const struct rb_callcache *old_cc = cd->cc;
#endif

    const struct rb_callcache *cc = rb_vm_search_method_slowpath(cd->ci, klass);

#if OPT_INLINE_METHOD_CACHE

    if (cd_owner && cc != empty_cc) {

#if USE_DEBUG_COUNTER
    if (!old_cc || old_cc == empty_cc) {
        RB_DEBUG_COUNTER_INC(mc_inline_miss_empty);
    }
    else if (old_cc == cc) {
        RB_DEBUG_COUNTER_INC(mc_inline_miss_same_cc);
    }
    else if (vm_cc_cme(old_cc) == vm_cc_cme(cc)) {
        RB_DEBUG_COUNTER_INC(mc_inline_miss_same_cme);
    }
    else if (vm_cc_cme(old_cc) && vm_cc_cme(cc) &&
             vm_cc_cme(old_cc)->def == vm_cc_cme(cc)->def) {
        RB_DEBUG_COUNTER_INC(mc_inline_miss_same_def);
    }
    else {
        RB_DEBUG_COUNTER_INC(mc_inline_miss_diff);
    }
#endif

    VM_ASSERT(vm_cc_cme(cc) == NULL ||
              vm_cc_cme(cc)->called_id == vm_ci_mid(cd->ci));
#if OPT_INLINE_METHOD_CACHE
    if (LIKELY(vm_cc_class_check(cc, klass))) {
        if (LIKELY(!METHOD_ENTRY_INVALIDATED(vm_cc_cme(cc)))) {
            VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));
            RB_DEBUG_COUNTER_INC(mc_inline_hit);
            VM_ASSERT(vm_cc_cme(cc) == NULL ||
                      (vm_ci_flag(cd->ci) & VM_CALL_SUPER) ||
                      vm_cc_cme(cc)->called_id == vm_ci_mid(cd->ci));

            return cc;
        }
        RB_DEBUG_COUNTER_INC(mc_inline_miss_invalidated);
    }
    else {
        RB_DEBUG_COUNTER_INC(mc_inline_miss_klass);
    }
#endif

    return vm_search_method_slowpath0(cd_owner, cd, klass);
    VM_ASSERT(klass != Qfalse);

    const struct rb_callcache *cc = vm_search_method_fastpath(cd_owner, cd, klass);
    return vm_cc_cme(cc);
#if __has_attribute(transparent_union)

    VALUE (*f10)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
    VALUE (*f11)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
    VALUE (*f12)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
    VALUE (*f13)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
    VALUE (*f14)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
    VALUE (*f15)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
# define make_cfunc_type(f) (cfunc_type){.anyargs = (VALUE (*)(ANYARGS))(f)}
#else
# define make_cfunc_type(f) (cfunc_type)(f)
#endif
    VM_ASSERT(IMEMO_TYPE_P(me, imemo_ment));
    VM_ASSERT(callable_method_entry_p(me));

    if (me->def->type != VM_METHOD_TYPE_CFUNC) {
        return false;
    }
    else {
#if __has_attribute(transparent_union)
        return me->def->body.cfunc.func == func.anyargs;
#else
        return me->def->body.cfunc.func == func;
#endif
    }
    return me && METHOD_ENTRY_BASIC(me);

    VM_ASSERT(iseq != NULL);

    return check_cfunc(cme, func);

#define check_cfunc(me, func) check_cfunc(me, make_cfunc_type(func))
#define vm_method_cfunc_is(iseq, cd, recv, func) vm_method_cfunc_is(iseq, cd, recv, make_cfunc_type(func))

#define EQ_UNREDEFINED_P(t) BASIC_OP_UNREDEFINED_P(BOP_EQ, t##_REDEFINED_OP_FLAG)
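/* opt_equality() first tries type-specialized comparisons that are only
 * valid while the corresponding #== is unredefined (EQ_UNREDEFINED_P);
 * failing that, if the method would resolve to rb_obj_equal, pointer
 * identity is the answer and no call needs to be made. */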
opt_equality_specialized(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) && EQ_UNREDEFINED_P(INTEGER)) {
        goto compare_by_identity;
    }
    else if (FLONUM_2_P(recv, obj) && EQ_UNREDEFINED_P(FLOAT)) {
        goto compare_by_identity;
    }

        goto compare_by_identity;

#if MSC_VERSION_BEFORE(1300)

        else if (isnan(b)) {

        return RBOOL(a == b);

        return rb_str_eql_internal(obj, recv);

  compare_by_identity:
    return RBOOL(recv == obj);
    VM_ASSERT(cd_owner != NULL);

    VALUE val = opt_equality_specialized(recv, obj);
    if (!UNDEF_P(val)) return val;

    if (!vm_method_cfunc_is(cd_owner, cd, recv, rb_obj_equal)) {

    return RBOOL(recv == obj);

#undef EQ_UNREDEFINED_P
NOINLINE(static VALUE opt_equality_by_mid_slowpath(VALUE recv, VALUE obj, ID mid));
opt_equality_by_mid_slowpath(VALUE recv, VALUE obj, ID mid)
{
    const struct rb_callcache *cc = gccct_method_search(GET_EC(), recv, mid, &VM_CI_ON_STACK(mid, 0, 1, NULL));

    if (cc && check_cfunc(vm_cc_cme(cc), rb_obj_equal)) {
        return RBOOL(recv == obj);
    }
    VALUE val = opt_equality_specialized(recv, obj);
    if (!UNDEF_P(val)) {
        return val;
    }

    return opt_equality_by_mid_slowpath(recv, obj, mid);

    return opt_equality_by_mid(obj1, obj2, idEq);

    return opt_equality_by_mid(obj1, obj2, idEqlP);
      case VM_CHECKMATCH_TYPE_WHEN:

      case VM_CHECKMATCH_TYPE_RESCUE:

        rb_raise(rb_eTypeError, "class or module required for rescue clause");

      case VM_CHECKMATCH_TYPE_CASE: {
        return rb_vm_call_with_refinements(ec, pattern, idEqq, 1, &target, RB_NO_KEYWORDS);
      }
      default:
        rb_bug("check_match: unreachable");
#if MSC_VERSION_BEFORE(1300)
#define CHECK_CMP_NAN(a, b) if (isnan(a) || isnan(b)) return Qfalse;
#else
#define CHECK_CMP_NAN(a, b)
#endif
static inline VALUE
double_cmp_lt(double a, double b)
{
    CHECK_CMP_NAN(a, b);
    return RBOOL(a < b);
}

static inline VALUE
double_cmp_le(double a, double b)
{
    CHECK_CMP_NAN(a, b);
    return RBOOL(a <= b);
}

static inline VALUE
double_cmp_gt(double a, double b)
{
    CHECK_CMP_NAN(a, b);
    return RBOOL(a > b);
}

static inline VALUE
double_cmp_ge(double a, double b)
{
    CHECK_CMP_NAN(a, b);
    return RBOOL(a >= b);
}
static inline VALUE *
vm_base_ptr(const rb_control_frame_t *cfp)
{
    const rb_control_frame_t *prev_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);

    if (cfp->iseq && VM_FRAME_RUBYFRAME_P(cfp)) {
        VALUE *bp = prev_cfp->sp + ISEQ_BODY(cfp->iseq)->local_table_size + VM_ENV_DATA_SIZE;

        if (ISEQ_BODY(cfp->iseq)->param.flags.forwardable && VM_ENV_LOCAL_P(cfp->ep)) {
            int lts = ISEQ_BODY(cfp->iseq)->local_table_size;
            int params = ISEQ_BODY(cfp->iseq)->param.size;

            bp += vm_ci_argc(ci);
        }

        if (ISEQ_BODY(cfp->iseq)->type == ISEQ_TYPE_METHOD || VM_FRAME_BMETHOD_P(cfp)) {

#if VM_DEBUG_BP_CHECK
        if (bp != cfp->bp_check) {
            ruby_debug_printf("bp_check: %ld, bp: %ld\n",
                              (long)(cfp->bp_check - GET_EC()->vm_stack),
                              (long)(bp - GET_EC()->vm_stack));
            rb_bug("vm_base_ptr: unreachable");
        }
#endif

    return vm_base_ptr(cfp);
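/* vm_base_ptr() recomputes where a Ruby frame's locals begin from the
 * previous frame's sp; forwardable methods additionally reserve one slot
 * per forwarded argument, hence the vm_ci_argc() adjustment above. */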
static vm_call_handler vm_call_iseq_setup_func(const struct rb_callinfo *ci, const int param_size, const int local_size);
    RB_DEBUG_COUNTER_INC(ccf_iseq_setup_tailcall_0start);

    return vm_call_iseq_setup_tailcall(ec, cfp, calling, 0);
    RB_DEBUG_COUNTER_INC(ccf_iseq_setup_0start);

    const struct rb_callcache *cc = calling->cc;
    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    int param = ISEQ_BODY(iseq)->param.size;
    int local = ISEQ_BODY(iseq)->local_table_size;
    return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);
    return ISEQ_BODY(iseq)->param.flags.has_opt == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_kw == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
           ISEQ_BODY(iseq)->param.flags.accepts_no_kwarg == FALSE &&
           ISEQ_BODY(iseq)->param.flags.forwardable == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_block == FALSE;
static bool
rb_iseq_only_optparam_p(const rb_iseq_t *iseq)
{
    return ISEQ_BODY(iseq)->param.flags.has_opt == TRUE &&
           ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_kw == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
           ISEQ_BODY(iseq)->param.flags.accepts_no_kwarg == FALSE &&
           ISEQ_BODY(iseq)->param.flags.forwardable == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_block == FALSE;
}

static bool
rb_iseq_only_kwparam_p(const rb_iseq_t *iseq)
{
    return ISEQ_BODY(iseq)->param.flags.has_opt == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_kw == TRUE &&
           ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
           ISEQ_BODY(iseq)->param.flags.forwardable == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_block == FALSE;
}
#define ALLOW_HEAP_ARGV (-2)
#define ALLOW_HEAP_ARGV_KEEP_KWSPLAT (-3)
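/* A max_args of ALLOW_HEAP_ARGV lets a splat that would exceed
 * VM_ARGC_STACK_MAX be collected into a heap array (calling->heap_argv)
 * instead of expanded onto the VM stack; ALLOW_HEAP_ARGV_KEEP_KWSPLAT
 * additionally keeps a trailing keyword-splat hash inside that array. */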
    vm_check_canary(GET_EC(), cfp->sp);

    int argc = calling->argc;

    if (UNLIKELY(max_args <= ALLOW_HEAP_ARGV && len + argc > VM_ARGC_STACK_MAX)) {

        VALUE *argv = cfp->sp - argc;

        cfp->sp -= argc - 1;
        cfp->sp[-1] = argv_ary;

        calling->heap_argv = argv_ary;

        if (max_args >= 0 && len + argc > max_args) {

            calling->argc += len - (max_args - argc + 1);
            len = max_args - argc + 1;

        calling->heap_argv = 0;

        CHECK_VM_STACK_OVERFLOW(cfp, len);

        for (i = 0; i < len; i++) {
            *cfp->sp++ = ptr[i];
        }
    const VALUE *const passed_keywords = vm_ci_kwarg(ci)->keywords;
    const int kw_len = vm_ci_kwarg(ci)->keyword_len;
    const VALUE h = rb_hash_new_with_size(kw_len);
    VALUE *sp = cfp->sp;

    for (i=0; i<kw_len; i++) {
        rb_hash_aset(h, passed_keywords[i], (sp - kw_len)[i]);
    }

    cfp->sp -= kw_len - 1;
    calling->argc -= kw_len - 1;
    calling->kw_splat = 1;
vm_caller_setup_keyword_hash(const struct rb_callinfo *ci, VALUE keyword_hash)
{
        if (keyword_hash != Qnil) {

            keyword_hash = rb_hash_dup(rb_to_hash_type(keyword_hash));

    else if (!IS_ARGS_KW_SPLAT_MUT(ci) && !RHASH_EMPTY_P(keyword_hash)) {

        keyword_hash = rb_hash_dup(keyword_hash);

    return keyword_hash;
                      const struct rb_callinfo *restrict ci, int max_args)
{
    if (UNLIKELY(IS_ARGS_SPLAT(ci))) {
        if (IS_ARGS_KW_SPLAT(ci)) {

            VM_ASSERT(calling->kw_splat == 1);

            VALUE ary = cfp->sp[0];
            VALUE kwh = vm_caller_setup_keyword_hash(ci, cfp->sp[1]);

            if (vm_caller_setup_arg_splat(cfp, calling, ary, max_args)) return;

            if (UNLIKELY(calling->heap_argv)) {

                ((struct RHash *)kwh)->basic.flags |= RHASH_PASS_AS_KEYWORDS;
                if (max_args != ALLOW_HEAP_ARGV_KEEP_KWSPLAT) {
                    calling->kw_splat = 0;

            VM_ASSERT(calling->kw_splat == 1);

            calling->kw_splat = 0;

        else {
            VM_ASSERT(calling->kw_splat == 0);

            VALUE ary = cfp->sp[0];

            if (vm_caller_setup_arg_splat(cfp, calling, ary, max_args)) {

            VALUE last_hash, argv_ary;
            if (UNLIKELY(argv_ary = calling->heap_argv)) {
                if (!IS_ARGS_KEYWORD(ci) &&

                    (((struct RHash *)last_hash)->basic.flags & RHASH_PASS_AS_KEYWORDS)) {

                    calling->kw_splat = 1;

                if (!IS_ARGS_KEYWORD(ci) &&
                    calling->argc > 0 &&

                    (((struct RHash *)last_hash)->basic.flags & RHASH_PASS_AS_KEYWORDS)) {

                    cfp->sp[-1] = rb_hash_dup(last_hash);
                    calling->kw_splat = 1;

    else if (UNLIKELY(IS_ARGS_KW_SPLAT(ci))) {

        VM_ASSERT(calling->kw_splat == 1);
        VALUE kwh = vm_caller_setup_keyword_hash(ci, cfp->sp[-1]);

            calling->kw_splat = 0;

    else if (UNLIKELY(IS_ARGS_KEYWORD(ci))) {

        VM_ASSERT(calling->kw_splat == 0);

        vm_caller_setup_arg_kw(cfp, calling, ci);
#define USE_OPT_HIST 0

#if USE_OPT_HIST
#define OPT_HIST_MAX 64
static int opt_hist[OPT_HIST_MAX+1];

static void
opt_hist_show_results_at_exit(void)
{
    for (int i=0; i<OPT_HIST_MAX; i++) {
        ruby_debug_printf("opt_hist\t%d\t%d\n", i, opt_hist[i]);
    }
}
#endif
    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
    const int opt = calling->argc - lead_num;
    const int opt_num = ISEQ_BODY(iseq)->param.opt_num;
    const int opt_pc = (int)ISEQ_BODY(iseq)->param.opt_table[opt];
    const int param = ISEQ_BODY(iseq)->param.size;
    const int local = ISEQ_BODY(iseq)->local_table_size;
    const int delta = opt_num - opt;

    RB_DEBUG_COUNTER_INC(ccf_iseq_opt);

#if USE_OPT_HIST
    if (opt_pc < OPT_HIST_MAX) {
        opt_hist[opt]++;
    }
    else {
        opt_hist[OPT_HIST_MAX]++;
    }
#endif

    return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), opt_pc, param - delta, local);
    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
    const int opt = calling->argc - lead_num;
    const int opt_pc = (int)ISEQ_BODY(iseq)->param.opt_table[opt];

    RB_DEBUG_COUNTER_INC(ccf_iseq_opt);

#if USE_OPT_HIST
    if (opt_pc < OPT_HIST_MAX) {
        opt_hist[opt]++;
    }
    else {
        opt_hist[OPT_HIST_MAX]++;
    }
#endif

    return vm_call_iseq_setup_tailcall(ec, cfp, calling, opt_pc);
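/* For optional parameters, param.opt_table[] maps the number of
 * optionals actually supplied to the bytecode offset where execution
 * should begin, giving each arity its own entry point into one iseq. */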
                         VALUE *const passed_values, const int passed_keyword_len, const VALUE *const passed_keywords,
                         VALUE *const locals);
    const struct rb_callcache *cc = calling->cc;
    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    int param_size = ISEQ_BODY(iseq)->param.size;
    int local_size = ISEQ_BODY(iseq)->local_table_size;

    VM_ASSERT(ISEQ_BODY(iseq)->param.flags.forwardable);

    local_size = local_size + vm_ci_argc(calling->cd->ci);
    param_size = param_size + vm_ci_argc(calling->cd->ci);

    cfp->sp[0] = (VALUE)calling->cd->ci;

    return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param_size, local_size);
    VM_ASSERT(vm_ci_flag(ci) & VM_CALL_KWARG);
    RB_DEBUG_COUNTER_INC(ccf_iseq_kw1);

    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;
    const struct rb_callinfo_kwarg *kw_arg = vm_ci_kwarg(ci);
    const int ci_kw_len = kw_arg->keyword_len;
    const VALUE *const ci_keywords = kw_arg->keywords;
    VALUE *argv = cfp->sp - calling->argc;
    VALUE *const klocals = argv + kw_param->bits_start - kw_param->num;
    const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
    VALUE *const ci_kws = ALLOCA_N(VALUE, ci_kw_len);
    MEMCPY(ci_kws, argv + lead_num, VALUE, ci_kw_len);
    args_setup_kw_parameters(ec, iseq, vm_cc_cme(cc), ci_kws, ci_kw_len, ci_keywords, klocals);

    int param = ISEQ_BODY(iseq)->param.size;
    int local = ISEQ_BODY(iseq)->local_table_size;
    return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);
    const struct rb_callinfo *MAYBE_UNUSED(ci) = calling->cd->ci;
    const struct rb_callcache *cc = calling->cc;

    VM_ASSERT((vm_ci_flag(ci) & VM_CALL_KWARG) == 0);
    RB_DEBUG_COUNTER_INC(ccf_iseq_kw2);

    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;
    VALUE *const argv = cfp->sp - calling->argc;
    VALUE *const klocals = argv + kw_param->bits_start - kw_param->num;

    int i;
    for (i=0; i<kw_param->num; i++) {
        klocals[i] = kw_param->default_values[i];
    }

    int param = ISEQ_BODY(iseq)->param.size;
    int local = ISEQ_BODY(iseq)->local_table_size;
    return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);
    cfp->sp -= (calling->argc + 1);
    rb_insn_func_t func_ptr = (rb_insn_func_t)(uintptr_t)bf->func_ptr;
    return builtin_invoker0(ec, calling->recv, NULL, func_ptr);
    set_table *dup_check_table = vm->unused_block_warning_table;

        .v = (VALUE)cme->def,

    if (!strict_unused_block) {
        key = (st_data_t)cme->def->original_id;

        if (set_table_lookup(dup_check_table, key)) {

            key |= (st_data_t)(k1.b[i] ^ k2.b[SIZEOF_VALUE-1-i]) << (8 * i);

        fprintf(stderr, "pc:%p def:%p\n", pc, (void *)cme->def);
        fprintf(stderr, "key:%p\n", (void *)key);
    if (set_insert(dup_check_table, key)) {
        return;
    }

    VALUE name = rb_gen_method_name(cme->defined_class, ISEQ_BODY(iseq)->location.base_label);

    if (!NIL_P(m_loc)) {
        rb_warn("the block passed to '%"PRIsVALUE"' defined at %"PRIsVALUE":%"PRIsVALUE" may be ignored",
                name, rb_ary_entry(m_loc, 0), rb_ary_entry(m_loc, 1));
    }
    else {
        rb_warn("the block may be ignored because '%"PRIsVALUE"' does not use a block", name);
    }
                    const rb_iseq_t *iseq, VALUE *argv, int param_size, int local_size)
{
    VM_ASSERT((vm_ci_argc(ci), 1));
    VM_ASSERT(vm_cc_cme(cc) != NULL);

    if (UNLIKELY(!ISEQ_BODY(iseq)->param.flags.use_block &&
                 calling->block_handler != VM_BLOCK_HANDLER_NONE &&
                 !(vm_ci_flag(calling->cd->ci) & (VM_CALL_OPT_SEND | VM_CALL_SUPER)))) {
        warn_unused_block(vm_cc_cme(cc), iseq, (void *)ec->cfp->pc);
    }
    if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_KW_SPLAT))) {
        if (LIKELY(rb_simple_iseq_p(iseq))) {

            int lead_num = ISEQ_BODY(iseq)->param.lead_num;
            CALLER_SETUP_ARG(cfp, calling, ci, lead_num);

            if (calling->argc != lead_num) {
                argument_arity_error(ec, iseq, vm_cc_cme(cc), calling->argc, lead_num, lead_num);
            }

            VM_ASSERT(cc == calling->cc);

            if (vm_call_iseq_optimizable_p(ci, cc)) {
                if ((iseq->body->builtin_attrs & BUILTIN_ATTR_SINGLE_NOARG_LEAF) &&

                    VM_ASSERT(iseq->body->builtin_attrs & BUILTIN_ATTR_LEAF);
                    vm_cc_bf_set(cc, (void *)iseq->body->iseq_encoded[1]);
                    CC_SET_FASTPATH(cc, vm_call_single_noarg_leaf_builtin, true);
                }
                else {
                    CC_SET_FASTPATH(cc, vm_call_iseq_setup_func(ci, param_size, local_size), true);
                }
            }
        else if (rb_iseq_only_optparam_p(iseq)) {

            const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
            const int opt_num = ISEQ_BODY(iseq)->param.opt_num;

            CALLER_SETUP_ARG(cfp, calling, ci, lead_num + opt_num);
            const int argc = calling->argc;
            const int opt = argc - lead_num;

            if (opt < 0 || opt > opt_num) {
                argument_arity_error(ec, iseq, vm_cc_cme(cc), argc, lead_num, lead_num + opt_num);
            }

            if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_TAILCALL))) {
                CC_SET_FASTPATH(cc, vm_call_iseq_setup_normal_opt_start,
                                !IS_ARGS_SPLAT(ci) && !IS_ARGS_KEYWORD(ci) &&
                                vm_call_cacheable(ci, cc));
            }
            else {
                CC_SET_FASTPATH(cc, vm_call_iseq_setup_tailcall_opt_start,
                                !IS_ARGS_SPLAT(ci) && !IS_ARGS_KEYWORD(ci) &&
                                vm_call_cacheable(ci, cc));
            }

            VM_ASSERT((int)ISEQ_BODY(iseq)->param.size == lead_num + opt_num);
            for (int i=argc; i<lead_num + opt_num; i++) {
                argv[i] = Qnil;
            }
            return (int)ISEQ_BODY(iseq)->param.opt_table[opt];
        }
        else if (rb_iseq_only_kwparam_p(iseq) && !IS_ARGS_SPLAT(ci)) {
            const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
            const int argc = calling->argc;
            const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;

            if (vm_ci_flag(ci) & VM_CALL_KWARG) {
                const struct rb_callinfo_kwarg *kw_arg = vm_ci_kwarg(ci);

                if (argc - kw_arg->keyword_len == lead_num) {
                    const int ci_kw_len = kw_arg->keyword_len;
                    const VALUE *const ci_keywords = kw_arg->keywords;
                    VALUE *const ci_kws = ALLOCA_N(VALUE, ci_kw_len);
                    MEMCPY(ci_kws, argv + lead_num, VALUE, ci_kw_len);

                    VALUE *const klocals = argv + kw_param->bits_start - kw_param->num;
                    args_setup_kw_parameters(ec, iseq, vm_cc_cme(cc), ci_kws, ci_kw_len, ci_keywords, klocals);

                    CC_SET_FASTPATH(cc, vm_call_iseq_setup_kwparm_kwarg,
                                    vm_call_cacheable(ci, cc));

                    return 0;
                }
            }
            else if (argc == lead_num) {
                VALUE *const klocals = argv + kw_param->bits_start - kw_param->num;
                args_setup_kw_parameters(ec, iseq, vm_cc_cme(cc), NULL, 0, NULL, klocals);

                if (klocals[kw_param->num] == INT2FIX(0)) {
                    CC_SET_FASTPATH(cc, vm_call_iseq_setup_kwparm_nokwarg,
                                    vm_call_cacheable(ci, cc));
                }

                return 0;
            }
        }
    }
    if (ISEQ_BODY(iseq)->param.flags.forwardable) {
        bool can_fastpath = true;

        if ((vm_ci_flag(ci) & VM_CALL_FORWARDING)) {

            if (vm_ci_argc(ci) != vm_ci_argc(forward_cd->caller_ci)) {
                ci = vm_ci_new_runtime(

            }
            else {
                ci = forward_cd->caller_ci;
            }
            can_fastpath = false;
        }

        if (!vm_ci_markable(ci)) {
            ci = vm_ci_new_runtime(

            can_fastpath = false;
        }
        argv[param_size - 1] = (VALUE)ci;
        CC_SET_FASTPATH(cc, vm_call_iseq_forwardable, can_fastpath);
    }

    return setup_parameters_complex(ec, iseq, calling, ci, argv, arg_setup_method);
    const VALUE *lep = VM_CF_LEP(cfp);

    if (VM_ENV_FLAGS(lep, VM_ENV_FLAG_ESCAPED)) {

        iseq = rb_vm_search_cf_from_ep(ec, cfp, lep)->iseq;

    int local_size = ISEQ_BODY(iseq)->local_table_size + argc;

    const VALUE *from = lep - (local_size + VM_ENV_DATA_SIZE - 1);
    VALUE *to = cfp->sp - 1;

        CHECK_VM_STACK_OVERFLOW0(cfp, to, RARRAY_LEN(splat));

    CHECK_VM_STACK_OVERFLOW0(cfp, to, argc);

    cfp->sp = to + argc;
    RB_DEBUG_COUNTER_INC(ccf_iseq_setup);

    const struct rb_callcache *cc = calling->cc;
    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    int param_size = ISEQ_BODY(iseq)->param.size;
    int local_size = ISEQ_BODY(iseq)->local_table_size;

    RUBY_ASSERT(!ISEQ_BODY(iseq)->param.flags.forwardable);

    const int opt_pc = vm_callee_setup_arg(ec, calling, iseq, cfp->sp - calling->argc, param_size, local_size);
    return vm_call_iseq_setup_2(ec, cfp, calling, opt_pc, param_size, local_size);
    RB_DEBUG_COUNTER_INC(ccf_iseq_setup);

    const struct rb_callcache *cc = calling->cc;
    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    int param_size = ISEQ_BODY(iseq)->param.size;
    int local_size = ISEQ_BODY(iseq)->local_table_size;

    RUBY_ASSERT(ISEQ_BODY(iseq)->param.flags.forwardable);

    local_size = local_size + vm_ci_argc(calling->cd->ci);
    param_size = param_size + vm_ci_argc(calling->cd->ci);

    const int opt_pc = vm_callee_setup_arg(ec, calling, iseq, cfp->sp - calling->argc, param_size, local_size);
    return vm_call_iseq_setup_2(ec, cfp, calling, opt_pc, param_size, local_size);
                     int opt_pc, int param_size, int local_size)
{
    if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_TAILCALL))) {
        return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), opt_pc, param_size, local_size);
    }
    else {
        return vm_call_iseq_setup_tailcall(ec, cfp, calling, opt_pc);
    }
                          int opt_pc, int param_size, int local_size)
{
    const rb_iseq_t *iseq = def_iseq_ptr(me->def);
    VALUE *argv = cfp->sp - calling->argc;
    VALUE *sp = argv + param_size;
    cfp->sp = argv - 1; /* recv */

    vm_push_frame(ec, iseq, VM_FRAME_MAGIC_METHOD | VM_ENV_FLAG_LOCAL, calling->recv,
                  calling->block_handler, (VALUE)me,
                  ISEQ_BODY(iseq)->iseq_encoded + opt_pc, sp,
                  local_size - param_size,
                  ISEQ_BODY(iseq)->stack_max);
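/* The tailcall variant below reuses the current frame: it pops it,
 * copies self and the already-prepared arguments down to the old stack
 * base, and pushes the callee's frame in its place. */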
    VALUE *argv = cfp->sp - calling->argc;

    const rb_iseq_t *iseq = def_iseq_ptr(me->def);
    VALUE *src_argv = argv;
    VALUE *sp_orig, *sp;
    VALUE finish_flag = VM_FRAME_FINISHED_P(cfp) ? VM_FRAME_FLAG_FINISH : 0;

    if (VM_BH_FROM_CFP_P(calling->block_handler, cfp)) {
        struct rb_captured_block *dst_captured = VM_CFP_TO_CAPTURED_BLOCK(RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp));
        const struct rb_captured_block *src_captured = VM_BH_TO_CAPT_BLOCK(calling->block_handler);
        dst_captured->code.val = src_captured->code.val;
        if (VM_BH_ISEQ_BLOCK_P(calling->block_handler)) {
            calling->block_handler = VM_BH_FROM_ISEQ_BLOCK(dst_captured);
        }
        else {
            calling->block_handler = VM_BH_FROM_IFUNC_BLOCK(dst_captured);
        }
    }

    vm_pop_frame(ec, cfp, cfp->ep);

    sp_orig = sp = cfp->sp;

    /* push self */
    sp[0] = calling->recv;
    sp++;

    /* copy arguments */
    for (i=0; i < ISEQ_BODY(iseq)->param.size; i++) {
        *sp++ = src_argv[i];
    }

    vm_push_frame(ec, iseq, VM_FRAME_MAGIC_METHOD | VM_ENV_FLAG_LOCAL | finish_flag,
                  calling->recv, calling->block_handler, (VALUE)me,
                  ISEQ_BODY(iseq)->iseq_encoded + opt_pc, sp,
                  ISEQ_BODY(iseq)->local_table_size - ISEQ_BODY(iseq)->param.size,
                  ISEQ_BODY(iseq)->stack_max);
static void
ractor_unsafe_check(void)
{
    if (!rb_ractor_main_p()) {
        rb_raise(rb_eRactorUnsafeError, "ractor unsafe method called from not main ractor");
    }
}
    ractor_unsafe_check();

    ractor_unsafe_check();
    return (*f)(argc, argv, recv);

    ractor_unsafe_check();

    ractor_unsafe_check();
    return (*f)(recv, argv[0]);

    ractor_unsafe_check();
    return (*f)(recv, argv[0], argv[1]);

    ractor_unsafe_check();
    return (*f)(recv, argv[0], argv[1], argv[2]);

    ractor_unsafe_check();
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3]);
3535 ractor_unsafe_check();
3536 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3537 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4]);
3543 ractor_unsafe_check();
3544 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3545 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
3551 ractor_unsafe_check();
3552 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3553 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
3559 ractor_unsafe_check();
3560 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3561 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
3567 ractor_unsafe_check();
3568 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3569 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
3575 ractor_unsafe_check();
3576 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3577 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
3583 ractor_unsafe_check();
3584 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3585 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
3591 ractor_unsafe_check();
3592 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3593 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
3599 ractor_unsafe_check();
3600 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3601 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
3607 ractor_unsafe_check();
3608 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3609 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
3615 ractor_unsafe_check();
3616 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3617 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
3631 return (*f)(argc, argv, recv);
3645 return (*f)(recv, argv[0]);
3652 return (*f)(recv, argv[0], argv[1]);
3659 return (*f)(recv, argv[0], argv[1], argv[2]);
3666 return (*f)(recv, argv[0], argv[1], argv[2], argv[3]);
3672 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3673 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4]);
3679 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3680 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
3686 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3687 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
3693 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3694 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
3700 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3701 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
3707 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3708 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
3714 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3715 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
3721 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3722 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
3728 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3729 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
3735 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3736 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
3742 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3743 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
3749 const int ov_flags = RAISED_STACKOVERFLOW;
3750 if (LIKELY(reg_cfp == ec->cfp + 1)) return TRUE;
3751 if (rb_ec_raised_p(ec, ov_flags)) {
3752 rb_ec_raised_reset(ec, ov_flags);
3758#define CHECK_CFP_CONSISTENCY(func) \
3759 (LIKELY(vm_cfp_consistent_p(ec, reg_cfp)) ? (void)0 : \
3760 rb_bug(func ": cfp consistency error (%p, %p)", (void *)reg_cfp, (void *)(ec->cfp+1)))
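/* A C method must leave the control-frame stack exactly as it found it: on
 * return, the frame pushed for the callee should be the topmost one
 * (reg_cfp == ec->cfp + 1).  vm_cfp_consistent_p() above tolerates one
 * exception, a pending stack-overflow unwind; anything else is a bug in the
 * called extension. */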
3766#if VM_DEBUG_VERIFY_METHOD_CACHE
3767 switch (me->def->type) {
3768 case VM_METHOD_TYPE_CFUNC:
3769 case VM_METHOD_TYPE_NOTIMPLEMENTED:
3771# define METHOD_BUG(t) case VM_METHOD_TYPE_##t: rb_bug("wrong method type: " #t)
3773 METHOD_BUG(ATTRSET);
3775 METHOD_BUG(BMETHOD);
3778 METHOD_BUG(OPTIMIZED);
3779 METHOD_BUG(MISSING);
3780 METHOD_BUG(REFINED);
3784 rb_bug("wrong method type: %d", me->def->type);
3787 return UNALIGNED_MEMBER_PTR(me->def, body.cfunc);
3794 RB_DEBUG_COUNTER_INC(ccf_cfunc_with_frame);
3801 VALUE recv = calling->recv;
3802 VALUE block_handler = calling->block_handler;
3803 VALUE frame_type = VM_FRAME_MAGIC_CFUNC | VM_FRAME_FLAG_CFRAME | VM_ENV_FLAG_LOCAL;
3805 if (UNLIKELY(calling->kw_splat)) {
3806 frame_type |= VM_FRAME_FLAG_CFRAME_KW;
3809 VM_ASSERT(reg_cfp == ec->cfp);
3811 RUBY_DTRACE_CMETHOD_ENTRY_HOOK(ec, me->owner, me->def->original_id);
3814 vm_push_frame(ec, NULL, frame_type, recv,
3815 block_handler, (VALUE)me,
3816 0, ec->cfp->sp, 0, 0);
3818 int len = cfunc->argc;
3821 reg_cfp->sp = stack_bottom;
3822 val = (*cfunc->invoker)(recv, argc, argv, cfunc->func);
3824 CHECK_CFP_CONSISTENCY("vm_call_cfunc");
3826 rb_vm_pop_frame(ec);
3828 VM_ASSERT(ec->cfp->sp == stack_bottom);
3830 EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_RETURN, recv, me->def->original_id, vm_ci_mid(ci), me->owner, val);
3831 RUBY_DTRACE_CMETHOD_RETURN_HOOK(ec, me->owner, me->def->original_id);
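/* Taken together, the C-method call sequence above is: fire C_CALL hooks,
 * push a CFUNC frame, drop sp to the stack bottom, invoke through the arity
 * trampoline (cfunc->invoker), verify cfp consistency, pop the frame, then
 * fire C_RETURN hooks. */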
3841 VM_ASSERT(cme->def->type == VM_METHOD_TYPE_CFUNC);
3843 VALUE *sp = ec->cfp->sp;
3844 VALUE recv = *(sp - recv_idx - 1);
3845 VALUE frame_type = VM_FRAME_MAGIC_CFUNC | VM_FRAME_FLAG_CFRAME | VM_ENV_FLAG_LOCAL;
3846 VALUE block_handler = VM_BLOCK_HANDLER_NONE;
3847#if VM_CHECK_MODE > 0
3849 *(GET_EC()->cfp->sp) = Qfalse;
3851 vm_push_frame(ec, NULL, frame_type, recv, block_handler, (VALUE)cme, 0, ec->cfp->sp, 0, 0);
3856 rb_splat_or_kwargs_p(const struct rb_callinfo *restrict ci)
3858 return IS_ARGS_SPLAT(ci) || IS_ARGS_KW_OR_KW_SPLAT(ci);
3864 int argc = calling->argc;
3865 VALUE *stack_bottom = reg_cfp->sp - argc - 1;
3866 VALUE *argv = &stack_bottom[1];
3868 return vm_call_cfunc_with_frame_(ec, reg_cfp, calling, argc, argv, stack_bottom);
3875 RB_DEBUG_COUNTER_INC(ccf_cfunc_other);
3877 CALLER_SETUP_ARG(reg_cfp, calling, ci, ALLOW_HEAP_ARGV_KEEP_KWSPLAT);
3879 if (UNLIKELY(argv_ary = calling->heap_argv)) {
3880 VM_ASSERT(!IS_ARGS_KEYWORD(ci));
3883 VALUE *stack_bottom = reg_cfp->sp - 2;
3885 VM_ASSERT(calling->argc == 1);
3889 return vm_call_cfunc_with_frame_(ec, reg_cfp, calling, argc, argv, stack_bottom);
3892 CC_SET_FASTPATH(calling->cc, vm_call_cfunc_with_frame, !rb_splat_or_kwargs_p(ci) && !calling->kw_splat && !(vm_ci_flag(ci) & VM_CALL_FORWARDING));
3894 return vm_call_cfunc_with_frame(ec, reg_cfp, calling);
3901 VALUE argv_ary = reg_cfp->sp[-1 - stack_offset];
3904 if (UNLIKELY(argc > VM_ARGC_STACK_MAX)) {
3905 return vm_call_cfunc_other(ec, reg_cfp, calling);
3909 calling->kw_splat = 0;
3911 VALUE *stack_bottom = reg_cfp->sp - 2 - stack_offset;
3912 VALUE *sp = stack_bottom;
3913 CHECK_VM_STACK_OVERFLOW(reg_cfp, argc);
3914 for(i = 0; i < argc; i++) {
3919 return vm_call_cfunc_with_frame_(ec, reg_cfp, calling, argc, stack_bottom+1, stack_bottom);
3925 RB_DEBUG_COUNTER_INC(ccf_cfunc_only_splat);
3926 VALUE argv_ary = reg_cfp->sp[-1];
3930 int argc_offset = 0;
3932 if (UNLIKELY(argc > 0 &&
3934 (((struct RHash *)last_hash)->basic.flags & RHASH_PASS_AS_KEYWORDS))) {
3936 return vm_call_cfunc_other(ec, reg_cfp, calling);
3940 return vm_call_cfunc_array_argv(ec, reg_cfp, calling, 0, argc_offset);
3946 RB_DEBUG_COUNTER_INC(ccf_cfunc_only_splat_kw);
3947 VALUE keyword_hash = reg_cfp->sp[-1];
3950 return vm_call_cfunc_array_argv(ec, reg_cfp, calling, 1, 0);
3953 return vm_call_cfunc_other(ec, reg_cfp, calling);
3960 RB_DEBUG_COUNTER_INC(ccf_cfunc);
3962 if (IS_ARGS_SPLAT(ci) && !(vm_ci_flag(ci) & VM_CALL_FORWARDING)) {
3963 if (!IS_ARGS_KW_SPLAT(ci) && vm_ci_argc(ci) == 1) {
3965 CC_SET_FASTPATH(calling->cc, vm_call_cfunc_only_splat, TRUE);
3966 return vm_call_cfunc_only_splat(ec, reg_cfp, calling);
3968 if (IS_ARGS_KW_SPLAT(ci) && vm_ci_argc(ci) == 2) {
3970 CC_SET_FASTPATH(calling->cc, vm_call_cfunc_only_splat_kw, TRUE);
3971 return vm_call_cfunc_only_splat_kw(ec, reg_cfp, calling);
3975 CC_SET_FASTPATH(calling->cc, vm_call_cfunc_other, TRUE);
3976 return vm_call_cfunc_other(ec, reg_cfp, calling);
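/* CC_SET_FASTPATH() memoizes the handler chosen above in the call cache, so
 * the splat/kw-splat shape analysis runs once per call site; later calls
 * from the same site jump straight to the specialized handler (only_splat,
 * only_splat_kw, or the generic cfunc_other path). */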
3983 RB_DEBUG_COUNTER_INC(ccf_ivar);
3985 VALUE ivar = vm_getivar(calling->recv, vm_cc_cme(cc)->def->body.attr.id, NULL, NULL, cc, TRUE, Qnil);
3992 RB_DEBUG_COUNTER_INC(ccf_attrset);
3993 VALUE val = *(cfp->sp - 1);
3996 shape_id_t dest_shape_id;
3997 vm_cc_atomic_shape_and_index(cc, &dest_shape_id, &index);
3998 ID id = vm_cc_cme(cc)->def->body.attr.id;
3999 rb_check_frozen(obj);
4000 VALUE res = vm_setivar(obj, id, val, dest_shape_id, index);
4009 res = vm_setivar_default(obj, id, val, dest_shape_id, index);
4010 if (!UNDEF_P(res)) {
4015 res = vm_setivar_slowpath_attr(obj, id, val, cc);
4023 return vm_call_attrset_direct(ec, cfp, calling->cc, calling->recv);
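/* Attribute writers try three tiers above: vm_setivar() using the shape id
 * and index cached in the call cache, vm_setivar_default() for objects
 * whose ivars live in the generic table, and vm_setivar_slowpath_attr()
 * when neither cached path applies. */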
4033 VALUE procv = cme->def->body.bmethod.proc;
4036 cme->def->body.bmethod.defined_ractor != rb_ractor_self(rb_ec_ractor_ptr(ec))) {
4037 rb_raise(rb_eRuntimeError, "defined with an un-shareable Proc in a different Ractor");
4041 GetProcPtr(procv, proc);
4042 val = vm_invoke_bmethod(ec, proc, calling->recv, CALLING_ARGC(calling), argv, calling->kw_splat, calling->block_handler, vm_cc_cme(cc));
4052 RB_DEBUG_COUNTER_INC(ccf_iseq_bmethod);
4056 VALUE procv = cme->def->body.bmethod.proc;
4059 cme->def->body.bmethod.defined_ractor != rb_ractor_self(rb_ec_ractor_ptr(ec))) {
4060 rb_raise(rb_eRuntimeError, "defined with an un-shareable Proc in a different Ractor");
4064 GetProcPtr(procv, proc);
4065 const struct rb_block *block = &proc->block;
4067 while (vm_block_type(block) == block_type_proc) {
4068 block = vm_proc_block(block->as.proc);
4070 VM_ASSERT(vm_block_type(block) == block_type_iseq);
4073 const rb_iseq_t *iseq = rb_iseq_check(captured->code.iseq);
4074 VALUE *const argv = cfp->sp - calling->argc;
4075 const int arg_size = ISEQ_BODY(iseq)->param.size;
4078 if (vm_ci_flag(calling->cd->ci) & VM_CALL_ARGS_SIMPLE) {
4079 opt_pc = vm_callee_setup_block_arg(ec, calling, calling->cd->ci, iseq, argv, arg_setup_method);
4082 opt_pc = setup_parameters_complex(ec, iseq, calling, calling->cd->ci, argv, arg_setup_method);
4087 vm_push_frame(ec, iseq,
4088 VM_FRAME_MAGIC_BLOCK | VM_FRAME_FLAG_BMETHOD | VM_FRAME_FLAG_LAMBDA,
4090 VM_GUARDED_PREV_EP(captured->ep),
4092 ISEQ_BODY(iseq)->iseq_encoded + opt_pc,
4094 ISEQ_BODY(iseq)->local_table_size - arg_size,
4095 ISEQ_BODY(iseq)->stack_max);
4103 RB_DEBUG_COUNTER_INC(ccf_noniseq_bmethod);
4107 CALLER_SETUP_ARG(cfp, calling, calling->cd->ci, ALLOW_HEAP_ARGV);
4108 if (UNLIKELY(calling->heap_argv)) {
4113 argc = calling->argc;
4116 cfp->sp += - argc - 1;
4119 return vm_call_bmethod_body(ec, calling, argv);
4125 RB_DEBUG_COUNTER_INC(ccf_bmethod);
4129 VALUE procv = cme->def->body.bmethod.proc;
4131 GetProcPtr(procv, proc);
4132 const struct rb_block *block = &proc->block;
4134 while (vm_block_type(block) == block_type_proc) {
4135 block = vm_proc_block(block->as.proc);
4137 if (vm_block_type(block) == block_type_iseq) {
4138 CC_SET_FASTPATH(cc, vm_call_iseq_bmethod, TRUE);
4139 return vm_call_iseq_bmethod(ec, cfp, calling);
4142 CC_SET_FASTPATH(cc, vm_call_noniseq_bmethod, TRUE);
4143 return vm_call_noniseq_bmethod(ec, cfp, calling);
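/* Methods created by define_method wrap a Proc.  The dispatcher above
 * unwraps proc-type blocks down to the underlying block: iseq blocks take
 * the fast vm_call_iseq_bmethod path (pushed as a BLOCK frame carrying the
 * BMETHOD and LAMBDA flags), anything else goes through
 * vm_call_noniseq_bmethod. */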
4147 rb_find_defined_class_by_owner(VALUE current_class, VALUE target_owner)
4149 VALUE klass = current_class;
4157 while (RTEST(klass)) {
4159 if (owner == target_owner) {
4165 return current_class;
4174 if (orig_me->defined_class == 0) {
4175 VALUE defined_class = rb_find_defined_class_by_owner(me->defined_class, orig_me->owner);
4176 VM_ASSERT_TYPE(orig_me->owner, T_MODULE);
4177 cme = rb_method_entry_complement_defined_class(orig_me, me->called_id, defined_class);
4179 if (me->def->reference_count == 1) {
4180 RB_OBJ_WRITE(me, &me->def->body.alias.original_me, cme);
4184 rb_method_definition_create(VM_METHOD_TYPE_ALIAS, me->def->original_id);
4192 VM_ASSERT(callable_method_entry_p(cme));
4199 return aliased_callable_method_entry(me);
4205 calling->cc = &VM_CC_ON_STACK(Qundef, vm_call_general, {{ 0 }},
4208 aliased_callable_method_entry(vm_cc_cme(calling->cc)));
4210 return vm_call_method_each_type(ec, cfp, calling);
4213static enum method_missing_reason
4216 enum method_missing_reason stat = MISSING_NOENTRY;
4217 if (vm_ci_flag(ci) & VM_CALL_VCALL) stat |= MISSING_VCALL;
4218 if (vm_ci_flag(ci) & VM_CALL_FCALL) stat |= MISSING_FCALL;
4219 if (vm_ci_flag(ci) & VM_CALL_SUPER) stat |= MISSING_SUPER;
4229 ASSUME(calling->argc >= 0);
4231 enum method_missing_reason missing_reason = MISSING_NOENTRY;
4232 int argc = calling->argc;
4233 VALUE recv = calling->recv;
4236 flags |= VM_CALL_OPT_SEND;
4238 if (UNLIKELY(! mid)) {
4239 mid = idMethodMissing;
4240 missing_reason = ci_missing_reason(ci);
4241 ec->method_missing_reason = missing_reason;
4244 if (UNLIKELY(argv_ary = calling->heap_argv)) {
4245 if (rb_method_basic_definition_p(klass, idMethodMissing)) {
4246 rb_ary_unshift(argv_ary, symbol);
4249 int priv = vm_ci_flag(ci) & (VM_CALL_FCALL | VM_CALL_VCALL);
4250 VALUE exc = rb_make_no_method_exception(
4272 CHECK_VM_STACK_OVERFLOW(reg_cfp, 1);
4275 argc = ++calling->argc;
4277 if (rb_method_basic_definition_p(klass, idMethodMissing)) {
4280 int priv = vm_ci_flag(ci) & (VM_CALL_FCALL | VM_CALL_VCALL);
4281 const VALUE *argv = STACK_ADDR_FROM_TOP(argc);
4282 VALUE exc = rb_make_no_method_exception(
4295 .ci = &VM_CI_ON_STACK(mid, flags, argc, vm_ci_kwarg(ci)),
4301 if (!(vm_ci_flag(ci) & VM_CALL_FORWARDING)) {
4302 calling->cd = &new_fcd.cd;
4306 VM_ASSERT((vm_ci_argc(caller_ci), 1));
4307 new_fcd.caller_ci = caller_ci;
4310 calling->cc = &VM_CC_ON_STACK(klass,
4312 { .method_missing_reason = missing_reason },
4313 rb_callable_method_entry_with_refinements(klass, mid, NULL));
4315 if (flags & VM_CALL_FCALL) {
4316 return vm_call_method(ec, reg_cfp, calling);
4320 VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));
4322 if (vm_cc_cme(cc) != NULL) {
4323 switch (METHOD_ENTRY_VISI(vm_cc_cme(cc))) {
4324 case METHOD_VISI_PUBLIC:
4325 return vm_call_method_each_type(ec, reg_cfp, calling);
4326 case METHOD_VISI_PRIVATE:
4327 vm_cc_method_missing_reason_set(cc, MISSING_PRIVATE);
4329 case METHOD_VISI_PROTECTED:
4330 vm_cc_method_missing_reason_set(cc, MISSING_PROTECTED);
4333 VM_UNREACHABLE(vm_call_method);
4335 return vm_call_method_missing(ec, reg_cfp, calling);
4338 return vm_call_method_nome(ec, reg_cfp, calling);
4348 i = calling->argc - 1;
4350 if (calling->argc == 0) {
4351 rb_raise(rb_eArgError, "no method name given");
4375 return vm_call_symbol(ec, reg_cfp, calling, ci, sym, flags);
4381 RB_DEBUG_COUNTER_INC(ccf_opt_send_complex);
4383 int flags = VM_CALL_FCALL;
4387 CALLER_SETUP_ARG(reg_cfp, calling, ci, ALLOW_HEAP_ARGV);
4388 if (UNLIKELY(argv_ary = calling->heap_argv)) {
4390 flags |= VM_CALL_ARGS_SPLAT;
4391 if (calling->kw_splat) {
4392 VALUE last_hash = rb_ary_last(0, NULL, argv_ary);
4393 ((struct RHash *)last_hash)->basic.flags |= RHASH_PASS_AS_KEYWORDS;
4394 calling->kw_splat = 0;
4396 return vm_call_symbol(ec, reg_cfp, calling, ci, sym, flags);
4399 if (calling->kw_splat) flags |= VM_CALL_KW_SPLAT;
4400 return vm_call_opt_send0(ec, reg_cfp, calling, flags);
4406 RB_DEBUG_COUNTER_INC(ccf_opt_send_simple);
4407 return vm_call_opt_send0(ec, reg_cfp, calling, vm_ci_flag(calling->cd->ci) | VM_CALL_FCALL);
4413 RB_DEBUG_COUNTER_INC(ccf_opt_send);
4416 int flags = vm_ci_flag(ci);
4418 if (UNLIKELY((flags & VM_CALL_FORWARDING) || (!(flags & VM_CALL_ARGS_SIMPLE) &&
4419 ((calling->argc == 1 && (flags & (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT))) ||
4420 (calling->argc == 2 && (flags & VM_CALL_ARGS_SPLAT) && (flags & VM_CALL_KW_SPLAT)) ||
4421 ((flags & VM_CALL_KWARG) && (vm_ci_kwarg(ci)->keyword_len == calling->argc)))))) {
4422 CC_SET_FASTPATH(calling->cc, vm_call_opt_send_complex, TRUE);
4423 return vm_call_opt_send_complex(ec, reg_cfp, calling);
4426 CC_SET_FASTPATH(calling->cc, vm_call_opt_send_simple, TRUE);
4427 return vm_call_opt_send_simple(ec, reg_cfp, calling);
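/* send/__send__ consumes its leading method-name argument and
 * re-dispatches.  Simple argument shapes reuse the caller's stack layout
 * via vm_call_opt_send_simple; splats, kw-splats, forwarding, and
 * kwarg-heavy shapes need the heap-argv handling in
 * vm_call_opt_send_complex. */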
4432 const struct rb_callinfo *orig_ci, enum method_missing_reason reason)
4434 RB_DEBUG_COUNTER_INC(ccf_method_missing);
4436 VALUE *argv = STACK_ADDR_FROM_TOP(calling->argc);
4437 unsigned int argc, flag;
4439 flag = VM_CALL_FCALL | VM_CALL_OPT_SEND | vm_ci_flag(orig_ci);
4440 argc = ++calling->argc;
4443 CHECK_VM_STACK_OVERFLOW(reg_cfp, 1);
4444 vm_check_canary(ec, reg_cfp->sp);
4448 argv[0] = ID2SYM(vm_ci_mid(orig_ci));
4451 ec->method_missing_reason = reason;
4455 .ci = &VM_CI_ON_STACK(idMethodMissing, flag, argc, vm_ci_kwarg(orig_ci)),
4461 if (!(flag & VM_CALL_FORWARDING)) {
4462 calling->cd = &new_fcd.cd;
4466 VM_ASSERT((vm_ci_argc(caller_ci), 1));
4467 new_fcd.caller_ci = caller_ci;
4471 calling->cc = &VM_CC_ON_STACK(Qundef, vm_call_general, {{ 0 }},
4472 rb_callable_method_entry_without_refinements(CLASS_OF(calling->recv), idMethodMissing, NULL));
4473 return vm_call_method(ec, reg_cfp, calling);
4479 return vm_call_method_missing_body(ec, reg_cfp, calling, calling->cd->ci, vm_cc_cmethod_missing_reason(calling->cc));
4490 return vm_call_method_nome(ec, cfp, calling);
4492 if (cme->def->type == VM_METHOD_TYPE_REFINED &&
4493 cme->def->body.refined.orig_me) {
4494 cme = refined_method_callable_without_refinement(cme);
4497 calling->cc = &VM_CC_ON_STACK(Qundef, vm_call_general, {{ 0 }}, cme);
4499 return vm_call_method_each_type(ec, cfp, calling);
4503 find_refinement(VALUE refinements, VALUE klass)
4505 if (NIL_P(refinements)) {
4508 return rb_hash_lookup(refinements, klass);
4517 if (cfp->iseq && ISEQ_BODY(cfp->iseq)->type == ISEQ_TYPE_BLOCK) {
4518 const rb_iseq_t *local_iseq = ISEQ_BODY(cfp->iseq)->local_iseq;
4521 cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
4522 if (RUBY_VM_CONTROL_FRAME_STACK_OVERFLOW_P(ec, cfp)) {
4526 } while (cfp->iseq != local_iseq);
4537 if (orig_me->defined_class == 0) {
4545 VM_ASSERT(callable_method_entry_p(cme));
4547 if (UNDEFINED_METHOD_ENTRY_P(cme)) {
4557 ID mid = vm_ci_mid(calling->cd->ci);
4558 const rb_cref_t *cref = vm_get_cref(cfp->ep);
4562 for (; cref; cref = CREF_NEXT(cref)) {
4563 const VALUE refinement = find_refinement(CREF_REFINEMENTS(cref), vm_cc_cme(cc)->owner);
4564 if (NIL_P(refinement)) continue;
4567 rb_callable_method_entry(refinement, mid);
4570 if (vm_cc_call(cc) == vm_call_super_method) {
4573 if (top_me && rb_method_definition_eq(ref_me->def, top_me->def)) {
4578 if (cme->def->type != VM_METHOD_TYPE_REFINED ||
4579 cme->def != ref_me->def) {
4582 if (ref_me->def->type != VM_METHOD_TYPE_REFINED) {
4591 if (vm_cc_cme(cc)->def->body.refined.orig_me) {
4592 return refined_method_callable_without_refinement(vm_cc_cme(cc));
4607 if (calling->cd->cc) {
4608 const struct rb_callcache *cc = calling->cc = vm_cc_new(vm_cc_cme(calling->cc)->defined_class, ref_cme, vm_call_general, cc_type_refinement);
4610 return vm_call_method(ec, cfp, calling);
4613 struct rb_callcache *ref_cc = &VM_CC_ON_STACK(Qundef, vm_call_general, {{ 0 }}, ref_cme);
4614 calling->cc = ref_cc;
4615 return vm_call_method(ec, cfp, calling);
4619 return vm_call_method_nome(ec, cfp, calling);
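/* Refined calls walk the CREF chain above looking for an activated
 * refinement of the method's owner; when none applies (or the refined entry
 * would resolve back to the original definition), the call falls through to
 * the unrefined method entry instead. */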
4625 NOINLINE(static VALUE
4633 int argc = calling->argc;
4636 if (argc > 0) MEMMOVE(&TOPN(argc), &TOPN(argc-1), VALUE, argc);
4639 return vm_invoke_block(ec, reg_cfp, calling, ci, false, block_handler);
4645 RB_DEBUG_COUNTER_INC(ccf_opt_call);
4648 VALUE procval = calling->recv;
4649 return vm_invoke_block_opt_call(ec, reg_cfp, calling, ci, VM_BH_FROM_PROC(procval));
4655 RB_DEBUG_COUNTER_INC(ccf_opt_block_call);
4657 VALUE block_handler = VM_ENV_BLOCK_HANDLER(VM_CF_LEP(reg_cfp));
4660 if (BASIC_OP_UNREDEFINED_P(BOP_CALL, PROC_REDEFINED_OP_FLAG)) {
4661 return vm_invoke_block_opt_call(ec, reg_cfp, calling, ci, block_handler);
4664 calling->recv = rb_vm_bh_to_procval(ec, block_handler);
4665 calling->cc = rb_vm_search_method_slowpath(ci, CLASS_OF(calling->recv));
4666 return vm_call_general(ec, reg_cfp, calling);
4673 VALUE recv = calling->recv;
4676 VM_ASSERT(vm_cc_cme(calling->cc)->def->type == VM_METHOD_TYPE_OPTIMIZED);
4677 VM_ASSERT(vm_cc_cme(calling->cc)->def->body.optimized.type == OPTIMIZED_METHOD_TYPE_STRUCT_AREF);
4679 const unsigned int off = vm_cc_cme(calling->cc)->def->body.optimized.index;
4680 return internal_RSTRUCT_GET(recv, off);
4686 RB_DEBUG_COUNTER_INC(ccf_opt_struct_aref);
4688 VALUE ret = vm_call_opt_struct_aref0(ec, calling);
4696 VALUE recv = calling->recv;
4699 VM_ASSERT(vm_cc_cme(calling->cc)->def->type == VM_METHOD_TYPE_OPTIMIZED);
4700 VM_ASSERT(vm_cc_cme(calling->cc)->def->body.optimized.type == OPTIMIZED_METHOD_TYPE_STRUCT_ASET);
4702 rb_check_frozen(recv);
4704 const unsigned int off = vm_cc_cme(calling->cc)->def->body.optimized.index;
4705 internal_RSTRUCT_SET(recv, off, val);
4713 RB_DEBUG_COUNTER_INC(ccf_opt_struct_aset);
4715 VALUE ret = vm_call_opt_struct_aset0(ec, calling, *(reg_cfp->sp - 1));
4723#define VM_CALL_METHOD_ATTR(var, func, nohook) \
4724 if (UNLIKELY(ruby_vm_event_flags & (RUBY_EVENT_C_CALL | RUBY_EVENT_C_RETURN))) { \
4725 EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_CALL, calling->recv, vm_cc_cme(cc)->def->original_id, \
4726 vm_ci_mid(ci), vm_cc_cme(cc)->owner, Qundef); \
4728 EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_RETURN, calling->recv, vm_cc_cme(cc)->def->original_id, \
4729 vm_ci_mid(ci), vm_cc_cme(cc)->owner, (var)); \
4740 switch (vm_cc_cme(cc)->def->body.optimized.type) {
4741 case OPTIMIZED_METHOD_TYPE_SEND:
4742 CC_SET_FASTPATH(cc, vm_call_opt_send, TRUE);
4743 return vm_call_opt_send(ec, cfp, calling);
4744 case OPTIMIZED_METHOD_TYPE_CALL:
4745 CC_SET_FASTPATH(cc, vm_call_opt_call, TRUE);
4746 return vm_call_opt_call(ec, cfp, calling);
4747 case OPTIMIZED_METHOD_TYPE_BLOCK_CALL:
4748 CC_SET_FASTPATH(cc, vm_call_opt_block_call, TRUE);
4749 return vm_call_opt_block_call(ec, cfp, calling);
4750 case OPTIMIZED_METHOD_TYPE_STRUCT_AREF: {
4751 CALLER_SETUP_ARG(cfp, calling, ci, 0);
4755 VM_CALL_METHOD_ATTR(v,
4756 vm_call_opt_struct_aref(ec, cfp, calling),
4757 set_vm_cc_ivar(cc); \
4758 CC_SET_FASTPATH(cc, vm_call_opt_struct_aref, (vm_ci_flag(ci) & VM_CALL_ARGS_SIMPLE)))
4761 case OPTIMIZED_METHOD_TYPE_STRUCT_ASET: {
4762 CALLER_SETUP_ARG(cfp, calling, ci, 1);
4766 VM_CALL_METHOD_ATTR(v,
4767 vm_call_opt_struct_aset(ec, cfp, calling),
4768 set_vm_cc_ivar(cc); \
4769 CC_SET_FASTPATH(cc, vm_call_opt_struct_aset, (vm_ci_flag(ci) & VM_CALL_ARGS_SIMPLE)))
4773 rb_bug("vm_call_method: unsupported optimized method type (%d)", vm_cc_cme(cc)->def->body.optimized.type);
4785 VM_ASSERT(! METHOD_ENTRY_INVALIDATED(cme));
4787 switch (cme->def->type) {
4788 case VM_METHOD_TYPE_ISEQ:
4789 if (ISEQ_BODY(def_iseq_ptr(cme->def))->param.flags.forwardable) {
4790 CC_SET_FASTPATH(cc, vm_call_iseq_fwd_setup, TRUE);
4791 return vm_call_iseq_fwd_setup(ec, cfp, calling);
4794 CC_SET_FASTPATH(cc, vm_call_iseq_setup, TRUE);
4795 return vm_call_iseq_setup(ec, cfp, calling);
4798 case VM_METHOD_TYPE_NOTIMPLEMENTED:
4799 case VM_METHOD_TYPE_CFUNC:
4800 CC_SET_FASTPATH(cc, vm_call_cfunc, TRUE);
4801 return vm_call_cfunc(ec, cfp, calling);
4803 case VM_METHOD_TYPE_ATTRSET:
4804 CALLER_SETUP_ARG(cfp, calling, ci, 1);
4808 const unsigned int aset_mask = (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT | VM_CALL_KWARG | VM_CALL_FORWARDING);
4810 if (vm_cc_markable(cc)) {
4811 vm_cc_attr_index_initialize(cc, INVALID_SHAPE_ID);
4812 VM_CALL_METHOD_ATTR(v,
4813 vm_call_attrset_direct(ec, cfp, cc, calling->recv),
4814 CC_SET_FASTPATH(cc, vm_call_attrset, !(vm_ci_flag(ci) & aset_mask)));
4820 VM_CALLCACHE_UNMARKABLE |
4821 VM_CALLCACHE_ON_STACK,
4827 .value = vm_pack_shape_and_index(INVALID_SHAPE_ID, ATTR_INDEX_NOT_SET),
4832 VM_CALL_METHOD_ATTR(v,
4833 vm_call_attrset_direct(ec, cfp, cc, calling->recv),
4834 CC_SET_FASTPATH(cc, vm_call_attrset, !(vm_ci_flag(ci) & aset_mask)));
4838 case VM_METHOD_TYPE_IVAR:
4839 CALLER_SETUP_ARG(cfp, calling, ci, 0);
4841 vm_cc_attr_index_initialize(cc, INVALID_SHAPE_ID);
4842 const unsigned int ivar_mask = (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT | VM_CALL_FORWARDING);
4843 VM_CALL_METHOD_ATTR(v,
4844 vm_call_ivar(ec, cfp, calling),
4845 CC_SET_FASTPATH(cc, vm_call_ivar, !(vm_ci_flag(ci) & ivar_mask)));
4848 case VM_METHOD_TYPE_MISSING:
4849 vm_cc_method_missing_reason_set(cc, 0);
4850 CC_SET_FASTPATH(cc, vm_call_method_missing, TRUE);
4851 return vm_call_method_missing(ec, cfp, calling);
4853 case VM_METHOD_TYPE_BMETHOD:
4854 CC_SET_FASTPATH(cc, vm_call_bmethod, TRUE);
4855 return vm_call_bmethod(ec, cfp, calling);
4857 case VM_METHOD_TYPE_ALIAS:
4858 CC_SET_FASTPATH(cc, vm_call_alias, TRUE);
4859 return vm_call_alias(ec, cfp, calling);
4861 case VM_METHOD_TYPE_OPTIMIZED:
4862 return vm_call_optimized(ec, cfp, calling, ci, cc);
4864 case VM_METHOD_TYPE_UNDEF:
4867 case VM_METHOD_TYPE_ZSUPER:
4868 return vm_call_zsuper(ec, cfp, calling, RCLASS_ORIGIN(vm_cc_cme(cc)->defined_class));
4870 case VM_METHOD_TYPE_REFINED:
4873 return vm_call_refined(ec, cfp, calling);
4876 rb_bug("vm_call_method: unsupported method type (%d)", vm_cc_cme(cc)->def->type);
4886 const int stat = ci_missing_reason(ci);
4888 if (vm_ci_mid(ci) == idMethodMissing) {
4889 if (UNLIKELY(calling->heap_argv)) {
4894 VALUE *argv = STACK_ADDR_FROM_TOP(calling->argc);
4895 vm_raise_method_missing(ec, calling->argc, argv, calling->recv, stat);
4899 return vm_call_method_missing_body(ec, cfp, calling, ci, stat);
4911 VALUE defined_class = me->defined_class;
4912 VALUE refined_class = RCLASS_REFINED_CLASS(defined_class);
4913 return NIL_P(refined_class) ? defined_class : refined_class;
4922 VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));
4924 if (vm_cc_cme(cc) != NULL) {
4925 switch (METHOD_ENTRY_VISI(vm_cc_cme(cc))) {
4926 case METHOD_VISI_PUBLIC:
4927 return vm_call_method_each_type(ec, cfp, calling);
4929 case METHOD_VISI_PRIVATE:
4930 if (!(vm_ci_flag(ci) & VM_CALL_FCALL)) {
4931 enum method_missing_reason stat = MISSING_PRIVATE;
4932 if (vm_ci_flag(ci) & VM_CALL_VCALL) stat |= MISSING_VCALL;
4934 vm_cc_method_missing_reason_set(cc, stat);
4935 CC_SET_FASTPATH(cc, vm_call_method_missing, TRUE);
4936 return vm_call_method_missing(ec, cfp, calling);
4938 return vm_call_method_each_type(ec, cfp, calling);
4940 case METHOD_VISI_PROTECTED:
4941 if (!(vm_ci_flag(ci) & (VM_CALL_OPT_SEND | VM_CALL_FCALL))) {
4942 VALUE defined_class = vm_defined_class_for_protected_call(vm_cc_cme(cc));
4944 vm_cc_method_missing_reason_set(cc, MISSING_PROTECTED);
4945 return vm_call_method_missing(ec, cfp, calling);
4949 VM_ASSERT(vm_cc_cme(cc) != NULL);
4952 calling->cc = &cc_on_stack;
4953 return vm_call_method_each_type(ec, cfp, calling);
4956 return vm_call_method_each_type(ec, cfp, calling);
4959 rb_bug("unreachable");
4963 return vm_call_method_nome(ec, cfp, calling);
4970 RB_DEBUG_COUNTER_INC(ccf_general);
4971 return vm_call_method(ec, reg_cfp, calling);
4977 VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
4978 VM_ASSERT(cc != vm_cc_empty());
4980 *(vm_call_handler *)&cc->call_ = vm_call_general;
4986 RB_DEBUG_COUNTER_INC(ccf_super_method);
4991 if (ec == NULL) rb_bug("unreachable");
4994 VM_ASSERT(vm_cc_call(calling->cc) == vm_call_super_method);
4995 return vm_call_method(ec, reg_cfp, calling);
5001 vm_search_normal_superclass(VALUE klass)
5006 klass = RBASIC(klass)->klass;
5008 klass = RCLASS_ORIGIN(klass);
5012 NORETURN(static void vm_super_outside(void));
5015 vm_super_outside(void)
5021 empty_cc_for_super(void)
5023 return &vm_empty_cc_for_super;
5029 VALUE current_defined_class;
5036 current_defined_class = vm_defined_class_for_protected_call(me);
5039 reg_cfp->iseq != method_entry_iseqptr(me) &&
5042 RCLASS_INCLUDER(current_defined_class) : current_defined_class;
5046 "self has wrong type to call super in this context: "
5047 "%"PRIsVALUE
" (expected %"PRIsVALUE
")",
5052 if (me->def->type == VM_METHOD_TYPE_BMETHOD && (vm_ci_flag(cd->ci) & VM_CALL_ZSUPER)) {
5054 "implicit argument passing of super from method defined"
5055 " by define_method() is not supported."
5056 " Specify all arguments explicitly.");
5059 ID mid = me->def->original_id;
5061 if (!vm_ci_markable(cd->ci)) {
5062 VM_FORCE_WRITE((const VALUE *)&cd->ci->mid, (VALUE)mid);
5066 cd->ci = vm_ci_new_runtime(mid,
5069 vm_ci_kwarg(cd->ci));
5076 VALUE klass = vm_search_normal_superclass(me->defined_class);
5080 cc = vm_cc_new(klass, NULL, vm_call_method_missing, cc_type_super);
5084 cc = vm_search_method_fastpath((VALUE)reg_cfp->iseq, cd, klass);
5088 if (cached_cme == NULL) {
5090 cd->cc = empty_cc_for_super();
5092 else if (cached_cme->called_id != mid) {
5095 cc = vm_cc_new(klass, cme, vm_call_super_method, cc_type_super);
5099 cd->cc = cc = empty_cc_for_super();
5103 switch (cached_cme->def->type) {
5105 case VM_METHOD_TYPE_REFINED:
5107 case VM_METHOD_TYPE_ATTRSET:
5108 case VM_METHOD_TYPE_IVAR:
5109 vm_cc_call_set(cc, vm_call_super_method);
5117 VM_ASSERT((vm_cc_cme(cc), true));
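/* super resolves the frame method's original id starting from the
 * superclass of its defined class.  Argument-less (zsuper) super inside a
 * define_method body is rejected above because the block cannot recover the
 * surrounding method's parameter list. */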
5125 block_proc_is_lambda(const VALUE procval)
5130 GetProcPtr(procval, proc);
5131 return proc->is_lambda;
5139 block_proc_namespace(const VALUE procval)
5144 GetProcPtr(procval, proc);
5155 VALUE self, int argc, const VALUE *argv, int kw_splat, VALUE block_handler,
5158 int is_lambda = FALSE;
5159 VALUE val, arg, blockarg;
5161 const struct vm_ifunc *ifunc = captured->code.ifunc;
5166 else if (argc == 0) {
5173 blockarg = rb_vm_bh_to_procval(ec, block_handler);
5175 frame_flag = VM_FRAME_MAGIC_IFUNC | VM_FRAME_FLAG_CFRAME | (me ? VM_FRAME_FLAG_BMETHOD : 0);
5177 frame_flag |= VM_FRAME_FLAG_CFRAME_KW;
5180 vm_push_frame(ec, (const rb_iseq_t *)captured->code.ifunc,
5183 VM_GUARDED_PREV_EP(captured->ep),
5185 0, ec->cfp->sp, 0, 0);
5186 val = (*ifunc->func)(arg, (VALUE)ifunc->data, argc, argv, blockarg);
5187 rb_vm_pop_frame(ec);
5195 return vm_yield_with_cfunc(ec, captured, captured->self, argc, argv, 0, VM_BLOCK_HANDLER_NONE, NULL);
5201 return rb_sym_proc_call(SYM2ID(symbol), argc, argv, kw_splat, rb_vm_bh_to_procval(ec, block_handler));
5210 CHECK_VM_STACK_OVERFLOW(cfp, ISEQ_BODY(iseq)->param.lead_num);
5212 for (i=0; i<len && i<ISEQ_BODY(iseq)->param.lead_num; i++) {
5220 vm_callee_setup_block_arg_arg0_check(VALUE *argv)
5222 VALUE ary, arg0 = argv[0];
5227 VM_ASSERT(argv[0] == arg0);
5235 if (rb_simple_iseq_p(iseq)) {
5239 CALLER_SETUP_ARG(cfp, calling, ci, ISEQ_BODY(iseq)->param.lead_num);
5241 if (arg_setup_type == arg_setup_block &&
5242 calling->argc == 1 &&
5243 ISEQ_BODY(iseq)->param.flags.has_lead &&
5244 !ISEQ_BODY(iseq)->param.flags.ambiguous_param0 &&
5245 !NIL_P(arg0 = vm_callee_setup_block_arg_arg0_check(argv))) {
5246 calling->argc = vm_callee_setup_block_arg_arg0_splat(cfp, iseq, argv, arg0);
5249 if (calling->argc != ISEQ_BODY(iseq)->param.lead_num) {
5250 if (arg_setup_type == arg_setup_block) {
5251 if (calling->argc < ISEQ_BODY(iseq)->param.lead_num) {
5253 CHECK_VM_STACK_OVERFLOW(cfp, ISEQ_BODY(iseq)->param.lead_num);
5254 for (i=calling->argc; i<ISEQ_BODY(iseq)->param.lead_num; i++) argv[i] = Qnil;
5255 calling->argc = ISEQ_BODY(iseq)->param.lead_num;
5257 else if (calling->argc > ISEQ_BODY(iseq)->param.lead_num) {
5258 calling->argc = ISEQ_BODY(iseq)->param.lead_num;
5262 argument_arity_error(ec, iseq, NULL, calling->argc, ISEQ_BODY(iseq)->param.lead_num, ISEQ_BODY(iseq)->param.lead_num);
5269 return setup_parameters_complex(ec, iseq, calling, ci, argv, arg_setup_type);
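/* Non-lambda blocks get loose arity handling above: a single Array argument
 * is auto-splatted when the block takes several leads, missing arguments
 * are padded with nil, and extras are dropped.  Lambda-style setup
 * (arg_setup_method) instead raises on arity mismatch via
 * argument_arity_error(). */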
5278 calling = &calling_entry;
5279 calling->argc = argc;
5280 calling->block_handler = block_handler;
5281 calling->kw_splat = (flags & VM_CALL_KW_SPLAT) ? 1 : 0;
5283 calling->heap_argv = 0;
5285 struct rb_callinfo dummy_ci = VM_CI_ON_STACK(0, flags, 0, 0);
5287 return vm_callee_setup_block_arg(ec, calling, &dummy_ci, iseq, argv, arg_setup_type);
5295 bool is_lambda, VALUE block_handler)
5298 const rb_iseq_t *iseq = rb_iseq_check(captured->code.iseq);
5299 const int arg_size = ISEQ_BODY(iseq)->param.size;
5300 VALUE *const rsp = GET_SP() - calling->argc;
5301 VALUE *const argv = rsp;
5302 int opt_pc = vm_callee_setup_block_arg(ec, calling, ci, iseq, argv, is_lambda ? arg_setup_method : arg_setup_block);
5303 int frame_flag = VM_FRAME_MAGIC_BLOCK | (is_lambda ? VM_FRAME_FLAG_LAMBDA : 0);
5307 if (calling->proc_ns) {
5308 frame_flag |= VM_FRAME_FLAG_NS_SWITCH;
5311 vm_push_frame(ec, iseq,
5314 VM_GUARDED_PREV_EP(captured->ep), 0,
5315 ISEQ_BODY(iseq)->iseq_encoded + opt_pc,
5317 ISEQ_BODY(iseq)->local_table_size - arg_size, ISEQ_BODY(iseq)->stack_max);
5325 MAYBE_UNUSED(bool is_lambda), VALUE block_handler)
5327 VALUE symbol = VM_BH_TO_SYMBOL(block_handler);
5328 int flags = vm_ci_flag(ci);
5330 if (UNLIKELY(!(flags & VM_CALL_ARGS_SIMPLE) &&
5331 ((calling->argc == 0) ||
5332 (calling->argc == 1 && (flags & (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT))) ||
5333 (calling->argc == 2 && (flags & VM_CALL_ARGS_SPLAT) && (flags & VM_CALL_KW_SPLAT)) ||
5334 ((flags & VM_CALL_KWARG) && (vm_ci_kwarg(ci)->keyword_len == calling->argc))))) {
5335 CALLER_SETUP_ARG(reg_cfp, calling, ci, ALLOW_HEAP_ARGV);
5337 if (UNLIKELY(calling->heap_argv)) {
5338#if VM_ARGC_STACK_MAX < 0
5340 rb_raise(rb_eArgError, "no receiver given");
5346 reg_cfp->sp[-1] = reg_cfp->sp[-2];
5347 reg_cfp->sp[-2] = calling->recv;
5348 flags |= VM_CALL_ARGS_SPLAT;
5351 if (calling->argc < 1) {
5352 rb_raise(rb_eArgError, "no receiver given");
5354 calling->recv = TOPN(--calling->argc);
5356 if (calling->kw_splat) {
5357 flags |= VM_CALL_KW_SPLAT;
5361 if (calling->argc < 1) {
5362 rb_raise(rb_eArgError, "no receiver given");
5364 calling->recv = TOPN(--calling->argc);
5367 return vm_call_symbol(ec, reg_cfp, calling, ci, symbol, flags);
5373 MAYBE_UNUSED(bool is_lambda), VALUE block_handler)
5378 CALLER_SETUP_ARG(ec->cfp, calling, ci, ALLOW_HEAP_ARGV_KEEP_KWSPLAT);
5379 argc = calling->argc;
5380 val = vm_yield_with_cfunc(ec, captured, captured->self, CALLING_ARGC(calling), calling->heap_argv ? RARRAY_CONST_PTR(calling->heap_argv) : STACK_ADDR_FROM_TOP(argc), calling->kw_splat, calling->block_handler, NULL);
5386 vm_proc_to_block_handler(VALUE procval)
5388 const struct rb_block *block = vm_proc_block(procval);
5390 switch (vm_block_type(block)) {
5391 case block_type_iseq:
5392 return VM_BH_FROM_ISEQ_BLOCK(&block->as.captured);
5393 case block_type_ifunc:
5394 return VM_BH_FROM_IFUNC_BLOCK(&block->as.captured);
5395 case block_type_symbol:
5396 return VM_BH_FROM_SYMBOL(block->as.symbol);
5397 case block_type_proc:
5398 return VM_BH_FROM_PROC(block->as.proc);
5400 VM_UNREACHABLE(vm_yield_with_proc);
5407 bool is_lambda, VALUE block_handler)
5409 while (vm_block_handler_type(block_handler) == block_handler_type_proc) {
5410 VALUE proc = VM_BH_TO_PROC(block_handler);
5411 if (!calling->proc_ns) {
5412 calling->proc_ns = block_proc_namespace(proc);
5414 is_lambda = block_proc_is_lambda(proc);
5415 block_handler = vm_proc_to_block_handler(proc);
5418 return vm_invoke_block(ec, reg_cfp, calling, ci, is_lambda, block_handler);
5424 bool is_lambda, VALUE block_handler)
5428 bool is_lambda, VALUE block_handler);
5430 switch (vm_block_handler_type(block_handler)) {
5431 case block_handler_type_iseq: func = vm_invoke_iseq_block; break;
5432 case block_handler_type_ifunc: func = vm_invoke_ifunc_block; break;
5433 case block_handler_type_proc: func = vm_invoke_proc_block; break;
5434 case block_handler_type_symbol: func = vm_invoke_symbol_block; break;
5435 default: rb_bug("vm_invoke_block: unreachable");
5438 return func(ec, reg_cfp, calling, ci, is_lambda, block_handler);
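/* Block invocation dispatches on the handler tag: iseq blocks push a BLOCK
 * frame, ifunc blocks call into C, symbol handlers behave like
 * Symbol#to_proc, and proc handlers are first unwrapped by
 * vm_invoke_proc_block() above before re-dispatch. */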
5442 vm_make_proc_with_iseq(const rb_iseq_t *blockiseq)
5449 rb_bug("vm_make_proc_with_iseq: unreachable");
5452 captured = VM_CFP_TO_CAPTURED_BLOCK(cfp);
5453 captured->code.iseq = blockiseq;
5455 return rb_vm_make_proc(ec, captured, rb_cProc);
5459 vm_once_exec(VALUE iseq)
5466 vm_once_clear(VALUE data)
5469 is->once.running_thread = NULL;
5481 args[0] = obj; args[1] = Qfalse;
5483 if (!UNDEF_P(r) && RTEST(r)) {
5495 enum defined_type type = (enum defined_type)op_type;
5502 return rb_gvar_defined(SYM2ID(obj));
5504 case DEFINED_CVAR: {
5505 const rb_cref_t *cref = vm_get_cref(GET_EP());
5506 klass = vm_get_cvar_base(cref, GET_CFP(), 0);
5511 case DEFINED_CONST_FROM: {
5512 bool allow_nil = type == DEFINED_CONST;
5514 return vm_get_ev_const(ec, klass, SYM2ID(obj), allow_nil, true);
5519 return rb_ec_obj_respond_to(ec, v, SYM2ID(obj), TRUE);
5521 case DEFINED_METHOD:{
5526 switch (METHOD_ENTRY_VISI(me)) {
5527 case METHOD_VISI_PRIVATE:
5529 case METHOD_VISI_PROTECTED:
5533 case METHOD_VISI_PUBLIC:
5537 rb_bug("vm_defined: unreachable: %u", (unsigned int)METHOD_ENTRY_VISI(me));
5541 return check_respond_to_missing(obj, v);
5546 if (GET_BLOCK_HANDLER() != VM_BLOCK_HANDLER_NONE) {
5550 case DEFINED_ZSUPER:
5555 VALUE klass = vm_search_normal_superclass(me->defined_class);
5556 if (!klass) return false;
5558 ID id = me->def->original_id;
5565 return RTEST(vm_backref_defined(ec, GET_LEP(), FIX2INT(obj)));
5567 rb_bug("unimplemented defined? type (VM)");
5577 return vm_defined(ec, reg_cfp, op_type, obj, v);
5581 vm_get_ep(const VALUE *const reg_ep, rb_num_t lv)
5584 const VALUE *ep = reg_ep;
5585 for (i = 0; i < lv; i++) {
5586 ep = GET_PREV_EP(ep);
5592 vm_get_special_object(const VALUE *const reg_ep,
5593 enum vm_special_object_type type)
5596 case VM_SPECIAL_OBJECT_VMCORE:
5597 return rb_mRubyVMFrozenCore;
5598 case VM_SPECIAL_OBJECT_CBASE:
5599 return vm_get_cbase(reg_ep);
5600 case VM_SPECIAL_OBJECT_CONST_BASE:
5601 return vm_get_const_base(reg_ep);
5603 rb_bug("putspecialobject insn: unknown value_type %d", type);
5610 rb_vm_get_special_object(const VALUE *reg_ep, enum vm_special_object_type type)
5612 return vm_get_special_object(reg_ep, type);
5618 const VALUE ary2 = ary2st;
5619 VALUE tmp1 = rb_check_to_array(ary1);
5620 VALUE tmp2 = rb_check_to_array(ary2);
5641 const VALUE ary2 = ary2st;
5643 if (NIL_P(ary2)) return ary1;
5645 VALUE tmp2 = rb_check_to_array(ary2);
5660 return vm_concat_array(ary1, ary2st);
5664 rb_vm_concat_to_array(VALUE ary1, VALUE ary2st)
5666 return vm_concat_to_array(ary1, ary2st);
5675 VALUE tmp = rb_check_to_array(ary);
5679 else if (RTEST(flag)) {
5692 return vm_splat_array(flag, ary);
5698 enum vm_check_match_type type = ((int)flag) & VM_CHECKMATCH_TYPE_MASK;
5700 if (flag & VM_CHECKMATCH_ARRAY) {
5704 for (i = 0; i < n; i++) {
5706 VALUE c = check_match(ec, v, target, type);
5715 return check_match(ec, pattern, target, type);
5722 return vm_check_match(ec, target, pattern, flag);
5726 vm_check_keyword(lindex_t bits, lindex_t idx, const VALUE *ep)
5728 const VALUE kw_bits = *(ep - bits);
5731 unsigned int b = (unsigned int)FIX2ULONG(kw_bits);
5732 if ((idx < KW_SPECIFIED_BITS_MAX) && (b & (0x01 << idx)))
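/* kw_bits is a Fixnum bitset of the keyword parameters the caller actually
 * supplied (usable while there are at most KW_SPECIFIED_BITS_MAX keywords):
 * bit idx set means "explicitly given", letting the compiled default-value
 * code for that keyword be skipped. */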
5745 if (RUBY_DTRACE_METHOD_ENTRY_ENABLED() ||
5746 RUBY_DTRACE_METHOD_RETURN_ENABLED() ||
5747 RUBY_DTRACE_CMETHOD_ENTRY_ENABLED() ||
5748 RUBY_DTRACE_CMETHOD_RETURN_ENABLED()) {
5752 RUBY_DTRACE_METHOD_ENTRY_HOOK(ec, 0, 0);
5755 RUBY_DTRACE_CMETHOD_ENTRY_HOOK(ec, 0, 0);
5758 RUBY_DTRACE_METHOD_RETURN_HOOK(ec, 0, 0);
5761 RUBY_DTRACE_CMETHOD_RETURN_HOOK(ec, 0, 0);
5768 vm_const_get_under(ID id, rb_num_t flags, VALUE cbase)
5773 else if (VM_DEFINECLASS_SCOPED_P(flags)) {
5774 return rb_public_const_get_at(cbase, id);
5782 vm_check_if_class(ID id, rb_num_t flags, VALUE super, VALUE klass)
5787 else if (VM_DEFINECLASS_HAS_SUPERCLASS_P(flags)) {
5792 "superclass mismatch for class %"PRIsVALUE"",
5805 vm_check_if_module(ID id, VALUE mod)
5824 vm_declare_class(ID id, rb_num_t flags, VALUE cbase, VALUE super)
5827 VALUE s = VM_DEFINECLASS_HAS_SUPERCLASS_P(flags) ? super : rb_cObject;
5835 vm_declare_module(ID id, VALUE cbase)
5841 NORETURN(static void unmatched_redefinition(const char *type, VALUE cbase, ID id, VALUE old));
5845 VALUE name = rb_id2str(id);
5846 VALUE message = rb_sprintf("%"PRIsVALUE" is not a %s",
5848 VALUE location = rb_const_source_location_at(cbase, id);
5849 if (!NIL_P(location)) {
5850 rb_str_catf(message, "\n%"PRIsVALUE":%"PRIsVALUE":"
5851 " previous definition of %"PRIsVALUE" was here",
5858 vm_define_class(ID id, rb_num_t flags, VALUE cbase, VALUE super)
5862 if (VM_DEFINECLASS_HAS_SUPERCLASS_P(flags) && !RB_TYPE_P(super, T_CLASS)) {
5864 "superclass must be an instance of Class (given an instance of %"PRIsVALUE")",
5868 vm_check_if_namespace(cbase);
5873 if ((klass = vm_const_get_under(id, flags, cbase)) != 0) {
5874 if (!vm_check_if_class(id, flags, super, klass))
5875 unmatched_redefinition("class", cbase, id, klass);
5879 return vm_declare_class(id, flags, cbase, super);
5884 vm_define_module(ID id, rb_num_t flags, VALUE cbase)
5888 vm_check_if_namespace(cbase);
5889 if ((mod = vm_const_get_under(id, flags, cbase)) != 0) {
5890 if (!vm_check_if_module(id, mod))
5891 unmatched_redefinition("module", cbase, id, mod);
5895 return vm_declare_module(id, cbase);
5900 vm_find_or_create_class_by_id(ID id,
5905 rb_vm_defineclass_type_t type = VM_DEFINECLASS_TYPE(flags);
5908 case VM_DEFINECLASS_TYPE_CLASS:
5910 return vm_define_class(id, flags, cbase, super);
5912 case VM_DEFINECLASS_TYPE_SINGLETON_CLASS:
5916 case VM_DEFINECLASS_TYPE_MODULE:
5918 return vm_define_module(id, flags, cbase);
5921 rb_bug("unknown defineclass type: %d", (int)type);
5925static rb_method_visibility_t
5930 if (!vm_env_cref_by_cref(cfp->ep)) {
5931 return METHOD_VISI_PUBLIC;
5934 return CREF_SCOPE_VISI(vm_ec_cref(ec))->method_visi;
5943 if (!vm_env_cref_by_cref(cfp->ep)) {
5947 return CREF_SCOPE_VISI(vm_ec_cref(ec))->module_func;
5955 rb_method_visibility_t visi;
5960 visi = METHOD_VISI_PUBLIC;
5963 klass = CREF_CLASS_FOR_DEFINITION(cref);
5964 visi = vm_scope_visibility_get(ec);
5971 rb_add_method_iseq(klass, id, (const rb_iseq_t *)iseqval, cref, visi);
5974 RCLASS_SET_MAX_IV_COUNT(klass, rb_estimate_iv_count(klass, (const rb_iseq_t *)iseqval));
5977 if (!is_singleton && vm_scope_module_func_check(ec)) {
5979 rb_add_method_iseq(klass, id, (const rb_iseq_t *)iseqval, cref, METHOD_VISI_PUBLIC);
5989 VALUE block_handler = VM_CF_BLOCK_HANDLER(GET_CFP());
5991 if (block_handler == VM_BLOCK_HANDLER_NONE) {
5992 rb_vm_localjump_error("no block given (yield)", Qnil, 0);
5995 return vm_invoke_block(ec, GET_CFP(), calling, ci, false, block_handler);
5999enum method_explorer_type {
6001 mexp_search_invokeblock,
6010 VALUE block_handler,
6011 enum method_explorer_type method_explorer
6016 int argc = vm_ci_argc(ci);
6017 VALUE recv = TOPN(argc);
6019 .block_handler = block_handler,
6020 .kw_splat = IS_ARGS_KW_SPLAT(ci) > 0,
6026 switch (method_explorer) {
6027 case mexp_search_method:
6028 calling.cc = cc = vm_search_method_fastpath((VALUE)reg_cfp->iseq, cd, CLASS_OF(recv));
6029 val = vm_cc_call(cc)(ec, GET_CFP(), &calling);
6031 case mexp_search_super:
6032 calling.cc = cc = vm_search_super_method(reg_cfp, cd, recv);
6033 val = vm_cc_call(cc)(ec, GET_CFP(), &calling);
6035 case mexp_search_invokeblock:
6036 val = vm_invokeblock_i(ec, GET_CFP(), &calling);
6053 if (vm_ci_flag(cd->ci) & VM_CALL_FORWARDING) {
6054 bh = vm_caller_setup_fwd_args(GET_EC(), GET_CFP(), cd, blockiseq, false, &adjusted_cd, &adjusted_ci);
6056 val = vm_sendish(ec, GET_CFP(), &adjusted_cd.cd, bh, mexp_search_method);
6058 if (cd->cc != adjusted_cd.cd.cc && vm_cc_markable(adjusted_cd.cd.cc)) {
6063 bh = vm_caller_setup_arg_block(ec, GET_CFP(), cd->ci, blockiseq, false);
6064 val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_method);
6075 VALUE bh = VM_BLOCK_HANDLER_NONE;
6076 VALUE val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_method);
6091 if (vm_ci_flag(cd->ci) & VM_CALL_FORWARDING) {
6092 bh = vm_caller_setup_fwd_args(GET_EC(), GET_CFP(), cd, blockiseq, true, &adjusted_cd, &adjusted_ci);
6094 val = vm_sendish(ec, GET_CFP(), &adjusted_cd.cd, bh, mexp_search_super);
6096 if (cd->cc != adjusted_cd.cd.cc && vm_cc_markable(adjusted_cd.cd.cc)) {
6101 bh = vm_caller_setup_arg_block(ec, GET_CFP(), cd->ci, blockiseq, true);
6102 val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_super);
6113 VALUE bh = VM_BLOCK_HANDLER_NONE;
6114 VALUE val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_invokeblock);
6142 if (check_method_basic_definition(cme)) {
6151 if (check_cfunc(cme, rb_mod_to_s)) {
6157 val = rb_mod_to_s(recv);
6163 if (check_cfunc(cme, rb_nil_to_s)) {
6164 return rb_nil_to_s(recv);
6168 if (check_cfunc(cme, rb_true_to_s)) {
6169 return rb_true_to_s(recv);
6173 if (check_cfunc(cme, rb_false_to_s)) {
6174 return rb_false_to_s(recv);
6178 if (check_cfunc(cme, rb_int_to_s)) {
6179 return rb_fix_to_s(recv);
6191 return vm_objtostring(iseq, recv, cd);
6195 vm_opt_ary_freeze(VALUE ary, int bop, ID id)
6197 if (BASIC_OP_UNREDEFINED_P(bop, ARRAY_REDEFINED_OP_FLAG)) {
6206 vm_opt_hash_freeze(VALUE hash, int bop, ID id)
6208 if (BASIC_OP_UNREDEFINED_P(bop, HASH_REDEFINED_OP_FLAG)) {
6217 vm_opt_str_freeze(VALUE str, int bop, ID id)
6219 if (BASIC_OP_UNREDEFINED_P(bop, STRING_REDEFINED_OP_FLAG)) {
6233 if (BASIC_OP_UNREDEFINED_P(BOP_INCLUDE_P, ARRAY_REDEFINED_OP_FLAG)) {
6237 VALUE args[1] = {target};
6240 RUBY_DTRACE_CREATE_HOOK(ARRAY, RARRAY_LEN(ary));
6243 return rb_vm_call_with_refinements(ec, dupary, idIncludeP, 1, args, RB_NO_KEYWORDS);
6250 return vm_opt_duparray_include_p(ec, ary, target);
6256 if (BASIC_OP_UNREDEFINED_P(BOP_MAX, ARRAY_REDEFINED_OP_FLAG)) {
6261 VALUE result = *ptr;
6262 rb_snum_t i = num - 1;
6264 const VALUE v = *++ptr;
6265 if (OPTIMIZED_CMP(v, result) > 0) {
6280 return vm_opt_newarray_max(ec, num, ptr);
6286 if (BASIC_OP_UNREDEFINED_P(BOP_MIN, ARRAY_REDEFINED_OP_FLAG)) {
6291 VALUE result = *ptr;
6292 rb_snum_t i = num - 1;
6294 const VALUE v = *++ptr;
6295 if (OPTIMIZED_CMP(v, result) < 0) {
6310 return vm_opt_newarray_min(ec, num, ptr);
6317 if (BASIC_OP_UNREDEFINED_P(BOP_HASH, ARRAY_REDEFINED_OP_FLAG)) {
6318 return rb_ary_hash_values(num, ptr);
6328 return vm_opt_newarray_hash(ec, num, ptr);
6337 if (BASIC_OP_UNREDEFINED_P(BOP_INCLUDE_P, ARRAY_REDEFINED_OP_FLAG)) {
6339 VALUE ary = rb_setup_fake_ary(&fake_ary, ptr, num);
6343 VALUE args[1] = {target};
6351 return vm_opt_newarray_include_p(ec, num, ptr, target);
6357 if (BASIC_OP_UNREDEFINED_P(BOP_PACK, ARRAY_REDEFINED_OP_FLAG)) {
6359 VALUE ary = rb_setup_fake_ary(&fake_ary, ptr, num);
6360 return rb_ec_pack_ary(ec, ary, fmt, (UNDEF_P(buffer) ? Qnil : buffer));
6370 if (!UNDEF_P(buffer)) {
6371 args[1] = rb_hash_new_with_size(1);
6372 rb_hash_aset(args[1], ID2SYM(idBuffer), buffer);
6377 return rb_vm_call_with_refinements(ec, rb_ary_new4(num, ptr), idPack, argc, args, kw_splat);
6384 return vm_opt_newarray_pack_buffer(ec, num, ptr, fmt, buffer);
6390 return vm_opt_newarray_pack_buffer(ec, num, ptr, fmt, Qundef);
6396 vm_track_constant_cache(ID id, void *ic)
6399 struct rb_id_table *const_cache = vm->constant_cache;
6400 VALUE lookup_result;
6403 if (rb_id_table_lookup(const_cache, id, &lookup_result)) {
6407 ics = set_init_numtable();
6408 rb_id_table_insert(const_cache, id, (VALUE)ics);
6423 vm->inserting_constant_cache_id = id;
6425 set_insert(ics, (st_data_t)ic);
6427 vm->inserting_constant_cache_id = (ID)0;
6434 for (int i = 0; segments[i]; i++) {
6435 ID id = segments[i];
6436 if (id == idNULL) continue;
6437 vm_track_constant_cache(id, ic);
6446 if ((flags & IMEMO_CONST_CACHE_SHAREABLE) || rb_ractor_main_p()) {
6447 VM_ASSERT(ractor_incidental_shareable_p(flags & IMEMO_CONST_CACHE_SHAREABLE, value));
6449 return (ic_cref == NULL ||
6450 ic_cref == vm_get_cref(reg_ep));
6458 VM_ASSERT(IMEMO_TYPE_P(ice, imemo_constcache));
6459 return vm_inlined_ic_hit_p(ice->flags, ice->value, ice->ic_cref, reg_ep);
6464 rb_vm_ic_hit_p(IC ic, const VALUE *reg_ep)
6466 return ic->entry && vm_ic_hit_p(ic->entry, reg_ep);
6472 if (ruby_vm_const_missing_count > 0) {
6473 ruby_vm_const_missing_count = 0;
6480 ice->ic_cref = vm_get_const_key_cref(reg_ep);
6485 unsigned pos = (unsigned)(pc - ISEQ_BODY(iseq)->iseq_encoded);
6486 rb_yjit_constant_ic_update(iseq, ic, pos);
6495 if (ice && vm_ic_hit_p(ice, GET_EP())) {
6498 VM_ASSERT(val == vm_get_ev_const_chain(ec, segments));
6501 ruby_vm_constant_cache_misses++;
6502 val = vm_get_ev_const_chain(ec, segments);
6503 vm_ic_track_const_chain(GET_CFP(), ic, segments);
6506 vm_ic_update(GET_ISEQ(), ic, val, GET_EP(), GET_PC() - 2);
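/* Inline constant caches are validated against the current CREF (and
 * invalidated when constants change).  On a miss, the full lookup runs, the
 * result is written back via vm_ic_update(), and the cache is registered
 * per constant id through vm_track_constant_cache() so a later redefinition
 * can invalidate exactly the affected call sites. */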
6518 if (is->once.running_thread == RUNNING_THREAD_ONCE_DONE) {
6519 return is->once.value;
6521 else if (is->once.running_thread == NULL) {
6523 is->once.running_thread = th;
6527 is->once.running_thread = RUNNING_THREAD_ONCE_DONE;
6530 else if (is->once.running_thread == th) {
6532 return vm_once_exec((VALUE)iseq);
6536 RUBY_VM_CHECK_INTS(ec);
6543 vm_case_dispatch(CDHASH hash, OFFSET else_offset, VALUE key)
6545 switch (OBJ_BUILTIN_TYPE(key)) {
6551 if (BASIC_OP_UNREDEFINED_P(BOP_EQQ,
6552 SYMBOL_REDEFINED_OP_FLAG |
6553 INTEGER_REDEFINED_OP_FLAG |
6554 FLOAT_REDEFINED_OP_FLAG |
6555 NIL_REDEFINED_OP_FLAG |
6556 TRUE_REDEFINED_OP_FLAG |
6557 FALSE_REDEFINED_OP_FLAG |
6558 STRING_REDEFINED_OP_FLAG)) {
6562 if (!isinf(kval) && modf(kval, &kval) == 0.0) {
6566 if (rb_hash_stlike_lookup(hash, key, &val)) {
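/* case/when over simple literals compiles to a CDHASH: as long as === is
 * unredefined for the relevant literal classes, the branch target is found
 * with one hash lookup.  Integral Floats are normalized above so that a
 * tested value of 1.0 hits the same entry as 1. */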
6586 const ptrdiff_t nsp = VM_SP_CNT(ec, cfp->sp);
6587 const ptrdiff_t nbp = VM_SP_CNT(ec, bp);
6588 static const char stack_consistency_error[] =
6589 "Stack consistency error (sp: %"PRIdPTRDIFF
", bp: %"PRIdPTRDIFF
")";
6590#if defined RUBY_DEVEL
6591 VALUE mesg = rb_sprintf(stack_consistency_error, nsp, nbp);
6596 rb_bug(stack_consistency_error, nsp, nbp);
6603 if (FIXNUM_2_P(recv, obj) &&
6604 BASIC_OP_UNREDEFINED_P(BOP_PLUS, INTEGER_REDEFINED_OP_FLAG)) {
6605 return rb_fix_plus_fix(recv, obj);
6607 else if (FLONUM_2_P(recv, obj) &&
6608 BASIC_OP_UNREDEFINED_P(BOP_PLUS, FLOAT_REDEFINED_OP_FLAG)) {
6616 BASIC_OP_UNREDEFINED_P(BOP_PLUS, FLOAT_REDEFINED_OP_FLAG)) {
6621 BASIC_OP_UNREDEFINED_P(BOP_PLUS, STRING_REDEFINED_OP_FLAG)) {
6622 return rb_str_opt_plus(recv, obj);
6626 BASIC_OP_UNREDEFINED_P(BOP_PLUS, ARRAY_REDEFINED_OP_FLAG)) {
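/* Every vm_opt_* arithmetic helper follows the same contract: take the
 * fixnum/flonum/known-class fast path only while the corresponding basic
 * operation (BOP_*) is unredefined, and otherwise return Qundef so the
 * interpreter falls back to a regular method dispatch. */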
6637 if (FIXNUM_2_P(recv, obj) &&
6638 BASIC_OP_UNREDEFINED_P(BOP_MINUS, INTEGER_REDEFINED_OP_FLAG)) {
6639 return rb_fix_minus_fix(recv, obj);
6641 else if (FLONUM_2_P(recv, obj) &&
6642 BASIC_OP_UNREDEFINED_P(BOP_MINUS, FLOAT_REDEFINED_OP_FLAG)) {
6650 BASIC_OP_UNREDEFINED_P(BOP_MINUS, FLOAT_REDEFINED_OP_FLAG)) {
6661 if (FIXNUM_2_P(recv, obj) &&
6662 BASIC_OP_UNREDEFINED_P(BOP_MULT, INTEGER_REDEFINED_OP_FLAG)) {
6663 return rb_fix_mul_fix(recv, obj);
6665 else if (FLONUM_2_P(recv, obj) &&
6666 BASIC_OP_UNREDEFINED_P(BOP_MULT, FLOAT_REDEFINED_OP_FLAG)) {
6674 BASIC_OP_UNREDEFINED_P(BOP_MULT, FLOAT_REDEFINED_OP_FLAG)) {
6685 if (FIXNUM_2_P(recv, obj) &&
6686 BASIC_OP_UNREDEFINED_P(BOP_DIV, INTEGER_REDEFINED_OP_FLAG)) {
6687 return (FIX2LONG(obj) == 0) ? Qundef : rb_fix_div_fix(recv, obj);
6689 else if (FLONUM_2_P(recv, obj) &&
6690 BASIC_OP_UNREDEFINED_P(BOP_DIV, FLOAT_REDEFINED_OP_FLAG)) {
6691 return rb_flo_div_flo(recv, obj);
6698 BASIC_OP_UNREDEFINED_P(BOP_DIV, FLOAT_REDEFINED_OP_FLAG)) {
6699 return rb_flo_div_flo(recv, obj);
6709 if (FIXNUM_2_P(recv, obj) &&
6710 BASIC_OP_UNREDEFINED_P(BOP_MOD, INTEGER_REDEFINED_OP_FLAG)) {
6711 return (FIX2LONG(obj) == 0) ? Qundef : rb_fix_mod_fix(recv, obj);
6713 else if (FLONUM_2_P(recv, obj) &&
6714 BASIC_OP_UNREDEFINED_P(BOP_MOD, FLOAT_REDEFINED_OP_FLAG)) {
6722 BASIC_OP_UNREDEFINED_P(BOP_MOD, FLOAT_REDEFINED_OP_FLAG)) {
6733 if (vm_method_cfunc_is(iseq, cd, recv, rb_obj_not_equal)) {
6734 VALUE val = opt_equality(iseq, recv, obj, cd_eq);
6736 if (!UNDEF_P(val)) {
6737 return RBOOL(!RTEST(val));
6747 if (FIXNUM_2_P(recv, obj) &&
6748 BASIC_OP_UNREDEFINED_P(BOP_LT, INTEGER_REDEFINED_OP_FLAG)) {
6751 else if (FLONUM_2_P(recv, obj) &&
6752 BASIC_OP_UNREDEFINED_P(BOP_LT, FLOAT_REDEFINED_OP_FLAG)) {
6760 BASIC_OP_UNREDEFINED_P(BOP_LT, FLOAT_REDEFINED_OP_FLAG)) {
6772 if (FIXNUM_2_P(recv, obj) &&
6773 BASIC_OP_UNREDEFINED_P(BOP_LE, INTEGER_REDEFINED_OP_FLAG)) {
6776 else if (FLONUM_2_P(recv, obj) &&
6777 BASIC_OP_UNREDEFINED_P(BOP_LE, FLOAT_REDEFINED_OP_FLAG)) {
6785 BASIC_OP_UNREDEFINED_P(BOP_LE, FLOAT_REDEFINED_OP_FLAG)) {
6797 if (FIXNUM_2_P(recv, obj) &&
6798 BASIC_OP_UNREDEFINED_P(BOP_GT, INTEGER_REDEFINED_OP_FLAG)) {
6801 else if (FLONUM_2_P(recv, obj) &&
6802 BASIC_OP_UNREDEFINED_P(BOP_GT, FLOAT_REDEFINED_OP_FLAG)) {
6810 BASIC_OP_UNREDEFINED_P(BOP_GT, FLOAT_REDEFINED_OP_FLAG)) {
6822 if (FIXNUM_2_P(recv, obj) &&
6823 BASIC_OP_UNREDEFINED_P(BOP_GE, INTEGER_REDEFINED_OP_FLAG)) {
6826 else if (FLONUM_2_P(recv, obj) &&
6827 BASIC_OP_UNREDEFINED_P(BOP_GE, FLOAT_REDEFINED_OP_FLAG)) {
6835 BASIC_OP_UNREDEFINED_P(BOP_GE, FLOAT_REDEFINED_OP_FLAG)) {
6852 BASIC_OP_UNREDEFINED_P(BOP_LTLT, STRING_REDEFINED_OP_FLAG)) {
6861 BASIC_OP_UNREDEFINED_P(BOP_LTLT, ARRAY_REDEFINED_OP_FLAG)) {
6879 BASIC_OP_UNREDEFINED_P(BOP_AND, INTEGER_REDEFINED_OP_FLAG)) {
6890 if (FIXNUM_2_P(recv, obj) &&
6891 BASIC_OP_UNREDEFINED_P(BOP_OR, INTEGER_REDEFINED_OP_FLAG)) {
6903 if (FIXNUM_2_P(recv, obj) &&
6904 BASIC_OP_UNREDEFINED_P(BOP_AREF, INTEGER_REDEFINED_OP_FLAG)) {
6905 return rb_fix_aref(recv, obj);
6910 BASIC_OP_UNREDEFINED_P(BOP_AREF, ARRAY_REDEFINED_OP_FLAG)) {
6912 return rb_ary_entry_internal(recv, FIX2LONG(obj));
6915 return rb_ary_aref1(recv, obj);
6919 BASIC_OP_UNREDEFINED_P(BOP_AREF, HASH_REDEFINED_OP_FLAG)) {
6920 return rb_hash_aref(recv, obj);
6934 BASIC_OP_UNREDEFINED_P(BOP_ASET, ARRAY_REDEFINED_OP_FLAG) &&
6940 BASIC_OP_UNREDEFINED_P(BOP_ASET, HASH_REDEFINED_OP_FLAG)) {
6941 rb_hash_aset(recv, obj, set);
6950 vm_opt_length(VALUE recv, int bop)
6956 BASIC_OP_UNREDEFINED_P(bop, STRING_REDEFINED_OP_FLAG)) {
6957 if (bop == BOP_EMPTY_P) {
6958 return LONG2NUM(RSTRING_LEN(recv));
6965 BASIC_OP_UNREDEFINED_P(bop, ARRAY_REDEFINED_OP_FLAG)) {
6969 BASIC_OP_UNREDEFINED_P(bop, HASH_REDEFINED_OP_FLAG)) {
6978 vm_opt_empty_p(VALUE recv)
6980 switch (vm_opt_length(recv, BOP_EMPTY_P)) {
6993 BASIC_OP_UNREDEFINED_P(BOP_NIL_P, NIL_REDEFINED_OP_FLAG)) {
6996 else if (vm_method_cfunc_is(iseq, cd, recv, rb_false)) {
7012 case RSHIFT(~0UL, 1):
7015 return rb_uint2big(1UL << (SIZEOF_LONG * CHAR_BIT - 2));
7033 vm_opt_succ(VALUE recv)
7036 BASIC_OP_UNREDEFINED_P(BOP_SUCC, INTEGER_REDEFINED_OP_FLAG)) {
7037 return fix_succ(recv);
7043 BASIC_OP_UNREDEFINED_P(BOP_SUCC, STRING_REDEFINED_OP_FLAG)) {
7054 if (vm_method_cfunc_is(iseq, cd, recv, rb_obj_not)) {
7055 return RBOOL(!RTEST(recv));
7070 BASIC_OP_UNREDEFINED_P(BOP_MATCH, STRING_REDEFINED_OP_FLAG)) {
7074 BASIC_OP_UNREDEFINED_P(BOP_MATCH, REGEXP_REDEFINED_OP_FLAG)) {
7092 VALUE self = GET_SELF();
7094 VM_ASSERT(rb_popcount64((uint64_t)event) == 1);
7096 if (event & global_hooks->events) {
7099 vm_dtrace(event, ec);
7100 rb_exec_event_hook_orig(ec, global_hooks, event, self, 0, 0, 0 , val, 0);
7106 if (local_hooks != NULL) {
7107 if (event & local_hooks->events) {
7110 rb_exec_event_hook_orig(ec, local_hooks, event, self, 0, 0, 0 , val, 0);
7116#define VM_TRACE_HOOK(target_event, val) do { \
7117 if ((pc_events & (target_event)) & enabled_flags) { \
7118 vm_trace_hook(ec, reg_cfp, pc, pc_events, (target_event), global_hooks, local_hooks_ptr, (val)); \
7125 VM_ASSERT(VM_FRAME_RUBYFRAME_P(cfp));
7126 VM_ASSERT(ISEQ_BODY(cfp->iseq)->type == ISEQ_TYPE_RESCUE);
7127 return cfp->ep[VM_ENV_INDEX_LAST_LVAR];
7133 const VALUE *pc = reg_cfp->pc;
7134 rb_event_flag_t enabled_flags = ruby_vm_event_flags & ISEQ_TRACE_EVENTS;
7137 if (enabled_flags == 0 && ruby_vm_event_local_num == 0) {
7143 size_t pos = pc - ISEQ_BODY(iseq)->iseq_encoded;
7146 rb_hook_list_t *const *local_hooks_ptr = &iseq->aux.exec.local_hooks;
7147 rb_event_flag_t iseq_local_events = local_hooks != NULL ? local_hooks->events : 0;
7151 const bool bmethod_frame = VM_FRAME_BMETHOD_P(reg_cfp);
7152 enabled_flags |= iseq_local_events;
7154 VM_ASSERT((iseq_local_events & ~ISEQ_TRACE_EVENTS) == 0);
7156 if (bmethod_frame) {
7158 VM_ASSERT(me->def->type == VM_METHOD_TYPE_BMETHOD);
7159 bmethod_local_hooks = me->def->body.bmethod.hooks;
7160 bmethod_local_hooks_ptr = &me->def->body.bmethod.hooks;
7161 if (bmethod_local_hooks) {
7162 bmethod_local_events = bmethod_local_hooks->events;
7167 if ((pc_events & enabled_flags) == 0 && !bmethod_frame) {
7171 rb_iseq_trace_set(iseq, vm_event_flags & ISEQ_TRACE_EVENTS);
7179 else if (ec->trace_arg != NULL) {
7187 rb_event_flag_t bmethod_events = global_events | bmethod_local_events;
7190 ruby_debug_printf(
"vm_trace>>%4d (%4x) - %s:%d %s\n",
7193 RSTRING_PTR(rb_iseq_path(iseq)),
7194 (
int)rb_iseq_line_no(iseq, pos),
7195 RSTRING_PTR(rb_iseq_label(iseq)));
7197 VM_ASSERT(reg_cfp->pc == pc);
7198 VM_ASSERT(pc_events != 0);
7208 VM_TRACE_HOOK(RUBY_EVENT_COVERAGE_LINE,
Qundef);
7209 VM_TRACE_HOOK(RUBY_EVENT_COVERAGE_BRANCH,
Qundef);
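/*
 * [Editor's sketch, condensing the flag logic above] an event fires iff it is
 * annotated on this PC and enabled by at least one of: the global (ractor)
 * hooks, the iseq-local "targeted" hooks, or, in a bmethod frame, the
 * method-local hooks:
 */
static inline int
sketch_event_fires(rb_event_flag_t pc_events, rb_event_flag_t target_event,
                   rb_event_flag_t global_events, rb_event_flag_t iseq_local_events,
                   rb_event_flag_t bmethod_local_events)
{
    rb_event_flag_t enabled = global_events | iseq_local_events | bmethod_local_events;
    return ((pc_events & target_event) & enabled) != 0;
}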
#if VM_CHECK_MODE > 0
NORETURN(NOINLINE(COLDFUNC void rb_vm_canary_is_found_dead(enum ruby_vminsn_type i, VALUE c)));

void
Init_vm_stack_canary(void)
{
    /* This has to be called _after_ the PRNG is properly set up. */
    int n = ruby_fill_random_bytes(&vm_stack_canary, sizeof vm_stack_canary, false);
    vm_stack_canary |= 0x01; /* an odd VALUE is a Fixnum, never a heap pointer */
    vm_stack_canary_was_born = true;
    VM_ASSERT(n == 0);
}
void
rb_vm_canary_is_found_dead(enum ruby_vminsn_type i, VALUE c)
{
    const char *insn = rb_insns_name(i);
    VALUE inspection = rb_inspect(c);
    const char *str = StringValueCStr(inspection);

    rb_bug("dead canary found at %s: %s", insn, str);
}

#else
void Init_vm_stack_canary(void) { /* nothing to do */ }
#endif
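/*
 * [Editor's note] The canary guards instructions declared "leaf" (no VM stack
 * use): SETUP_CANARY stores vm_stack_canary in the slot just above the stack
 * top, and CHECK_CANARY verifies it afterwards, calling
 * rb_vm_canary_is_found_dead() on a mismatch. The |= 0x01 above makes the
 * canary an odd VALUE (a Fixnum), so the GC never chases it as a pointer.
 * [Sketch of the check, simplified from the CHECK_CANARY macro:]
 */
static void
sketch_check_canary(const VALUE *sp, enum ruby_vminsn_type insn)
{
    if (*sp != vm_stack_canary) {
        /* something wrote to the VM stack inside an allegedly leaf insn */
        rb_vm_canary_is_found_dead(insn, *sp);
    }
}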
    /* builtin_invoker0 .. builtin_invoker8 (fragments): one trampoline per
     * arity, each casting the type-erased funcptr back to its exact signature */
    return (*(rb_invoke_funcptr0_t)funcptr)(ec, self);
    return (*(rb_invoke_funcptr1_t)funcptr)(ec, self, argv[0]);
    return (*(rb_invoke_funcptr2_t)funcptr)(ec, self, argv[0], argv[1]);
    return (*(rb_invoke_funcptr3_t)funcptr)(ec, self, argv[0], argv[1], argv[2]);
    return (*(rb_invoke_funcptr4_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3]);
    return (*(rb_invoke_funcptr5_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4]);
    return (*(rb_invoke_funcptr6_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
    return (*(rb_invoke_funcptr7_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
    return (*(rb_invoke_funcptr8_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
    /* builtin_invoker9 (fragment) */
    typedef VALUE (*rb_invoke_funcptr9_t)(rb_execution_context_t *ec, VALUE self,
                                          VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5,
                                          VALUE v6, VALUE v7, VALUE v8, VALUE v9);
    return (*(rb_invoke_funcptr9_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);

    /* builtin_invoker10 (fragment) */
    typedef VALUE (*rb_invoke_funcptr10_t)(rb_execution_context_t *ec, VALUE self,
                                           VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5,
                                           VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10);
    return (*(rb_invoke_funcptr10_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);

    /* builtin_invoker11 (fragment) */
    typedef VALUE (*rb_invoke_funcptr11_t)(rb_execution_context_t *ec, VALUE self,
                                           VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6,
                                           VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11);
    return (*(rb_invoke_funcptr11_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);

    /* builtin_invoker12 (fragment) */
    typedef VALUE (*rb_invoke_funcptr12_t)(rb_execution_context_t *ec, VALUE self,
                                           VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6,
                                           VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11, VALUE v12);
    return (*(rb_invoke_funcptr12_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);

    /* builtin_invoker13 (fragment) */
    typedef VALUE (*rb_invoke_funcptr13_t)(rb_execution_context_t *ec, VALUE self,
                                           VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7,
                                           VALUE v8, VALUE v9, VALUE v10, VALUE v11, VALUE v12, VALUE v13);
    return (*(rb_invoke_funcptr13_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);

    /* builtin_invoker14 (fragment) */
    typedef VALUE (*rb_invoke_funcptr14_t)(rb_execution_context_t *ec, VALUE self,
                                           VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7,
                                           VALUE v8, VALUE v9, VALUE v10, VALUE v11, VALUE v12, VALUE v13, VALUE v14);
    return (*(rb_invoke_funcptr14_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);

    /* builtin_invoker15 (fragment) */
    typedef VALUE (*rb_invoke_funcptr15_t)(rb_execution_context_t *ec, VALUE self,
                                           VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8,
                                           VALUE v9, VALUE v10, VALUE v11, VALUE v12, VALUE v13, VALUE v14, VALUE v15);
    return (*(rb_invoke_funcptr15_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
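/*
 * [Editor's note] C cannot portably call a function pointer with a
 * runtime-chosen argument count, so the VM keeps one trampoline per arity
 * (0..15); each casts the type-erased funcptr to its one exact signature and
 * splats argv. Calling through a mismatched function type would be undefined
 * behavior, hence the per-arity typedefs above. [Hypothetical usage sketch —
 * sketch_builtin_pair is not in the source:]
 */
static VALUE
sketch_builtin_pair(rb_execution_context_t *ec, VALUE self, VALUE a, VALUE b)
{
    return rb_assoc_new(a, b); /* => [a, b] */
}
/* builtin_invoker2(ec, self, argv, (rb_insn_func_t)(uintptr_t)sketch_builtin_pair)
 * ends up calling sketch_builtin_pair(ec, self, argv[0], argv[1]). */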
static builtin_invoker
lookup_builtin_invoker(int argc)
{
    static const builtin_invoker invokers[] = {
        /* ... builtin_invoker0 through builtin_invoker15, indexed by arity ... */
    };
    return invokers[argc];
}
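/*
 * [Editor's note] lookup_builtin_invoker is a plain arity-indexed table:
 * invokers[argc] is the trampoline whose signature matches bf->argc, e.g.
 * lookup_builtin_invoker(2) == builtin_invoker2. There is no bounds check
 * here; presumably the builtin loader guarantees 0 <= argc <= 15.
 */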
    /* invoke_bf (fragment) */
    const bool canary_p = ISEQ_BODY(reg_cfp->iseq)->builtin_attrs & BUILTIN_ATTR_LEAF; /* verify the leafness assumption */
    SETUP_CANARY(canary_p);
    rb_insn_func_t func_ptr = (rb_insn_func_t)(uintptr_t)bf->func_ptr;
    VALUE ret = (*lookup_builtin_invoker(bf->argc))(ec, reg_cfp->self, argv, func_ptr);
    CHECK_CANARY(canary_p, BIN(invokebuiltin));
    return ret;

    /* rb_vm_invoke_builtin (fragment): the public entry point just forwards */
    return invoke_bf(ec, cfp, bf, argv);
static VALUE
vm_invoke_builtin_delegate(rb_execution_context_t *ec, rb_control_frame_t *cfp,
                           const struct rb_builtin_function *bf, unsigned int start_index)
{
    if (0) { // debug print
        fputs("vm_invoke_builtin_delegate: passing -> ", stderr);
        for (int i = 0; i < bf->argc; i++) {
            ruby_debug_printf(":%s ", rb_id2name(ISEQ_BODY(cfp->iseq)->local_table[i+start_index]));
        }
        ruby_debug_printf("\n" "%s %s(%d):%p\n", RUBY_FUNCTION_NAME_STRING,
                          bf->name, bf->argc, (void *)(uintptr_t)bf->func_ptr);
    }

    if (bf->argc == 0) {
        return invoke_bf(ec, cfp, bf, NULL);
    }
    else {
        const VALUE *argv = cfp->ep - ISEQ_BODY(cfp->iseq)->local_table_size - VM_ENV_DATA_SIZE + 1 + start_index;
        return invoke_bf(ec, cfp, bf, argv);
    }
}
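/*
 * [Editor's note] For argc > 0 the delegate does not copy arguments: it points
 * argv straight into the caller's frame. Locals sit below ep, so the first
 * local is at ep - local_table_size - VM_ENV_DATA_SIZE + 1, and start_index
 * skips leading locals that are not forwarded. [Sketch of the address math for
 * a frame with 3 locals (l0 l1 l2) and VM_ENV_DATA_SIZE == 3:]
 *
 *     ep[-5] ep[-4] ep[-3] | ep[-2] ep[-1] ep[0]
 *       l0     l1     l2   |   env data ...  ^ep
 *
 *     argv = ep - 3 - 3 + 1 + start_index = &l0 + start_index
 */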
    /* rb_vm_lvar_exposed (fragment): exposes a local variable slot of the
     * caller's frame to builtin functions */
    return cfp->ep[index];