11#include "ruby/internal/config.h"
15#ifdef HAVE_STDATOMIC_H
16 #include <stdatomic.h>
20#include "debug_counter.h"
22#include "internal/class.h"
23#include "internal/compar.h"
24#include "internal/hash.h"
25#include "internal/numeric.h"
26#include "internal/proc.h"
27#include "internal/random.h"
28#include "internal/variable.h"
29#include "internal/set_table.h"
30#include "internal/struct.h"
35#include "insns_info.inc"
41 int argc,
const VALUE *argv,
int priv);
51ruby_vm_special_exception_copy(
VALUE exc)
54 rb_obj_copy_ivar(e, exc);
62 VALUE mesg = rb_ec_vm_ptr(ec)->special_exceptions[ruby_error_sysstack];
63 ec->raised_flag = RAISED_STACKOVERFLOW;
65 VALUE at = rb_ec_backtrace_object(ec);
66 mesg = ruby_vm_special_exception_copy(mesg);
71 EC_JUMP_TAG(ec, TAG_RAISE);
74NORETURN(
static void vm_stackoverflow(
void));
79 ec_stack_overflow(GET_EC(), TRUE);
86 rb_bug(
"system stack overflow during GC. Faulty native extension?");
88 if (crit >= rb_stack_overflow_fatal) {
89 ec->raised_flag = RAISED_STACKOVERFLOW;
90 ec->errinfo = rb_ec_vm_ptr(ec)->special_exceptions[ruby_error_stackfatal];
91 EC_JUMP_TAG(ec, TAG_RAISE);
93 ec_stack_overflow(ec, crit < rb_stack_overflow_signal);
100callable_class_p(
VALUE klass)
102#if VM_CHECK_MODE >= 2
103 if (!klass)
return FALSE;
131 VM_ASSERT(IMEMO_TYPE_P((
VALUE)cme, imemo_ment),
"imemo_type:%s", rb_imemo_name(imemo_type((
VALUE)cme)));
133 if (callable_class_p(cme->defined_class)) {
143vm_check_frame_detail(
VALUE type,
int req_block,
int req_me,
int req_cref,
VALUE specval,
VALUE cref_or_me,
int is_cframe,
const rb_iseq_t *iseq)
145 unsigned int magic = (
unsigned int)(
type & VM_FRAME_MAGIC_MASK);
146 enum imemo_type cref_or_me_type = imemo_env;
149 cref_or_me_type = imemo_type(cref_or_me);
151 if (
type & VM_FRAME_FLAG_BMETHOD) {
155 if (req_block && (
type & VM_ENV_FLAG_LOCAL) == 0) {
156 rb_bug(
"vm_push_frame: specval (%p) should be a block_ptr on %x frame", (
void *)specval, magic);
158 if (!req_block && (
type & VM_ENV_FLAG_LOCAL) != 0) {
159 rb_bug(
"vm_push_frame: specval (%p) should not be a block_ptr on %x frame", (
void *)specval, magic);
163 if (cref_or_me_type != imemo_ment) {
164 rb_bug(
"vm_push_frame: (%s) should be method entry on %x frame", rb_obj_info(cref_or_me), magic);
168 if (req_cref && cref_or_me_type != imemo_cref) {
169 rb_bug(
"vm_push_frame: (%s) should be CREF on %x frame", rb_obj_info(cref_or_me), magic);
172 if (cref_or_me !=
Qfalse && cref_or_me_type != imemo_cref) {
173 if (((
type & VM_FRAME_FLAG_LAMBDA) || magic == VM_FRAME_MAGIC_IFUNC || magic == VM_FRAME_MAGIC_DUMMY) && (cref_or_me_type == imemo_ment)) {
177 rb_bug(
"vm_push_frame: (%s) should be false or cref on %x frame", rb_obj_info(cref_or_me), magic);
183 if (cref_or_me_type == imemo_ment) {
186 if (!callable_method_entry_p(me)) {
187 rb_bug(
"vm_push_frame: ment (%s) should be callable on %x frame.", rb_obj_info(cref_or_me), magic);
191 if ((
type & VM_FRAME_MAGIC_MASK) == VM_FRAME_MAGIC_DUMMY) {
192 VM_ASSERT(iseq == NULL ||
194 RUBY_VM_NORMAL_ISEQ_P(iseq)
198 VM_ASSERT(is_cframe == !RUBY_VM_NORMAL_ISEQ_P(iseq));
208 VALUE given_magic =
type & VM_FRAME_MAGIC_MASK;
211#define CHECK(magic, req_block, req_me, req_cref, is_cframe) \
213 vm_check_frame_detail(type, req_block, req_me, req_cref, \
214 specval, cref_or_me, is_cframe, iseq); \
216 switch (given_magic) {
218 CHECK(VM_FRAME_MAGIC_METHOD, TRUE, TRUE, FALSE, FALSE);
219 CHECK(VM_FRAME_MAGIC_CLASS, TRUE, FALSE, TRUE, FALSE);
220 CHECK(VM_FRAME_MAGIC_TOP, TRUE, FALSE, TRUE, FALSE);
221 CHECK(VM_FRAME_MAGIC_CFUNC, TRUE, TRUE, FALSE, TRUE);
222 CHECK(VM_FRAME_MAGIC_BLOCK, FALSE, FALSE, FALSE, FALSE);
223 CHECK(VM_FRAME_MAGIC_IFUNC, FALSE, FALSE, FALSE, TRUE);
224 CHECK(VM_FRAME_MAGIC_EVAL, FALSE, FALSE, FALSE, FALSE);
225 CHECK(VM_FRAME_MAGIC_RESCUE, FALSE, FALSE, FALSE, FALSE);
226 CHECK(VM_FRAME_MAGIC_DUMMY, TRUE, FALSE, FALSE, FALSE);
228 rb_bug(
"vm_push_frame: unknown type (%x)", (
unsigned int)given_magic);
233static VALUE vm_stack_canary;
234static bool vm_stack_canary_was_born =
false;
241 unsigned int pos = 0;
242 while (pos < ISEQ_BODY(iseq)->iseq_size) {
243 int opcode = rb_vm_insn_addr2opcode((
void *)ISEQ_BODY(iseq)->iseq_encoded[pos]);
244 unsigned int next_pos = pos + insn_len(opcode);
245 if (ISEQ_BODY(iseq)->iseq_encoded + next_pos == pc) {
250 rb_bug(
"failed to find the previous insn");
259 if (! LIKELY(vm_stack_canary_was_born)) {
262 else if ((
VALUE *)reg_cfp == ec->vm_stack + ec->vm_stack_size) {
266 else if (! (iseq = GET_ISEQ())) {
269 else if (LIKELY(sp[0] != vm_stack_canary)) {
278 const VALUE *orig = rb_iseq_original_iseq(iseq);
279 const VALUE iseqw = rb_iseqw_new(iseq);
281 const char *stri = rb_str_to_cstr(inspection);
282 const VALUE disasm = rb_iseq_disasm(iseq);
283 const char *strd = rb_str_to_cstr(disasm);
284 const ptrdiff_t pos = previous_insn_index(iseq, GET_PC());
285 const enum ruby_vminsn_type insn = (
enum ruby_vminsn_type)orig[pos];
286 const char *name = insn_name(insn);
292 "We are killing the stack canary set by %s, "
293 "at %s@pc=%"PRIdPTR
"\n"
294 "watch out the C stack trace.\n"
296 name, stri, pos, strd);
297 rb_bug(
"see above.");
299#define vm_check_canary(ec, sp) rb_vm_check_canary(ec, sp)
302#define vm_check_canary(ec, sp)
303#define vm_check_frame(a, b, c, d)
308vm_push_frame_debug_counter_inc(
315 RB_DEBUG_COUNTER_INC(frame_push);
317 if (RUBY_VM_END_CONTROL_FRAME(ec) != prev_cfp) {
318 const bool curr = VM_FRAME_RUBYFRAME_P(reg_cfp);
319 const bool prev = VM_FRAME_RUBYFRAME_P(prev_cfp);
322 RB_DEBUG_COUNTER_INC(frame_R2R);
325 RB_DEBUG_COUNTER_INC(frame_R2C);
330 RB_DEBUG_COUNTER_INC(frame_C2R);
333 RB_DEBUG_COUNTER_INC(frame_C2C);
338 switch (
type & VM_FRAME_MAGIC_MASK) {
339 case VM_FRAME_MAGIC_METHOD: RB_DEBUG_COUNTER_INC(frame_push_method);
return;
340 case VM_FRAME_MAGIC_BLOCK: RB_DEBUG_COUNTER_INC(frame_push_block);
return;
341 case VM_FRAME_MAGIC_CLASS: RB_DEBUG_COUNTER_INC(frame_push_class);
return;
342 case VM_FRAME_MAGIC_TOP: RB_DEBUG_COUNTER_INC(frame_push_top);
return;
343 case VM_FRAME_MAGIC_CFUNC: RB_DEBUG_COUNTER_INC(frame_push_cfunc);
return;
344 case VM_FRAME_MAGIC_IFUNC: RB_DEBUG_COUNTER_INC(frame_push_ifunc);
return;
345 case VM_FRAME_MAGIC_EVAL: RB_DEBUG_COUNTER_INC(frame_push_eval);
return;
346 case VM_FRAME_MAGIC_RESCUE: RB_DEBUG_COUNTER_INC(frame_push_rescue);
return;
347 case VM_FRAME_MAGIC_DUMMY: RB_DEBUG_COUNTER_INC(frame_push_dummy);
return;
350 rb_bug(
"unreachable");
353#define vm_push_frame_debug_counter_inc(ec, cfp, t)
/* Accessor returning the process-wide stack-canary value (file-static
 * vm_stack_canary) used by the VM stack-corruption debug checks.
 * NOTE(review): fragmentary excerpt — return type and braces are not
 * visible here; code left byte-identical. */
358rb_vm_stack_canary(
void)
361 return vm_stack_canary;
367STATIC_ASSERT(VM_ENV_DATA_INDEX_ME_CREF, VM_ENV_DATA_INDEX_ME_CREF == -2);
368STATIC_ASSERT(VM_ENV_DATA_INDEX_SPECVAL, VM_ENV_DATA_INDEX_SPECVAL == -1);
369STATIC_ASSERT(VM_ENV_DATA_INDEX_FLAGS, VM_ENV_DATA_INDEX_FLAGS == -0);
385 vm_check_frame(
type, specval, cref_or_me, iseq);
386 VM_ASSERT(local_size >= 0);
389 CHECK_VM_STACK_OVERFLOW0(cfp, sp, local_size + stack_max);
390 vm_check_canary(ec, sp);
395 for (
int i=0; i < local_size; i++) {
422 #if defined HAVE_DECL_ATOMIC_SIGNAL_FENCE && HAVE_DECL_ATOMIC_SIGNAL_FENCE
423 atomic_signal_fence(memory_order_seq_cst);
431 vm_push_frame_debug_counter_inc(ec, cfp,
type);
439 if (VMDEBUG == 2) SDR();
441 ec->cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
448 VALUE flags = ep[VM_ENV_DATA_INDEX_FLAGS];
450 if (VMDEBUG == 2) SDR();
452 RUBY_VM_CHECK_INTS(ec);
453 ec->cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
455 return flags & VM_FRAME_FLAG_FINISH;
461 vm_pop_frame(ec, ec->cfp, ec->cfp->ep);
469 rb_iseq_t *dmy_iseq = rb_iseq_alloc_with_dummy_path(fname);
473 VM_FRAME_MAGIC_DUMMY | VM_ENV_FLAG_LOCAL | VM_FRAME_FLAG_FINISH,
475 VM_BLOCK_HANDLER_NONE,
482 return (
VALUE)dmy_iseq;
/* Builds the ArgumentError message for a wrong-arity call: starts with
 * "wrong number of arguments (given %d, expected %d" using argc/min,
 * then (on the visible branch) appends "..max" for a range of accepted
 * arities. NOTE(review): fragment — the closing ")" of the message, the
 * fixed-arity branch, and the exception construction are outside this
 * excerpt; code left byte-identical. */
487rb_arity_error_new(
int argc,
int min,
int max)
489 VALUE err_mess = rb_sprintf(
"wrong number of arguments (given %d, expected %d", argc, min);
497 rb_str_catf(err_mess,
"..%d", max);
504rb_error_arity(
int argc,
int min,
int max)
511NOINLINE(
static void vm_env_write_slowpath(
const VALUE *ep,
int index,
VALUE v));
/* Slow-path env slot write: remembers the env object for the GC write
 * barrier, forces the store into ep[index], then clears
 * VM_ENV_FLAG_WB_REQUIRED so later writes to this env can take the
 * fast path, and bumps the lvar_set_slowpath debug counter.
 * NOTE(review): fragmentary excerpt (upstream line numbers embedded,
 * braces not visible); code left byte-identical. */
514vm_env_write_slowpath(
const VALUE *ep,
int index,
VALUE v)
517 rb_gc_writebarrier_remember(VM_ENV_ENVVAL(ep));
518 VM_FORCE_WRITE(&ep[index], v);
519 VM_ENV_FLAGS_UNSET(ep, VM_ENV_FLAG_WB_REQUIRED);
520 RB_DEBUG_COUNTER_INC(lvar_set_slowpath);
/* Write v into env slot ep[index]. Fast path: when the env's
 * VM_ENV_FLAG_WB_REQUIRED flag is clear, store directly via
 * VM_STACK_ENV_WRITE; otherwise defer to vm_env_write_slowpath, which
 * runs the GC write barrier first. NOTE(review): fragmentary excerpt —
 * braces and the else keyword between the two paths are not visible;
 * code left byte-identical. */
525vm_env_write(
const VALUE *ep,
int index,
VALUE v)
527 VALUE flags = ep[VM_ENV_DATA_INDEX_FLAGS];
528 if (LIKELY((flags & VM_ENV_FLAG_WB_REQUIRED) == 0)) {
529 VM_STACK_ENV_WRITE(ep, index, v);
532 vm_env_write_slowpath(ep, index, v);
/* Public (non-static) wrapper delegating to the internal vm_env_write.
 * NOTE(review): fragment — return type and braces not visible; code
 * left byte-identical. */
537rb_vm_env_write(
const VALUE *ep,
int index,
VALUE v)
539 vm_env_write(ep, index, v);
545 if (block_handler == VM_BLOCK_HANDLER_NONE) {
549 switch (vm_block_handler_type(block_handler)) {
550 case block_handler_type_iseq:
551 case block_handler_type_ifunc:
552 return rb_vm_make_proc(ec, VM_BH_TO_CAPT_BLOCK(block_handler),
rb_cProc);
553 case block_handler_type_symbol:
554 return rb_sym_to_proc(VM_BH_TO_SYMBOL(block_handler));
555 case block_handler_type_proc:
556 return VM_BH_TO_PROC(block_handler);
558 VM_UNREACHABLE(rb_vm_bh_to_procval);
567vm_svar_valid_p(
VALUE svar)
570 switch (imemo_type(svar)) {
579 rb_bug(
"vm_svar_valid_p: unknown type: %s", rb_obj_info(svar));
589 if (lep && (ec == NULL || ec->root_lep != lep)) {
590 svar = lep[VM_ENV_DATA_INDEX_ME_CREF];
593 svar = ec->root_svar;
596 VM_ASSERT(svar ==
Qfalse || vm_svar_valid_p(svar));
604 VM_ASSERT(vm_svar_valid_p((
VALUE)svar));
606 if (lep && (ec == NULL || ec->root_lep != lep)) {
607 vm_env_write(lep, VM_ENV_DATA_INDEX_ME_CREF, (
VALUE)svar);
610 RB_OBJ_WRITE(rb_ec_thread_ptr(ec)->self, &ec->root_svar, svar);
617 const struct vm_svar *svar = lep_svar(ec, lep);
622 case VM_SVAR_LASTLINE:
623 return svar->lastline;
624 case VM_SVAR_BACKREF:
625 return svar->backref;
627 const VALUE ary = svar->others;
642 struct vm_svar *svar = IMEMO_NEW(
struct vm_svar, imemo_svar, obj);
653 struct vm_svar *svar = lep_svar(ec, lep);
656 lep_svar_write(ec, lep, svar = svar_new((
VALUE)svar));
660 case VM_SVAR_LASTLINE:
663 case VM_SVAR_BACKREF:
667 VALUE ary = svar->others;
683 val = lep_svar_get(ec, lep, key);
686 VALUE backref = lep_svar_get(ec, lep, VM_SVAR_BACKREF);
703 rb_bug(
"unexpected back-ref");
716 VALUE backref = lep_svar_get(ec, lep, VM_SVAR_BACKREF);
726 return rb_reg_last_defined(backref);
728 rb_bug(
"unexpected back-ref");
732 nth = (int)(
type >> 1);
/* Inspect the ME/CREF slot value `obj`: returns NULL for Qfalse,
 * aborts via rb_bug on a non-T_IMEMO value, then dispatches on
 * imemo_type(obj). The visible rb_bug arm rejects an svar appearing
 * where it should not (presumably when can_be_svar is false — the
 * case labels are outside this excerpt; TODO confirm against full
 * source). NOTE(review): fragmentary excerpt; code left
 * byte-identical. */
739check_method_entry(
VALUE obj,
int can_be_svar)
741 if (obj ==
Qfalse)
return NULL;
744 if (!
RB_TYPE_P(obj,
T_IMEMO)) rb_bug(
"check_method_entry: unknown type: %s", rb_obj_info(obj));
747 switch (imemo_type(obj)) {
758 rb_bug(
"check_method_entry: svar should not be there:");
/* Walks the env chain from the frame's ep toward its local env,
 * returning the first method entry found in a ME/CREF slot
 * (svar disallowed on intermediate envs, allowed — can_be_svar=TRUE —
 * on the final local env). NOTE(review): fragment of the enclosing
 * lookup function; its signature and braces are not visible; code
 * left byte-identical. */
767 const VALUE *ep = cfp->ep;
770 while (!VM_ENV_LOCAL_P(ep)) {
771 if ((me = check_method_entry(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) != NULL)
return me;
772 ep = VM_ENV_PREV_EP(ep);
775 return check_method_entry(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
781 switch (me->def->type) {
782 case VM_METHOD_TYPE_ISEQ:
783 return me->def->body.iseq.
iseqptr;
792 switch (me->def->type) {
793 case VM_METHOD_TYPE_ISEQ:
794 return me->def->body.iseq.
cref;
800#if VM_CHECK_MODE == 0
804check_cref(
VALUE obj,
int can_be_svar)
806 if (obj ==
Qfalse)
return NULL;
809 if (!
RB_TYPE_P(obj,
T_IMEMO)) rb_bug(
"check_cref: unknown type: %s", rb_obj_info(obj));
812 switch (imemo_type(obj)) {
823 rb_bug(
"check_method_entry: svar should not be there:");
/* Walks the env chain from ep to the local env and returns the first
 * CREF stored in a ME/CREF slot, mirroring the method-entry walk above
 * but using check_cref (svar permitted only on the final local env).
 * NOTE(review): fragment — return type, declaration of `cref`, and
 * braces are outside this excerpt; code left byte-identical. */
830vm_env_cref(
const VALUE *ep)
834 while (!VM_ENV_LOCAL_P(ep)) {
835 if ((cref = check_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) != NULL)
return cref;
836 ep = VM_ENV_PREV_EP(ep);
839 return check_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
843is_cref(
const VALUE v,
int can_be_svar)
846 switch (imemo_type(v)) {
/* Predicate variant of the env-chain walk: TRUE if any ME/CREF slot
 * along the chain holds a CREF (svar accepted only at the final local
 * env, via is_cref's can_be_svar flag). NOTE(review): fragment —
 * return type and braces not visible; code left byte-identical. */
859vm_env_cref_by_cref(
const VALUE *ep)
861 while (!VM_ENV_LOCAL_P(ep)) {
862 if (is_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE))
return TRUE;
863 ep = VM_ENV_PREV_EP(ep);
865 return is_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
869cref_replace_with_duplicated_cref_each_frame(
const VALUE *vptr,
int can_be_svar,
VALUE parent)
871 const VALUE v = *vptr;
875 switch (imemo_type(v)) {
878 new_cref = vm_cref_dup(cref);
883 VM_FORCE_WRITE(vptr, (
VALUE)new_cref);
888 return cref_replace_with_duplicated_cref_each_frame(&((
struct vm_svar *)v)->
cref_or_me, FALSE, v);
892 rb_bug(
"cref_replace_with_duplicated_cref_each_frame: unreachable");
901vm_cref_replace_with_duplicated_cref(
const VALUE *ep)
903 if (vm_env_cref_by_cref(ep)) {
907 while (!VM_ENV_LOCAL_P(ep)) {
908 envval = VM_ENV_ESCAPED_P(ep) ? VM_ENV_ENVVAL(ep) :
Qfalse;
909 if ((cref = cref_replace_with_duplicated_cref_each_frame(&ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE, envval)) != NULL) {
912 ep = VM_ENV_PREV_EP(ep);
914 envval = VM_ENV_ESCAPED_P(ep) ? VM_ENV_ENVVAL(ep) :
Qfalse;
915 return cref_replace_with_duplicated_cref_each_frame(&ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE, envval);
918 rb_bug(
"vm_cref_dup: unreachable");
923vm_get_cref(
const VALUE *ep)
931 rb_bug(
"vm_get_cref: unreachable");
936rb_vm_get_cref(
const VALUE *ep)
938 return vm_get_cref(ep);
949 return vm_get_cref(cfp->ep);
953vm_get_const_key_cref(
const VALUE *ep)
959 if (RCLASS_SINGLETON_P(CREF_CLASS(cref)) ||
960 RCLASS_CLONED_P(CREF_CLASS(cref)) ) {
963 cref = CREF_NEXT(cref);
976 #define ADD_NEW_CREF(new_cref) \
977 if (new_cref_tail) { \
978 RB_OBJ_WRITE(new_cref_tail, &new_cref_tail->next, new_cref); \
980 new_cref_head = new_cref; \
982 new_cref_tail = new_cref;
986 if (CREF_CLASS(cref) == old_klass) {
987 new_cref = vm_cref_new_use_prev(new_klass, METHOD_VISI_UNDEF, FALSE, cref, FALSE);
988 ADD_NEW_CREF(new_cref);
989 return new_cref_head;
991 new_cref = vm_cref_new_use_prev(CREF_CLASS(cref), METHOD_VISI_UNDEF, FALSE, cref, FALSE);
992 cref = CREF_NEXT(cref);
993 ADD_NEW_CREF(new_cref);
999 return new_cref_head;
1008 prev_cref = vm_env_cref(ep);
1014 prev_cref = vm_env_cref(cfp->ep);
1018 return vm_cref_new(klass, METHOD_VISI_PUBLIC, FALSE, prev_cref, pushed_by_eval, singleton);
1022vm_get_cbase(
const VALUE *ep)
1024 const rb_cref_t *cref = vm_get_cref(ep);
1026 return CREF_CLASS_FOR_DEFINITION(cref);
1030vm_get_const_base(
const VALUE *ep)
1032 const rb_cref_t *cref = vm_get_cref(ep);
1035 if (!CREF_PUSHED_BY_EVAL(cref)) {
1036 return CREF_CLASS_FOR_DEFINITION(cref);
1038 cref = CREF_NEXT(cref);
1045vm_check_if_namespace(
VALUE klass)
1048 rb_raise(
rb_eTypeError,
"%+"PRIsVALUE
" is not a class/module", klass);
1053vm_ensure_not_refinement_module(
VALUE self)
1056 rb_warn(
"not defined at the refinement, but at the outer class/module");
1072 if (
NIL_P(orig_klass) && allow_nil) {
1074 const rb_cref_t *root_cref = vm_get_cref(ec->cfp->ep);
1078 while (root_cref && CREF_PUSHED_BY_EVAL(root_cref)) {
1079 root_cref = CREF_NEXT(root_cref);
1082 while (cref && CREF_NEXT(cref)) {
1083 if (CREF_PUSHED_BY_EVAL(cref)) {
1087 klass = CREF_CLASS(cref);
1089 cref = CREF_NEXT(cref);
1091 if (!
NIL_P(klass)) {
1095 if ((ce = rb_const_lookup(klass,
id))) {
1096 rb_const_warn_if_deprecated(ce, klass,
id);
1099 if (am == klass)
break;
1101 if (is_defined)
return 1;
1102 if (rb_autoloading_value(klass,
id, &av, NULL))
return av;
1104 goto search_continue;
1111 if (UNLIKELY(!rb_ractor_main_p())) {
1113 rb_raise(rb_eRactorIsolationError,
1114 "can not access non-shareable objects in constant %"PRIsVALUE
"::%s by non-main ractor.",
rb_class_path(klass), rb_id2name(
id));
1125 if (root_cref && !
NIL_P(CREF_CLASS(root_cref))) {
1126 klass = vm_get_iclass(ec->cfp, CREF_CLASS(root_cref));
1140 vm_check_if_namespace(orig_klass);
1142 return rb_public_const_defined_from(orig_klass,
id);
1145 return rb_public_const_get_from(orig_klass,
id);
1153 return vm_get_ev_const(ec, orig_klass,
id, allow_nil ==
Qtrue, 0);
1161 int allow_nil = TRUE;
1162 if (segments[0] == idNULL) {
1167 while (segments[idx]) {
1168 ID id = segments[idx++];
1169 val = vm_get_ev_const(ec, val,
id, allow_nil, 0);
1182 rb_bug(
"vm_get_cvar_base: no cref");
1185 while (CREF_NEXT(cref) &&
1186 (
NIL_P(CREF_CLASS(cref)) || RCLASS_SINGLETON_P(CREF_CLASS(cref)) ||
1187 CREF_PUSHED_BY_EVAL(cref) || CREF_SINGLETON(cref))) {
1188 cref = CREF_NEXT(cref);
1190 if (top_level_raise && !CREF_NEXT(cref)) {
1194 klass = vm_get_iclass(cfp, CREF_CLASS(cref));
1202ALWAYS_INLINE(
static void fill_ivar_cache(
const rb_iseq_t *iseq,
IVC ic,
const struct rb_callcache *cc,
int is_attr, attr_index_t index, shape_id_t shape_id));
1204fill_ivar_cache(
const rb_iseq_t *iseq,
IVC ic,
const struct rb_callcache *cc,
int is_attr, attr_index_t index, shape_id_t shape_id)
1207 vm_cc_attr_index_set(cc, index, shape_id);
1210 vm_ic_attr_index_set(iseq, ic, index, shape_id);
1214#define ractor_incidental_shareable_p(cond, val) \
1215 (!(cond) || rb_ractor_shareable_p(val))
1216#define ractor_object_incidental_shareable_p(obj, val) \
1217 ractor_incidental_shareable_p(rb_ractor_shareable_p(obj), val)
1229 return default_value;
1232 shape_id_t shape_id = RBASIC_SHAPE_ID_FOR_READ(obj);
1242 if (UNLIKELY(!rb_ractor_main_p())) {
1250 if (default_value ==
Qundef) {
1258 fields_obj = RCLASS_WRITABLE_FIELDS_OBJ(obj);
1260 return default_value;
1262 ivar_list = rb_imemo_fields_ptr(fields_obj);
1263 shape_id = RBASIC_SHAPE_ID_FOR_READ(fields_obj);
1268 if (rb_obj_exivar_p(obj)) {
1269 VALUE fields_obj = 0;
1270 if (!rb_gen_fields_tbl_get(obj,
id, &fields_obj)) {
1271 return default_value;
1273 ivar_list = rb_imemo_fields_ptr(fields_obj);
1276 return default_value;
1280 shape_id_t cached_id;
1284 vm_cc_atomic_shape_and_index(cc, &cached_id, &index);
1287 vm_ic_atomic_shape_and_index(ic, &cached_id, &index);
1290 if (LIKELY(cached_id == shape_id)) {
1293 if (index == ATTR_INDEX_NOT_SET) {
1294 return default_value;
1297 val = ivar_list[index];
1298#if USE_DEBUG_COUNTER
1299 RB_DEBUG_COUNTER_INC(ivar_get_ic_hit);
1302 RB_DEBUG_COUNTER_INC(ivar_get_obj_hit);
1308#if USE_DEBUG_COUNTER
1310 if (cached_id != INVALID_SHAPE_ID) {
1311 RB_DEBUG_COUNTER_INC(ivar_get_cc_miss_set);
1314 RB_DEBUG_COUNTER_INC(ivar_get_cc_miss_unset);
1318 if (cached_id != INVALID_SHAPE_ID) {
1319 RB_DEBUG_COUNTER_INC(ivar_get_ic_miss_set);
1322 RB_DEBUG_COUNTER_INC(ivar_get_ic_miss_unset);
1325 RB_DEBUG_COUNTER_INC(ivar_get_ic_miss);
1328 RB_DEBUG_COUNTER_INC(ivar_get_obj_miss);
1332 if (rb_shape_too_complex_p(shape_id)) {
1337 table = rb_imemo_fields_complex_tbl(fields_obj);
1341 table = ROBJECT_FIELDS_HASH(obj);
1346 if (rb_gen_fields_tbl_get(obj, 0, &fields_obj)) {
1347 table = rb_imemo_fields_complex_tbl(fields_obj);
1353 if (!table || !st_lookup(table,
id, &val)) {
1354 val = default_value;
1358 shape_id_t previous_cached_id = cached_id;
1359 if (rb_shape_get_iv_index_with_hint(shape_id,
id, &index, &cached_id)) {
1362 if (cached_id != previous_cached_id) {
1363 fill_ivar_cache(iseq, ic, cc, is_attr, index, cached_id);
1366 if (index == ATTR_INDEX_NOT_SET) {
1367 val = default_value;
1371 val = ivar_list[index];
1377 vm_cc_attr_index_initialize(cc, shape_id);
1380 vm_ic_attr_index_initialize(ic, shape_id);
1383 val = default_value;
1389 if (!UNDEF_P(default_value)) {
1398 RB_DEBUG_COUNTER_INC(ivar_get_ic_miss);
1401 return rb_attr_get(obj,
id);
1409populate_cache(attr_index_t index, shape_id_t next_shape_id,
ID id,
const rb_iseq_t *iseq,
IVC ic,
const struct rb_callcache *cc,
bool is_attr)
1411 RUBY_ASSERT(!rb_shape_too_complex_p(next_shape_id));
1415 vm_cc_attr_index_set(cc, index, next_shape_id);
1418 vm_ic_attr_index_set(iseq, ic, index, next_shape_id);
1430 RB_DEBUG_COUNTER_INC(ivar_set_ic_miss);
1433 rb_check_frozen(obj);
1435 attr_index_t index = rb_obj_ivar_set(obj,
id, val);
1437 shape_id_t next_shape_id = RBASIC_SHAPE_ID(obj);
1439 if (!rb_shape_too_complex_p(next_shape_id)) {
1440 populate_cache(index, next_shape_id,
id, iseq, ic, cc, is_attr);
1443 RB_DEBUG_COUNTER_INC(ivar_set_obj_miss);
1453 return vm_setivar_slowpath(obj,
id, val, iseq, ic, NULL,
false);
1459 return vm_setivar_slowpath(obj,
id, val, NULL, NULL, cc,
true);
1462NOINLINE(
static VALUE vm_setivar_default(
VALUE obj,
ID id,
VALUE val, shape_id_t dest_shape_id, attr_index_t index));
1464vm_setivar_default(
VALUE obj,
ID id,
VALUE val, shape_id_t dest_shape_id, attr_index_t index)
1466 shape_id_t shape_id = RBASIC_SHAPE_ID(obj);
1468 VALUE fields_obj = 0;
1471 if (shape_id == dest_shape_id) {
1472 RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
1474 else if (dest_shape_id != INVALID_SHAPE_ID) {
1475 if (shape_id == RSHAPE_PARENT(dest_shape_id) && RSHAPE_EDGE_NAME(dest_shape_id) ==
id && RSHAPE_CAPACITY(shape_id) == RSHAPE_CAPACITY(dest_shape_id)) {
1476 RUBY_ASSERT(index < RSHAPE_CAPACITY(dest_shape_id));
1486 rb_gen_fields_tbl_get(obj, 0, &fields_obj);
1488 if (shape_id != dest_shape_id) {
1489 RBASIC_SET_SHAPE_ID(obj, dest_shape_id);
1492 RB_OBJ_WRITE(obj, &rb_imemo_fields_ptr(fields_obj)[index], val);
1494 RB_DEBUG_COUNTER_INC(ivar_set_ic_hit);
1500vm_setivar(
VALUE obj,
ID id,
VALUE val, shape_id_t dest_shape_id, attr_index_t index)
1508 shape_id_t shape_id = RBASIC_SHAPE_ID(obj);
1509 RUBY_ASSERT(dest_shape_id == INVALID_SHAPE_ID || !rb_shape_too_complex_p(dest_shape_id));
1511 if (LIKELY(shape_id == dest_shape_id)) {
1512 RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
1515 else if (dest_shape_id != INVALID_SHAPE_ID) {
1516 shape_id_t source_shape_id = RSHAPE_PARENT(dest_shape_id);
1518 if (shape_id == source_shape_id && RSHAPE_EDGE_NAME(dest_shape_id) ==
id && RSHAPE_CAPACITY(shape_id) == RSHAPE_CAPACITY(dest_shape_id)) {
1519 RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
1521 RBASIC_SET_SHAPE_ID(obj, dest_shape_id);
1523 RUBY_ASSERT(rb_shape_get_next_iv_shape(source_shape_id,
id) == dest_shape_id);
1524 RUBY_ASSERT(index < RSHAPE_CAPACITY(dest_shape_id));
1539 RB_DEBUG_COUNTER_INC(ivar_set_ic_hit);
1540 RB_DEBUG_COUNTER_INC(ivar_set_obj_hit);
1546 RB_DEBUG_COUNTER_INC(ivar_set_ic_miss_noobject);
1558 VALUE defined_class = 0;
1562 defined_class =
RBASIC(defined_class)->klass;
1565 struct rb_id_table *rb_cvc_tbl = RCLASS_CVC_TBL(defined_class);
1567 rb_bug(
"the cvc table should be set");
1571 if (!rb_id_table_lookup(rb_cvc_tbl,
id, &ent_data)) {
1572 rb_bug(
"should have cvar cache entry");
1577 ent->global_cvar_state = GET_GLOBAL_CVAR_STATE();
1593 cref = vm_get_cref(GET_EP());
1595 if (ic->entry && ic->entry->global_cvar_state == GET_GLOBAL_CVAR_STATE() && ic->entry->cref == cref && LIKELY(rb_ractor_main_p())) {
1596 RB_DEBUG_COUNTER_INC(cvar_read_inline_hit);
1598 VALUE v = rb_ivar_lookup(ic->entry->class_value,
id,
Qundef);
1604 VALUE klass = vm_get_cvar_base(cref, reg_cfp, 1);
1606 return update_classvariable_cache(iseq, klass,
id, cref, ic);
1612 return vm_getclassvariable(iseq, cfp,
id, ic);
1619 cref = vm_get_cref(GET_EP());
1621 if (ic->entry && ic->entry->global_cvar_state == GET_GLOBAL_CVAR_STATE() && ic->entry->cref == cref && LIKELY(rb_ractor_main_p())) {
1622 RB_DEBUG_COUNTER_INC(cvar_write_inline_hit);
1624 rb_class_ivar_set(ic->entry->class_value,
id, val);
1628 VALUE klass = vm_get_cvar_base(cref, reg_cfp, 1);
1632 update_classvariable_cache(iseq, klass,
id, cref, ic);
1638 vm_setclassvariable(iseq, cfp,
id, val, ic);
1644 return vm_getivar(obj,
id, iseq, ic, NULL, FALSE,
Qnil);
1655 shape_id_t dest_shape_id;
1657 vm_ic_atomic_shape_and_index(ic, &dest_shape_id, &index);
1659 if (UNLIKELY(UNDEF_P(vm_setivar(obj,
id, val, dest_shape_id, index)))) {
1666 if (!UNDEF_P(vm_setivar_default(obj,
id, val, dest_shape_id, index))) {
1670 vm_setivar_slowpath_ivar(obj,
id, val, iseq, ic);
1677 vm_setinstancevariable(iseq, obj,
id, val, ic);
1686 ec->tag->state = RUBY_TAG_FATAL;
1689 ec->tag->state = TAG_THROW;
1691 else if (THROW_DATA_P(err)) {
1692 ec->tag->state = THROW_DATA_STATE((
struct vm_throw_data *)err);
1695 ec->tag->state = TAG_RAISE;
1702 const int flag,
const VALUE throwobj)
1710 else if (state == TAG_BREAK) {
1712 const VALUE *ep = GET_EP();
1713 const rb_iseq_t *base_iseq = GET_ISEQ();
1714 escape_cfp = reg_cfp;
1716 while (ISEQ_BODY(base_iseq)->
type != ISEQ_TYPE_BLOCK) {
1717 if (ISEQ_BODY(escape_cfp->iseq)->type == ISEQ_TYPE_CLASS) {
1718 escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
1719 ep = escape_cfp->ep;
1720 base_iseq = escape_cfp->iseq;
1723 ep = VM_ENV_PREV_EP(ep);
1724 base_iseq = ISEQ_BODY(base_iseq)->parent_iseq;
1725 escape_cfp = rb_vm_search_cf_from_ep(ec, escape_cfp, ep);
1726 VM_ASSERT(escape_cfp->iseq == base_iseq);
1730 if (VM_FRAME_LAMBDA_P(escape_cfp)) {
1736 ep = VM_ENV_PREV_EP(ep);
1738 while (escape_cfp < eocfp) {
1739 if (escape_cfp->ep == ep) {
1740 const rb_iseq_t *
const iseq = escape_cfp->iseq;
1741 const VALUE epc = escape_cfp->pc - ISEQ_BODY(iseq)->iseq_encoded;
1746 for (i=0; i < ct->size; i++) {
1748 UNALIGNED_MEMBER_PTR(ct, entries[i]);
1750 if (entry->type == CATCH_TYPE_BREAK &&
1751 entry->iseq == base_iseq &&
1752 entry->start < epc && entry->end >= epc) {
1753 if (entry->cont == epc) {
1762 escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
1767 rb_vm_localjump_error(
"break from proc-closure", throwobj, TAG_BREAK);
1770 else if (state == TAG_RETRY) {
1771 const VALUE *ep = VM_ENV_PREV_EP(GET_EP());
1773 escape_cfp = rb_vm_search_cf_from_ep(ec, reg_cfp, ep);
1775 else if (state == TAG_RETURN) {
1776 const VALUE *current_ep = GET_EP();
1777 const VALUE *target_ep = NULL, *target_lep, *ep = current_ep;
1778 int in_class_frame = 0;
1780 escape_cfp = reg_cfp;
1783 while (!VM_ENV_LOCAL_P(ep)) {
1784 if (VM_ENV_FLAGS(ep, VM_FRAME_FLAG_LAMBDA) && target_ep == NULL) {
1787 ep = VM_ENV_PREV_EP(ep);
1791 while (escape_cfp < eocfp) {
1792 const VALUE *lep = VM_CF_LEP(escape_cfp);
1798 if (lep == target_lep &&
1799 VM_FRAME_RUBYFRAME_P(escape_cfp) &&
1800 ISEQ_BODY(escape_cfp->iseq)->type == ISEQ_TYPE_CLASS) {
1805 if (lep == target_lep) {
1806 if (VM_FRAME_LAMBDA_P(escape_cfp)) {
1808 if (in_class_frame) {
1813 const VALUE *tep = current_ep;
1815 while (target_lep != tep) {
1816 if (escape_cfp->ep == tep) {
1818 if (tep == target_ep) {
1822 goto unexpected_return;
1825 tep = VM_ENV_PREV_EP(tep);
1829 else if (VM_FRAME_RUBYFRAME_P(escape_cfp)) {
1830 switch (ISEQ_BODY(escape_cfp->iseq)->type) {
1832 case ISEQ_TYPE_MAIN:
1834 if (in_class_frame)
goto unexpected_return;
1835 if (target_ep == NULL) {
1839 goto unexpected_return;
1843 case ISEQ_TYPE_EVAL: {
1845 enum rb_iseq_type t = ISEQ_BODY(is)->type;
1846 while (t == ISEQ_TYPE_RESCUE || t == ISEQ_TYPE_ENSURE || t == ISEQ_TYPE_EVAL) {
1847 if (!(is = ISEQ_BODY(is)->parent_iseq))
break;
1848 t = ISEQ_BODY(is)->type;
1850 toplevel = t == ISEQ_TYPE_TOP || t == ISEQ_TYPE_MAIN;
1853 case ISEQ_TYPE_CLASS:
1862 if (escape_cfp->ep == target_lep && ISEQ_BODY(escape_cfp->iseq)->type == ISEQ_TYPE_METHOD) {
1863 if (target_ep == NULL) {
1867 goto unexpected_return;
1871 escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
1874 rb_vm_localjump_error(
"unexpected return", throwobj, TAG_RETURN);
1880 rb_bug(
"isns(throw): unsupported throw type");
1883 ec->tag->state = state;
1884 return (
VALUE)THROW_DATA_NEW(throwobj, escape_cfp, state);
1889 rb_num_t throw_state,
VALUE throwobj)
1891 const int state = (int)(throw_state & VM_THROW_STATE_MASK);
1892 const int flag = (int)(throw_state & VM_THROW_NO_ESCAPE_FLAG);
1895 return vm_throw_start(ec, reg_cfp, state, flag, throwobj);
1898 return vm_throw_continue(ec, throwobj);
1905 return vm_throw(ec, reg_cfp, throw_state, throwobj);
1911 int is_splat = flag & 0x01;
1914 const VALUE obj = ary;
1926 if (num + is_splat == 0) {
1929 else if (flag & 0x02) {
1934 for (i = 0; i < num -
len; i++) {
1939 for (j = 0; i < num; i++, j++) {
1961 for (; i < num -
len; i++) {
1965 for (rb_num_t j = 0; i < num; i++, j++) {
1966 *cfp->sp++ = ptr[
len - j - 1];
1970 for (rb_num_t j = 0; j < num; j++) {
1971 *cfp->sp++ = ptr[num - j - 1];
1987#if VM_CHECK_MODE > 0
1988 ccs->debug_sig = ~(
VALUE)ccs;
1994 ccs->entries = NULL;
1996 rb_id_table_insert(cc_tbl, mid, (
VALUE)ccs);
2004 if (! vm_cc_markable(cc)) {
2008 if (UNLIKELY(ccs->len == ccs->capa)) {
2009 if (ccs->capa == 0) {
2011 ccs->entries =
ALLOC_N(
struct rb_class_cc_entries_entry, ccs->capa);
2015 REALLOC_N(ccs->entries,
struct rb_class_cc_entries_entry, ccs->capa);
2018 VM_ASSERT(ccs->len < ccs->capa);
2020 const int pos = ccs->len++;
2021 ccs->entries[pos].argc = vm_ci_argc(ci);
2022 ccs->entries[pos].flag = vm_ci_flag(ci);
2025 if (RB_DEBUG_COUNTER_SETMAX(ccs_maxlen, ccs->len)) {
2031#if VM_CHECK_MODE > 0
2035 ruby_debug_printf(
"ccs:%p (%d,%d)\n", (
void *)ccs, ccs->len, ccs->capa);
2036 for (
int i=0; i<ccs->len; i++) {
2037 ruby_debug_printf(
"CCS CI ID:flag:%x argc:%u\n",
2038 ccs->entries[i].flag,
2039 ccs->entries[i].argc);
2040 rp(ccs->entries[i].cc);
2047 VM_ASSERT(vm_ccs_p(ccs));
2048 VM_ASSERT(ccs->len <= ccs->capa);
2050 for (
int i=0; i<ccs->len; i++) {
2053 VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
2054 VM_ASSERT(vm_cc_class_check(cc, klass));
2055 VM_ASSERT(vm_cc_check_cme(cc, ccs->cme));
2056 VM_ASSERT(!vm_cc_super_p(cc));
2057 VM_ASSERT(!vm_cc_refinement_p(cc));
2068 const ID mid = vm_ci_mid(ci);
2069 struct rb_id_table *cc_tbl = RCLASS_WRITABLE_CC_TBL(klass);
2076 if (rb_id_table_lookup(cc_tbl, mid, &ccs_data)) {
2078 const int ccs_len = ccs->len;
2080 if (UNLIKELY(METHOD_ENTRY_INVALIDATED(ccs->cme))) {
2081 rb_vm_ccs_free(ccs);
2082 rb_id_table_delete(cc_tbl, mid);
2086 VM_ASSERT(vm_ccs_verify(ccs, mid, klass));
2091 unsigned int argc = vm_ci_argc(ci);
2092 unsigned int flag = vm_ci_flag(ci);
2094 for (
int i=0; i<ccs_len; i++) {
2095 unsigned int ccs_ci_argc = ccs->entries[i].argc;
2096 unsigned int ccs_ci_flag = ccs->entries[i].flag;
2097 const struct rb_callcache *ccs_cc = ccs->entries[i].cc;
2099 VM_ASSERT(IMEMO_TYPE_P(ccs_cc, imemo_callcache));
2101 if (ccs_ci_argc == argc && ccs_ci_flag == flag) {
2102 RB_DEBUG_COUNTER_INC(cc_found_in_ccs);
2104 VM_ASSERT(vm_cc_cme(ccs_cc)->called_id == mid);
2105 VM_ASSERT(ccs_cc->klass == klass);
2106 VM_ASSERT(!METHOD_ENTRY_INVALIDATED(vm_cc_cme(ccs_cc)));
2115 cc_tbl = rb_id_table_create(2);
2116 RCLASS_WRITE_CC_TBL(klass, cc_tbl);
2119 RB_DEBUG_COUNTER_INC(cc_not_found_in_ccs);
2125 cme = UNDEFINED_METHOD_ENTRY_P(cme) ? NULL : cme;
2127 VM_ASSERT(cme == rb_callable_method_entry(klass, mid));
2130 cme = rb_callable_method_entry(klass, mid);
2133 VM_ASSERT(cme == NULL || IMEMO_TYPE_P(cme, imemo_ment));
2137 VM_ASSERT(vm_cc_cme(&vm_empty_cc) == NULL);
2138 return &vm_empty_cc;
2141 VM_ASSERT(cme == rb_callable_method_entry(klass, mid));
2146 VM_ASSERT(cc_tbl != NULL);
2148 if (LIKELY(rb_id_table_lookup(cc_tbl, mid, &ccs_data))) {
2154 ccs = vm_ccs_create(klass, cc_tbl, mid, cme);
2158 cme = rb_check_overloaded_cme(cme, ci);
2160 const struct rb_callcache *cc = vm_cc_new(klass, cme, vm_call_general, cc_type_normal);
2161 vm_ccs_push(klass, ccs, ci, cc);
2163 VM_ASSERT(vm_cc_cme(cc) != NULL);
2164 VM_ASSERT(cme->called_id == mid);
2165 VM_ASSERT(vm_cc_cme(cc)->called_id == mid);
2178 cc = vm_search_cc(klass, ci);
2181 VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
2182 VM_ASSERT(cc == vm_cc_empty() || cc->klass == klass);
2183 VM_ASSERT(cc == vm_cc_empty() || callable_method_entry_p(vm_cc_cme(cc)));
2184 VM_ASSERT(cc == vm_cc_empty() || !METHOD_ENTRY_INVALIDATED(vm_cc_cme(cc)));
2185 VM_ASSERT(cc == vm_cc_empty() || vm_cc_cme(cc)->called_id == vm_ci_mid(ci));
2194#if USE_DEBUG_COUNTER
2198 const struct rb_callcache *cc = rb_vm_search_method_slowpath(cd->ci, klass);
2200#if OPT_INLINE_METHOD_CACHE
2204 if (cd_owner && cc != empty_cc) {
2208#if USE_DEBUG_COUNTER
2209 if (!old_cc || old_cc == empty_cc) {
2211 RB_DEBUG_COUNTER_INC(mc_inline_miss_empty);
2213 else if (old_cc == cc) {
2214 RB_DEBUG_COUNTER_INC(mc_inline_miss_same_cc);
2216 else if (vm_cc_cme(old_cc) == vm_cc_cme(cc)) {
2217 RB_DEBUG_COUNTER_INC(mc_inline_miss_same_cme);
2219 else if (vm_cc_cme(old_cc) && vm_cc_cme(cc) &&
2220 vm_cc_cme(old_cc)->def == vm_cc_cme(cc)->def) {
2221 RB_DEBUG_COUNTER_INC(mc_inline_miss_same_def);
2224 RB_DEBUG_COUNTER_INC(mc_inline_miss_diff);
2229 VM_ASSERT(vm_cc_cme(cc) == NULL ||
2230 vm_cc_cme(cc)->called_id == vm_ci_mid(cd->ci));
2241#if OPT_INLINE_METHOD_CACHE
2242 if (LIKELY(vm_cc_class_check(cc, klass))) {
2243 if (LIKELY(!METHOD_ENTRY_INVALIDATED(vm_cc_cme(cc)))) {
2244 VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));
2245 RB_DEBUG_COUNTER_INC(mc_inline_hit);
2246 VM_ASSERT(vm_cc_cme(cc) == NULL ||
2247 (vm_ci_flag(cd->ci) & VM_CALL_SUPER) ||
2248 vm_cc_cme(cc)->called_id == vm_ci_mid(cd->ci));
2252 RB_DEBUG_COUNTER_INC(mc_inline_miss_invalidated);
2255 RB_DEBUG_COUNTER_INC(mc_inline_miss_klass);
2259 return vm_search_method_slowpath0(cd_owner, cd, klass);
2266 VM_ASSERT(klass !=
Qfalse);
2269 return vm_search_method_fastpath(cd_owner, cd, klass);
2272#if __has_attribute(transparent_union)
2285 VALUE (*f10)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE);
2286 VALUE (*f11)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE);
2287 VALUE (*f12)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE);
2288 VALUE (*f13)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE);
2289 VALUE (*f14)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE);
2290 VALUE (*f15)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE);
2293# define make_cfunc_type(f) (cfunc_type){.anyargs = (VALUE (*)(ANYARGS))(f)}
2296# define make_cfunc_type(f) (cfunc_type)(f)
2306 VM_ASSERT(IMEMO_TYPE_P(me, imemo_ment));
2307 VM_ASSERT(callable_method_entry_p(me));
2309 if (me->def->type != VM_METHOD_TYPE_CFUNC) {
2313#if __has_attribute(transparent_union)
2314 return me->def->body.cfunc.func == func.anyargs;
2316 return me->def->body.cfunc.func == func;
2325 return me && METHOD_ENTRY_BASIC(me);
2331 VM_ASSERT(iseq != NULL);
2333 return check_cfunc(vm_cc_cme(cc), func);
2336#define check_cfunc(me, func) check_cfunc(me, make_cfunc_type(func))
2337#define vm_method_cfunc_is(iseq, cd, recv, func) vm_method_cfunc_is(iseq, cd, recv, make_cfunc_type(func))
2339#define EQ_UNREDEFINED_P(t) BASIC_OP_UNREDEFINED_P(BOP_EQ, t##_REDEFINED_OP_FLAG)
2371opt_equality_specialized(
VALUE recv,
VALUE obj)
2373 if (FIXNUM_2_P(recv, obj) && EQ_UNREDEFINED_P(INTEGER)) {
2374 goto compare_by_identity;
2376 else if (FLONUM_2_P(recv, obj) && EQ_UNREDEFINED_P(FLOAT)) {
2377 goto compare_by_identity;
2380 goto compare_by_identity;
2389#if MSC_VERSION_BEFORE(1300)
2393 else if (isnan(b)) {
2398 return RBOOL(a == b);
2405 return rb_str_eql_internal(obj, recv);
2410 compare_by_identity:
2411 return RBOOL(recv == obj);
2417 VM_ASSERT(cd_owner != NULL);
2419 VALUE val = opt_equality_specialized(recv, obj);
2420 if (!UNDEF_P(val))
return val;
2422 if (!vm_method_cfunc_is(cd_owner, cd, recv, rb_obj_equal)) {
2426 return RBOOL(recv == obj);
2430#undef EQ_UNREDEFINED_P
2433NOINLINE(
static VALUE opt_equality_by_mid_slowpath(
VALUE recv,
VALUE obj,
ID mid));
2436opt_equality_by_mid_slowpath(
VALUE recv,
VALUE obj,
ID mid)
2438 const struct rb_callcache *cc = gccct_method_search(GET_EC(), recv, mid, &VM_CI_ON_STACK(mid, 0, 1, NULL));
2440 if (cc && check_cfunc(vm_cc_cme(cc), rb_obj_equal)) {
2441 return RBOOL(recv == obj);
2451 VALUE val = opt_equality_specialized(recv, obj);
2452 if (!UNDEF_P(val)) {
2456 return opt_equality_by_mid_slowpath(recv, obj, mid);
2463 return opt_equality_by_mid(obj1, obj2, idEq);
2469 return opt_equality_by_mid(obj1, obj2, idEqlP);
2479 case VM_CHECKMATCH_TYPE_WHEN:
2481 case VM_CHECKMATCH_TYPE_RESCUE:
2483 rb_raise(
rb_eTypeError,
"class or module required for rescue clause");
2486 case VM_CHECKMATCH_TYPE_CASE: {
2487 return rb_vm_call_with_refinements(ec, pattern, idEqq, 1, &target,
RB_NO_KEYWORDS);
2490 rb_bug(
"check_match: unreachable");
2495#if MSC_VERSION_BEFORE(1300)
2496#define CHECK_CMP_NAN(a, b) if (isnan(a) || isnan(b)) return Qfalse;
2498#define CHECK_CMP_NAN(a, b)
2502double_cmp_lt(
double a,
double b)
2504 CHECK_CMP_NAN(a, b);
2505 return RBOOL(a < b);
2509double_cmp_le(
double a,
double b)
2511 CHECK_CMP_NAN(a, b);
2512 return RBOOL(a <= b);
2516double_cmp_gt(
double a,
double b)
2518 CHECK_CMP_NAN(a, b);
2519 return RBOOL(a > b);
2523double_cmp_ge(
double a,
double b)
2525 CHECK_CMP_NAN(a, b);
2526 return RBOOL(a >= b);
2530static inline VALUE *
2535 if (cfp->iseq && VM_FRAME_RUBYFRAME_P(cfp)) {
2536 VALUE *bp = prev_cfp->sp + ISEQ_BODY(cfp->iseq)->local_table_size + VM_ENV_DATA_SIZE;
2538 if (ISEQ_BODY(cfp->iseq)->param.flags.forwardable && VM_ENV_LOCAL_P(cfp->ep)) {
2539 int lts = ISEQ_BODY(cfp->iseq)->local_table_size;
2540 int params = ISEQ_BODY(cfp->iseq)->param.size;
2543 bp += vm_ci_argc(ci);
2546 if (ISEQ_BODY(cfp->iseq)->type == ISEQ_TYPE_METHOD || VM_FRAME_BMETHOD_P(cfp)) {
2550#if VM_DEBUG_BP_CHECK
2551 if (bp != cfp->bp_check) {
2552 ruby_debug_printf(
"bp_check: %ld, bp: %ld\n",
2553 (
long)(cfp->bp_check - GET_EC()->vm_stack),
2554 (
long)(bp - GET_EC()->vm_stack));
2555 rb_bug(
"vm_base_ptr: unreachable");
2568 return vm_base_ptr(cfp);
2583static vm_call_handler vm_call_iseq_setup_func(
const struct rb_callinfo *ci,
const int param_size,
const int local_size);
2588 RB_DEBUG_COUNTER_INC(ccf_iseq_setup_tailcall_0start);
2590 return vm_call_iseq_setup_tailcall(ec, cfp, calling, 0);
2596 RB_DEBUG_COUNTER_INC(ccf_iseq_setup_0start);
2599 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2600 int param = ISEQ_BODY(iseq)->param.size;
2601 int local = ISEQ_BODY(iseq)->local_table_size;
2602 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);
/* Body of a "simple iseq" predicate (the function header was dropped by the
 * extraction; in upstream this is rb_simple_iseq_p — TODO confirm).
 * Returns true only when the iseq declares NO optional, rest, post, keyword,
 * keyword-rest, `**nil` (accepts_no_kwarg), forwardable (`...`), or block
 * parameters — i.e. it takes only required leading positionals, which makes
 * the fast argument-setup path applicable. */
2608 return ISEQ_BODY(iseq)->param.flags.has_opt == FALSE &&
2609 ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
2610 ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
2611 ISEQ_BODY(iseq)->param.flags.has_kw == FALSE &&
2612 ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
2613 ISEQ_BODY(iseq)->param.flags.accepts_no_kwarg == FALSE &&
2614 ISEQ_BODY(iseq)->param.flags.forwardable == FALSE &&
2615 ISEQ_BODY(iseq)->param.flags.has_block == FALSE;
2619rb_iseq_only_optparam_p(
const rb_iseq_t *iseq)
2621 return ISEQ_BODY(iseq)->param.flags.has_opt == TRUE &&
2622 ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
2623 ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
2624 ISEQ_BODY(iseq)->param.flags.has_kw == FALSE &&
2625 ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
2626 ISEQ_BODY(iseq)->param.flags.accepts_no_kwarg == FALSE &&
2627 ISEQ_BODY(iseq)->param.flags.forwardable == FALSE &&
2628 ISEQ_BODY(iseq)->param.flags.has_block == FALSE;
2632rb_iseq_only_kwparam_p(
const rb_iseq_t *iseq)
2634 return ISEQ_BODY(iseq)->param.flags.has_opt == FALSE &&
2635 ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
2636 ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
2637 ISEQ_BODY(iseq)->param.flags.has_kw == TRUE &&
2638 ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
2639 ISEQ_BODY(iseq)->param.flags.forwardable == FALSE &&
2640 ISEQ_BODY(iseq)->param.flags.has_block == FALSE;
2643#define ALLOW_HEAP_ARGV (-2)
2644#define ALLOW_HEAP_ARGV_KEEP_KWSPLAT (-3)
2649 vm_check_canary(GET_EC(), cfp->sp);
2655 int argc = calling->argc;
2657 if (UNLIKELY(max_args <= ALLOW_HEAP_ARGV && len + argc > VM_ARGC_STACK_MAX)) {
2661 VALUE *argv = cfp->sp - argc;
2665 cfp->sp -= argc - 1;
2666 cfp->sp[-1] = argv_ary;
2668 calling->heap_argv = argv_ary;
2674 if (max_args >= 0 &&
len + argc > max_args) {
2682 calling->argc +=
len - (max_args - argc + 1);
2683 len = max_args - argc + 1;
2692 calling->heap_argv = 0;
2694 CHECK_VM_STACK_OVERFLOW(cfp,
len);
2696 for (i = 0; i <
len; i++) {
2697 *cfp->sp++ = ptr[i];
2709 const VALUE *
const passed_keywords = vm_ci_kwarg(ci)->keywords;
2710 const int kw_len = vm_ci_kwarg(ci)->keyword_len;
2711 const VALUE h = rb_hash_new_with_size(kw_len);
2712 VALUE *sp = cfp->sp;
2715 for (i=0; i<kw_len; i++) {
2716 rb_hash_aset(h, passed_keywords[i], (sp - kw_len)[i]);
2720 cfp->sp -= kw_len - 1;
2721 calling->argc -= kw_len - 1;
2722 calling->kw_splat = 1;
/* Normalize a caller-side keyword-splat value into a hash the callee may
 * safely consume.  NOTE(review): interior lines of this function were
 * dropped by the extraction (the surrounding type-check branch is missing),
 * so the comments below cover only the visible fragments. */
2726vm_caller_setup_keyword_hash(
 const struct rb_callinfo *ci,
 VALUE keyword_hash)
/* Non-nil values are coerced with rb_to_hash_type and duplicated so the
 * callee can mutate its keyword hash without aliasing caller state. */
2729 if (keyword_hash !=
 Qnil) {
2731 keyword_hash = rb_hash_dup(rb_to_hash_type(keyword_hash));
/* A hash that the call site did not mark as mutable (KW_SPLAT_MUT) and
 * that is non-empty must be duplicated before handing it to the callee. */
2734 else if (!IS_ARGS_KW_SPLAT_MUT(ci) && !
 RHASH_EMPTY_P(keyword_hash)) {
2740 keyword_hash = rb_hash_dup(keyword_hash);
2742 return keyword_hash;
2748 const struct rb_callinfo *restrict ci,
int max_args)
2750 if (UNLIKELY(IS_ARGS_SPLAT(ci))) {
2751 if (IS_ARGS_KW_SPLAT(ci)) {
2753 VM_ASSERT(calling->kw_splat == 1);
2757 VALUE ary = cfp->sp[0];
2758 VALUE kwh = vm_caller_setup_keyword_hash(ci, cfp->sp[1]);
2761 if (vm_caller_setup_arg_splat(cfp, calling, ary, max_args))
return;
2765 if (UNLIKELY(calling->heap_argv)) {
2767 ((
struct RHash *)kwh)->basic.flags |= RHASH_PASS_AS_KEYWORDS;
2768 if (max_args != ALLOW_HEAP_ARGV_KEEP_KWSPLAT) {
2769 calling->kw_splat = 0;
2777 VM_ASSERT(calling->kw_splat == 1);
2781 calling->kw_splat = 0;
2786 VM_ASSERT(calling->kw_splat == 0);
2790 VALUE ary = cfp->sp[0];
2792 if (vm_caller_setup_arg_splat(cfp, calling, ary, max_args)) {
2797 VALUE last_hash, argv_ary;
2798 if (UNLIKELY(argv_ary = calling->heap_argv)) {
2799 if (!IS_ARGS_KEYWORD(ci) &&
2802 (((
struct RHash *)last_hash)->basic.flags & RHASH_PASS_AS_KEYWORDS)) {
2807 calling->kw_splat = 1;
2813 if (!IS_ARGS_KEYWORD(ci) &&
2814 calling->argc > 0 &&
2816 (((
struct RHash *)last_hash)->basic.flags & RHASH_PASS_AS_KEYWORDS)) {
2823 cfp->sp[-1] = rb_hash_dup(last_hash);
2824 calling->kw_splat = 1;
2830 else if (UNLIKELY(IS_ARGS_KW_SPLAT(ci))) {
2832 VM_ASSERT(calling->kw_splat == 1);
2833 VALUE kwh = vm_caller_setup_keyword_hash(ci, cfp->sp[-1]);
2838 calling->kw_splat = 0;
2844 else if (UNLIKELY(IS_ARGS_KEYWORD(ci))) {
2846 VM_ASSERT(calling->kw_splat == 0);
2852 vm_caller_setup_arg_kw(cfp, calling, ci);
2856#define USE_OPT_HIST 0
2859#define OPT_HIST_MAX 64
2860static int opt_hist[OPT_HIST_MAX+1];
2864opt_hist_show_results_at_exit(
void)
2866 for (
int i=0; i<OPT_HIST_MAX; i++) {
2867 ruby_debug_printf(
"opt_hist\t%d\t%d\n", i, opt_hist[i]);
2877 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2878 const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
2879 const int opt = calling->argc - lead_num;
2880 const int opt_num = ISEQ_BODY(iseq)->param.opt_num;
2881 const int opt_pc = (int)ISEQ_BODY(iseq)->param.opt_table[opt];
2882 const int param = ISEQ_BODY(iseq)->param.size;
2883 const int local = ISEQ_BODY(iseq)->local_table_size;
2884 const int delta = opt_num - opt;
2886 RB_DEBUG_COUNTER_INC(ccf_iseq_opt);
2889 if (opt_pc < OPT_HIST_MAX) {
2893 opt_hist[OPT_HIST_MAX]++;
2897 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), opt_pc, param - delta, local);
2905 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2906 const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
2907 const int opt = calling->argc - lead_num;
2908 const int opt_pc = (int)ISEQ_BODY(iseq)->param.opt_table[opt];
2910 RB_DEBUG_COUNTER_INC(ccf_iseq_opt);
2913 if (opt_pc < OPT_HIST_MAX) {
2917 opt_hist[OPT_HIST_MAX]++;
2921 return vm_call_iseq_setup_tailcall(ec, cfp, calling, opt_pc);
2926 VALUE *
const passed_values,
const int passed_keyword_len,
const VALUE *
const passed_keywords,
2927 VALUE *
const locals);
2934 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2935 int param_size = ISEQ_BODY(iseq)->param.size;
2936 int local_size = ISEQ_BODY(iseq)->local_table_size;
2939 VM_ASSERT(ISEQ_BODY(iseq)->param.flags.forwardable);
2941 local_size = local_size + vm_ci_argc(calling->cd->ci);
2942 param_size = param_size + vm_ci_argc(calling->cd->ci);
2944 cfp->sp[0] = (
VALUE)calling->cd->ci;
2946 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param_size, local_size);
2956 VM_ASSERT(vm_ci_flag(ci) & VM_CALL_KWARG);
2957 RB_DEBUG_COUNTER_INC(ccf_iseq_kw1);
2959 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2960 const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;
2962 const int ci_kw_len = kw_arg->keyword_len;
2963 const VALUE *
const ci_keywords = kw_arg->keywords;
2964 VALUE *argv = cfp->sp - calling->argc;
2965 VALUE *
const klocals = argv + kw_param->bits_start - kw_param->num;
2966 const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
2968 MEMCPY(ci_kws, argv + lead_num,
VALUE, ci_kw_len);
2969 args_setup_kw_parameters(ec, iseq, vm_cc_cme(cc), ci_kws, ci_kw_len, ci_keywords, klocals);
2971 int param = ISEQ_BODY(iseq)->param.size;
2972 int local = ISEQ_BODY(iseq)->local_table_size;
2973 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);
2980 const struct rb_callinfo *MAYBE_UNUSED(ci) = calling->cd->ci;
2983 VM_ASSERT((vm_ci_flag(ci) & VM_CALL_KWARG) == 0);
2984 RB_DEBUG_COUNTER_INC(ccf_iseq_kw2);
2986 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2987 const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;
2988 VALUE *
const argv = cfp->sp - calling->argc;
2989 VALUE *
const klocals = argv + kw_param->bits_start - kw_param->num;
2992 for (i=0; i<kw_param->num; i++) {
2993 klocals[i] = kw_param->default_values[i];
3000 int param = ISEQ_BODY(iseq)->param.size;
3001 int local = ISEQ_BODY(iseq)->local_table_size;
3002 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);
/* Fast path for a single no-arg leaf builtin (function header dropped by
 * the extraction).  Pops receiver + args off the VM stack, then invokes
 * the builtin's C function directly without pushing a new frame. */
3012 cfp->sp -= (calling->argc + 1);
3013 rb_insn_func_t func_ptr = (rb_insn_func_t)(uintptr_t)bf->func_ptr;
3014 return builtin_invoker0(ec, calling->recv, NULL, func_ptr);
3023 set_table *dup_check_table = vm->unused_block_warning_table;
3033 .v = (
VALUE)cme->def,
3037 if (!strict_unused_block) {
3038 key = (st_data_t)cme->def->original_id;
3040 if (set_lookup(dup_check_table, key)) {
3050 key |= (st_data_t)(k1.b[i] ^ k2.b[
SIZEOF_VALUE-1-i]) << (8 * i);
3055 fprintf(stderr,
"pc:%p def:%p\n", pc, (
void *)cme->def);
3056 fprintf(stderr,
"key:%p\n", (
void *)key);
3060 if (set_insert(dup_check_table, key)) {
3065 VALUE name = rb_gen_method_name(cme->defined_class, ISEQ_BODY(iseq)->location.base_label);
3067 if (!
NIL_P(m_loc)) {
3068 rb_warn(
"the block passed to '%"PRIsVALUE
"' defined at %"PRIsVALUE
":%"PRIsVALUE
" may be ignored",
3072 rb_warn(
"the block may be ignored because '%"PRIsVALUE
"' does not use a block", name);
3079 const rb_iseq_t *iseq,
VALUE *argv,
int param_size,
int local_size)
3084 VM_ASSERT((vm_ci_argc(ci), 1));
3085 VM_ASSERT(vm_cc_cme(cc) != NULL);
3087 if (UNLIKELY(!ISEQ_BODY(iseq)->param.flags.use_block &&
3088 calling->block_handler != VM_BLOCK_HANDLER_NONE &&
3089 !(vm_ci_flag(calling->cd->ci) & (VM_CALL_OPT_SEND | VM_CALL_SUPER)))) {
3090 warn_unused_block(vm_cc_cme(cc), iseq, (
void *)ec->cfp->pc);
3093 if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_KW_SPLAT))) {
3094 if (LIKELY(rb_simple_iseq_p(iseq))) {
3096 int lead_num = ISEQ_BODY(iseq)->param.lead_num;
3097 CALLER_SETUP_ARG(cfp, calling, ci, lead_num);
3099 if (calling->argc != lead_num) {
3100 argument_arity_error(ec, iseq, vm_cc_cme(cc), calling->argc, lead_num, lead_num);
3104 VM_ASSERT(cc == calling->cc);
3106 if (vm_call_iseq_optimizable_p(ci, cc)) {
3107 if ((iseq->body->builtin_attrs & BUILTIN_ATTR_SINGLE_NOARG_LEAF) &&
3109 VM_ASSERT(iseq->body->builtin_attrs & BUILTIN_ATTR_LEAF);
3110 vm_cc_bf_set(cc, (
void *)iseq->body->iseq_encoded[1]);
3111 CC_SET_FASTPATH(cc, vm_call_single_noarg_leaf_builtin,
true);
3114 CC_SET_FASTPATH(cc, vm_call_iseq_setup_func(ci, param_size, local_size),
true);
3119 else if (rb_iseq_only_optparam_p(iseq)) {
3122 const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
3123 const int opt_num = ISEQ_BODY(iseq)->param.opt_num;
3125 CALLER_SETUP_ARG(cfp, calling, ci, lead_num + opt_num);
3126 const int argc = calling->argc;
3127 const int opt = argc - lead_num;
3129 if (opt < 0 || opt > opt_num) {
3130 argument_arity_error(ec, iseq, vm_cc_cme(cc), argc, lead_num, lead_num + opt_num);
3133 if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_TAILCALL))) {
3134 CC_SET_FASTPATH(cc, vm_call_iseq_setup_normal_opt_start,
3135 !IS_ARGS_SPLAT(ci) && !IS_ARGS_KEYWORD(ci) &&
3136 vm_call_cacheable(ci, cc));
3139 CC_SET_FASTPATH(cc, vm_call_iseq_setup_tailcall_opt_start,
3140 !IS_ARGS_SPLAT(ci) && !IS_ARGS_KEYWORD(ci) &&
3141 vm_call_cacheable(ci, cc));
3145 VM_ASSERT((
int)ISEQ_BODY(iseq)->param.size == lead_num + opt_num);
3146 for (
int i=argc; i<lead_num + opt_num; i++) {
3149 return (
int)ISEQ_BODY(iseq)->param.opt_table[opt];
3151 else if (rb_iseq_only_kwparam_p(iseq) && !IS_ARGS_SPLAT(ci)) {
3152 const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
3153 const int argc = calling->argc;
3154 const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;
3156 if (vm_ci_flag(ci) & VM_CALL_KWARG) {
3159 if (argc - kw_arg->keyword_len == lead_num) {
3160 const int ci_kw_len = kw_arg->keyword_len;
3161 const VALUE *
const ci_keywords = kw_arg->keywords;
3163 MEMCPY(ci_kws, argv + lead_num,
VALUE, ci_kw_len);
3165 VALUE *
const klocals = argv + kw_param->bits_start - kw_param->num;
3166 args_setup_kw_parameters(ec, iseq, vm_cc_cme(cc), ci_kws, ci_kw_len, ci_keywords, klocals);
3168 CC_SET_FASTPATH(cc, vm_call_iseq_setup_kwparm_kwarg,
3169 vm_call_cacheable(ci, cc));
3174 else if (argc == lead_num) {
3176 VALUE *
const klocals = argv + kw_param->bits_start - kw_param->num;
3177 args_setup_kw_parameters(ec, iseq, vm_cc_cme(cc), NULL, 0, NULL, klocals);
3179 if (klocals[kw_param->num] ==
INT2FIX(0)) {
3181 CC_SET_FASTPATH(cc, vm_call_iseq_setup_kwparm_nokwarg,
3182 vm_call_cacheable(ci, cc));
3208 if (ISEQ_BODY(iseq)->param.flags.forwardable) {
3209 bool can_fastpath =
true;
3211 if ((vm_ci_flag(ci) & VM_CALL_FORWARDING)) {
3213 if (vm_ci_argc(ci) != vm_ci_argc(forward_cd->caller_ci)) {
3214 ci = vm_ci_new_runtime(
3221 ci = forward_cd->caller_ci;
3223 can_fastpath =
false;
3227 if (!vm_ci_markable(ci)) {
3228 ci = vm_ci_new_runtime(
3233 can_fastpath =
false;
3235 argv[param_size - 1] = (
VALUE)ci;
3236 CC_SET_FASTPATH(cc, vm_call_iseq_forwardable, can_fastpath);
3240 return setup_parameters_complex(ec, iseq, calling, ci, argv, arg_setup_method);
3267 const VALUE * lep = VM_CF_LEP(cfp);
3273 if (VM_ENV_FLAGS(lep, VM_ENV_FLAG_ESCAPED)) {
3278 iseq = rb_vm_search_cf_from_ep(ec, cfp, lep)->iseq;
3282 int local_size = ISEQ_BODY(iseq)->local_table_size + argc;
3284 const VALUE * from = lep - (local_size + VM_ENV_DATA_SIZE - 1);
3285 VALUE * to = cfp->sp - 1;
3289 CHECK_VM_STACK_OVERFLOW0(cfp, to,
RARRAY_LEN(splat));
3294 CHECK_VM_STACK_OVERFLOW0(cfp, to, argc);
3296 cfp->sp = to + argc;
3315 RB_DEBUG_COUNTER_INC(ccf_iseq_setup);
3318 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
3319 int param_size = ISEQ_BODY(iseq)->param.size;
3320 int local_size = ISEQ_BODY(iseq)->local_table_size;
3322 RUBY_ASSERT(!ISEQ_BODY(iseq)->param.flags.forwardable);
3324 const int opt_pc = vm_callee_setup_arg(ec, calling, iseq, cfp->sp - calling->argc, param_size, local_size);
3325 return vm_call_iseq_setup_2(ec, cfp, calling, opt_pc, param_size, local_size);
3331 RB_DEBUG_COUNTER_INC(ccf_iseq_setup);
3334 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
3335 int param_size = ISEQ_BODY(iseq)->param.size;
3336 int local_size = ISEQ_BODY(iseq)->local_table_size;
3338 RUBY_ASSERT(ISEQ_BODY(iseq)->param.flags.forwardable);
3341 local_size = local_size + vm_ci_argc(calling->cd->ci);
3342 param_size = param_size + vm_ci_argc(calling->cd->ci);
3344 const int opt_pc = vm_callee_setup_arg(ec, calling, iseq, cfp->sp - calling->argc, param_size, local_size);
3345 return vm_call_iseq_setup_2(ec, cfp, calling, opt_pc, param_size, local_size);
3350 int opt_pc,
int param_size,
int local_size)
3355 if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_TAILCALL))) {
3356 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), opt_pc, param_size, local_size);
3359 return vm_call_iseq_setup_tailcall(ec, cfp, calling, opt_pc);
/* Tail of vm_call_iseq_setup_normal's parameter list plus its body
 * (earlier signature lines are outside this extract).  Pushes a METHOD
 * frame for an iseq-defined method whose arguments are already laid out
 * on the VM stack. */
3365 int opt_pc,
 int param_size,
 int local_size)
3367 const rb_iseq_t *iseq = def_iseq_ptr(me->def);
/* argv points at the first argument; the receiver sits just below it,
 * so cfp->sp is rolled back past argv and the receiver slot. */
3368 VALUE *argv = cfp->sp - calling->argc;
3369 VALUE *sp = argv + param_size;
3370 cfp->sp = argv - 1 ;
/* New frame starts at opt_pc into the encoded iseq; locals beyond the
 * parameters (local_size - param_size) are zero-filled by vm_push_frame. */
3372 vm_push_frame(ec, iseq, VM_FRAME_MAGIC_METHOD | VM_ENV_FLAG_LOCAL, calling->recv,
3373 calling->block_handler, (
 VALUE)me,
3374 ISEQ_BODY(iseq)->iseq_encoded + opt_pc, sp,
3375 local_size - param_size,
3376 ISEQ_BODY(iseq)->stack_max);
/* Body of vm_call_iseq_setup_tailcall (header dropped by the extraction).
 * Replaces the current frame with the callee's frame instead of stacking
 * a new one: copies receiver + arguments down to the caller frame's base,
 * pops the current frame, then pushes the callee frame in its place. */
3385 VALUE *argv = cfp->sp - calling->argc;
3387 const rb_iseq_t *iseq = def_iseq_ptr(me->def);
3388 VALUE *src_argv = argv;
3389 VALUE *sp_orig, *sp;
/* Preserve the FINISH flag so returning from the tailcalled frame still
 * unwinds the interpreter loop correctly. */
3390 VALUE finish_flag = VM_FRAME_FINISHED_P(cfp) ? VM_FRAME_FLAG_FINISH : 0;
/* If the block handler points into the frame being discarded, re-capture
 * it against the previous control frame so it survives the pop. */
3392 if (VM_BH_FROM_CFP_P(calling->block_handler, cfp)) {
3393 struct rb_captured_block *dst_captured = VM_CFP_TO_CAPTURED_BLOCK(RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp));
3394 const struct rb_captured_block *src_captured = VM_BH_TO_CAPT_BLOCK(calling->block_handler);
3395 dst_captured->code.val = src_captured->code.val;
3396 if (VM_BH_ISEQ_BLOCK_P(calling->block_handler)) {
3397 calling->block_handler = VM_BH_FROM_ISEQ_BLOCK(dst_captured);
3400 calling->block_handler = VM_BH_FROM_IFUNC_BLOCK(dst_captured);
3404 vm_pop_frame(ec, cfp, cfp->ep);
/* After the pop, cfp->sp is the caller's stack top: lay out receiver and
 * the callee's parameters there before pushing the replacement frame. */
3407 sp_orig = sp = cfp->sp;
3410 sp[0] = calling->recv;
3414 for (i=0; i < ISEQ_BODY(iseq)->param.size; i++) {
3415 *sp++ = src_argv[i];
3418 vm_push_frame(ec, iseq, VM_FRAME_MAGIC_METHOD | VM_ENV_FLAG_LOCAL | finish_flag,
3419 calling->recv, calling->block_handler, (
 VALUE)me,
3420 ISEQ_BODY(iseq)->iseq_encoded + opt_pc, sp,
3421 ISEQ_BODY(iseq)->local_table_size - ISEQ_BODY(iseq)->param.size,
3422 ISEQ_BODY(iseq)->stack_max);
3430ractor_unsafe_check(
void)
3432 if (!rb_ractor_main_p()) {
3433 rb_raise(rb_eRactorUnsafeError,
"ractor unsafe method called from not main ractor");
3440 ractor_unsafe_check();
3448 ractor_unsafe_check();
3450 return (*f)(argc, argv, recv);
3456 ractor_unsafe_check();
3464 ractor_unsafe_check();
3466 return (*f)(recv, argv[0]);
3472 ractor_unsafe_check();
3474 return (*f)(recv, argv[0], argv[1]);
3480 ractor_unsafe_check();
3482 return (*f)(recv, argv[0], argv[1], argv[2]);
3488 ractor_unsafe_check();
3490 return (*f)(recv, argv[0], argv[1], argv[2], argv[3]);
3496 ractor_unsafe_check();
3497 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3498 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4]);
3504 ractor_unsafe_check();
3505 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3506 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
3512 ractor_unsafe_check();
3513 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3514 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
3520 ractor_unsafe_check();
3521 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3522 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
3528 ractor_unsafe_check();
3529 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3530 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
3536 ractor_unsafe_check();
3537 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3538 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
3544 ractor_unsafe_check();
3545 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3546 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
3552 ractor_unsafe_check();
3553 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3554 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
3560 ractor_unsafe_check();
3561 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3562 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
3568 ractor_unsafe_check();
3569 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3570 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
3576 ractor_unsafe_check();
3577 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3578 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
3592 return (*f)(argc, argv, recv);
3606 return (*f)(recv, argv[0]);
3613 return (*f)(recv, argv[0], argv[1]);
3620 return (*f)(recv, argv[0], argv[1], argv[2]);
3627 return (*f)(recv, argv[0], argv[1], argv[2], argv[3]);
3633 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3634 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4]);
3640 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3641 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
3647 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3648 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
3654 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3655 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
3661 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3662 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
3668 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3669 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
3675 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3676 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
3682 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3683 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
3689 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3690 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
3696 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3697 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
3703 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3704 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
/* Fragment of the cfp-consistency predicate used by CHECK_CFP_CONSISTENCY
 * (header and trailing return statements were dropped by the extraction).
 * A C function must leave ec->cfp exactly one frame below reg_cfp; a
 * pending stack-overflow raise is tolerated once and its flag cleared. */
3710 const int ov_flags = RAISED_STACKOVERFLOW;
3711 if (LIKELY(reg_cfp == ec->cfp + 1))
 return TRUE;
3712 if (rb_ec_raised_p(ec, ov_flags)) {
3713 rb_ec_raised_reset(ec, ov_flags);
3719#define CHECK_CFP_CONSISTENCY(func) \
3720 (LIKELY(vm_cfp_consistent_p(ec, reg_cfp)) ? (void)0 : \
3721 rb_bug(func ": cfp consistency error (%p, %p)", (void *)reg_cfp, (void *)(ec->cfp+1)))
3727#if VM_DEBUG_VERIFY_METHOD_CACHE
3728 switch (me->def->type) {
3729 case VM_METHOD_TYPE_CFUNC:
3730 case VM_METHOD_TYPE_NOTIMPLEMENTED:
3732# define METHOD_BUG(t) case VM_METHOD_TYPE_##t: rb_bug("wrong method type: " #t)
3734 METHOD_BUG(ATTRSET);
3736 METHOD_BUG(BMETHOD);
3739 METHOD_BUG(OPTIMIZED);
3740 METHOD_BUG(MISSING);
3741 METHOD_BUG(REFINED);
3745 rb_bug(
"wrong method type: %d", me->def->type);
3748 return UNALIGNED_MEMBER_PTR(me->def, body.cfunc);
/* Body of vm_call_cfunc_with_frame_ (header dropped by the extraction):
 * push a CFUNC frame, invoke the C method through its invoker trampoline,
 * then pop the frame and fire the C_RETURN event/dtrace hooks. */
3755 RB_DEBUG_COUNTER_INC(ccf_cfunc_with_frame);
3762 VALUE recv = calling->recv;
3763 VALUE block_handler = calling->block_handler;
3764 VALUE frame_type = VM_FRAME_MAGIC_CFUNC | VM_FRAME_FLAG_CFRAME | VM_ENV_FLAG_LOCAL;
/* Record on the frame that the last positional is a keyword-splat hash. */
3766 if (UNLIKELY(calling->kw_splat)) {
3767 frame_type |= VM_FRAME_FLAG_CFRAME_KW;
3770 VM_ASSERT(reg_cfp == ec->cfp);
3772 RUBY_DTRACE_CMETHOD_ENTRY_HOOK(ec, me->owner, me->def->original_id);
/* CFUNC frames carry no iseq/pc; only receiver, block handler and me. */
3775 vm_push_frame(ec, NULL, frame_type, recv,
3776 block_handler, (
 VALUE)me,
3777 0, ec->cfp->sp, 0, 0);
3779 int len = cfunc->argc;
/* Reset sp to below the call's receiver+args before dispatching, so the
 * callee sees a clean stack and consistency can be checked afterwards. */
3782 reg_cfp->sp = stack_bottom;
3783 val = (*cfunc->invoker)(recv, argc, argv, cfunc->func);
/* The C method must not have left the VM stack in an unexpected state. */
3785 CHECK_CFP_CONSISTENCY(
 "vm_call_cfunc");
3787 rb_vm_pop_frame(ec);
3789 VM_ASSERT(ec->cfp->sp == stack_bottom);
3791 EXEC_EVENT_HOOK(ec,
 RUBY_EVENT_C_RETURN, recv, me->def->original_id, vm_ci_mid(ci), me->owner, val);
3792 RUBY_DTRACE_CMETHOD_RETURN_HOOK(ec, me->owner, me->def->original_id);
3802 VM_ASSERT(cme->def->type == VM_METHOD_TYPE_CFUNC);
3804 VALUE *sp = ec->cfp->sp;
3805 VALUE recv = *(sp - recv_idx - 1);
3806 VALUE frame_type = VM_FRAME_MAGIC_CFUNC | VM_FRAME_FLAG_CFRAME | VM_ENV_FLAG_LOCAL;
3807 VALUE block_handler = VM_BLOCK_HANDLER_NONE;
3808#if VM_CHECK_MODE > 0
3810 *(GET_EC()->cfp->sp) =
Qfalse;
3812 vm_push_frame(ec, NULL, frame_type, recv, block_handler, (
VALUE)cme, 0, ec->cfp->sp, 0, 0);
3817rb_splat_or_kwargs_p(
const struct rb_callinfo *restrict ci)
3819 return IS_ARGS_SPLAT(ci) || IS_ARGS_KW_OR_KW_SPLAT(ci);
3825 int argc = calling->argc;
3826 VALUE *stack_bottom = reg_cfp->sp - argc - 1;
3827 VALUE *argv = &stack_bottom[1];
3829 return vm_call_cfunc_with_frame_(ec, reg_cfp, calling, argc, argv, stack_bottom);
3836 RB_DEBUG_COUNTER_INC(ccf_cfunc_other);
3838 CALLER_SETUP_ARG(reg_cfp, calling, ci, ALLOW_HEAP_ARGV_KEEP_KWSPLAT);
3840 if (UNLIKELY(argv_ary = calling->heap_argv)) {
3841 VM_ASSERT(!IS_ARGS_KEYWORD(ci));
3844 VALUE *stack_bottom = reg_cfp->sp - 2;
3846 VM_ASSERT(calling->argc == 1);
3850 return vm_call_cfunc_with_frame_(ec, reg_cfp, calling, argc, argv, stack_bottom);
3853 CC_SET_FASTPATH(calling->cc, vm_call_cfunc_with_frame, !rb_splat_or_kwargs_p(ci) && !calling->kw_splat && !(vm_ci_flag(ci) & VM_CALL_FORWARDING));
3855 return vm_call_cfunc_with_frame(ec, reg_cfp, calling);
3862 VALUE argv_ary = reg_cfp->sp[-1 - stack_offset];
3865 if (UNLIKELY(argc > VM_ARGC_STACK_MAX)) {
3866 return vm_call_cfunc_other(ec, reg_cfp, calling);
3870 calling->kw_splat = 0;
3872 VALUE *stack_bottom = reg_cfp->sp - 2 - stack_offset;
3873 VALUE *sp = stack_bottom;
3874 CHECK_VM_STACK_OVERFLOW(reg_cfp, argc);
3875 for(i = 0; i < argc; i++) {
3880 return vm_call_cfunc_with_frame_(ec, reg_cfp, calling, argc, stack_bottom+1, stack_bottom);
3886 RB_DEBUG_COUNTER_INC(ccf_cfunc_only_splat);
3887 VALUE argv_ary = reg_cfp->sp[-1];
3891 int argc_offset = 0;
3893 if (UNLIKELY(argc > 0 &&
3895 (((
struct RHash *)last_hash)->basic.flags & RHASH_PASS_AS_KEYWORDS))) {
3897 return vm_call_cfunc_other(ec, reg_cfp, calling);
3901 return vm_call_cfunc_array_argv(ec, reg_cfp, calling, 0, argc_offset);
3907 RB_DEBUG_COUNTER_INC(ccf_cfunc_only_splat_kw);
3908 VALUE keyword_hash = reg_cfp->sp[-1];
3911 return vm_call_cfunc_array_argv(ec, reg_cfp, calling, 1, 0);
3914 return vm_call_cfunc_other(ec, reg_cfp, calling);
3921 RB_DEBUG_COUNTER_INC(ccf_cfunc);
3923 if (IS_ARGS_SPLAT(ci) && !(vm_ci_flag(ci) & VM_CALL_FORWARDING)) {
3924 if (!IS_ARGS_KW_SPLAT(ci) && vm_ci_argc(ci) == 1) {
3926 CC_SET_FASTPATH(calling->cc, vm_call_cfunc_only_splat, TRUE);
3927 return vm_call_cfunc_only_splat(ec, reg_cfp, calling);
3929 if (IS_ARGS_KW_SPLAT(ci) && vm_ci_argc(ci) == 2) {
3931 CC_SET_FASTPATH(calling->cc, vm_call_cfunc_only_splat_kw, TRUE);
3932 return vm_call_cfunc_only_splat_kw(ec, reg_cfp, calling);
3936 CC_SET_FASTPATH(calling->cc, vm_call_cfunc_other, TRUE);
3937 return vm_call_cfunc_other(ec, reg_cfp, calling);
3944 RB_DEBUG_COUNTER_INC(ccf_ivar);
3946 VALUE ivar = vm_getivar(calling->recv, vm_cc_cme(cc)->def->body.attr.id, NULL, NULL, cc, TRUE,
Qnil);
3953 RB_DEBUG_COUNTER_INC(ccf_attrset);
3954 VALUE val = *(cfp->sp - 1);
3957 shape_id_t dest_shape_id;
3958 vm_cc_atomic_shape_and_index(cc, &dest_shape_id, &index);
3959 ID id = vm_cc_cme(cc)->def->body.attr.id;
3960 rb_check_frozen(obj);
3961 VALUE res = vm_setivar(obj,
id, val, dest_shape_id, index);
3970 res = vm_setivar_default(obj,
id, val, dest_shape_id, index);
3971 if (!UNDEF_P(res)) {
3976 res = vm_setivar_slowpath_attr(obj,
id, val, cc);
3984 return vm_call_attrset_direct(ec, cfp, calling->cc, calling->recv);
3994 VALUE procv = cme->def->body.bmethod.proc;
3997 cme->def->body.bmethod.defined_ractor != rb_ractor_self(rb_ec_ractor_ptr(ec))) {
3998 rb_raise(
rb_eRuntimeError,
"defined with an un-shareable Proc in a different Ractor");
4002 GetProcPtr(procv, proc);
4003 val = vm_invoke_bmethod(ec, proc, calling->recv, CALLING_ARGC(calling), argv, calling->kw_splat, calling->block_handler, vm_cc_cme(cc));
4013 RB_DEBUG_COUNTER_INC(ccf_iseq_bmethod);
4017 VALUE procv = cme->def->body.bmethod.proc;
4020 cme->def->body.bmethod.defined_ractor != rb_ractor_self(rb_ec_ractor_ptr(ec))) {
4021 rb_raise(
rb_eRuntimeError,
"defined with an un-shareable Proc in a different Ractor");
4025 GetProcPtr(procv, proc);
4026 const struct rb_block *block = &proc->block;
4028 while (vm_block_type(block) == block_type_proc) {
4029 block = vm_proc_block(block->as.proc);
4031 VM_ASSERT(vm_block_type(block) == block_type_iseq);
4034 const rb_iseq_t *iseq = rb_iseq_check(captured->code.iseq);
4035 VALUE *
const argv = cfp->sp - calling->argc;
4036 const int arg_size = ISEQ_BODY(iseq)->param.size;
4039 if (vm_ci_flag(calling->cd->ci) & VM_CALL_ARGS_SIMPLE) {
4040 opt_pc = vm_callee_setup_block_arg(ec, calling, calling->cd->ci, iseq, argv, arg_setup_method);
4043 opt_pc = setup_parameters_complex(ec, iseq, calling, calling->cd->ci, argv, arg_setup_method);
4048 vm_push_frame(ec, iseq,
4049 VM_FRAME_MAGIC_BLOCK | VM_FRAME_FLAG_BMETHOD | VM_FRAME_FLAG_LAMBDA,
4051 VM_GUARDED_PREV_EP(captured->ep),
4053 ISEQ_BODY(iseq)->iseq_encoded + opt_pc,
4055 ISEQ_BODY(iseq)->local_table_size - arg_size,
4056 ISEQ_BODY(iseq)->stack_max);
4064 RB_DEBUG_COUNTER_INC(ccf_noniseq_bmethod);
4068 CALLER_SETUP_ARG(cfp, calling, calling->cd->ci, ALLOW_HEAP_ARGV);
4069 if (UNLIKELY(calling->heap_argv)) {
4074 argc = calling->argc;
4077 cfp->sp += - argc - 1;
4080 return vm_call_bmethod_body(ec, calling, argv);
4086 RB_DEBUG_COUNTER_INC(ccf_bmethod);
4090 VALUE procv = cme->def->body.bmethod.proc;
4092 GetProcPtr(procv, proc);
4093 const struct rb_block *block = &proc->block;
4095 while (vm_block_type(block) == block_type_proc) {
4096 block = vm_proc_block(block->as.proc);
4098 if (vm_block_type(block) == block_type_iseq) {
4099 CC_SET_FASTPATH(cc, vm_call_iseq_bmethod, TRUE);
4100 return vm_call_iseq_bmethod(ec, cfp, calling);
4103 CC_SET_FASTPATH(cc, vm_call_noniseq_bmethod, TRUE);
4104 return vm_call_noniseq_bmethod(ec, cfp, calling);
4108rb_find_defined_class_by_owner(
VALUE current_class,
VALUE target_owner)
4110 VALUE klass = current_class;
4118 while (
RTEST(klass)) {
4120 if (owner == target_owner) {
4126 return current_class;
4135 if (orig_me->defined_class == 0) {
4136 VALUE defined_class = rb_find_defined_class_by_owner(me->defined_class, orig_me->owner);
4137 VM_ASSERT_TYPE(orig_me->owner,
T_MODULE);
4138 cme = rb_method_entry_complement_defined_class(orig_me, me->called_id, defined_class);
4140 if (me->def->reference_count == 1) {
4141 RB_OBJ_WRITE(me, &me->def->body.alias.original_me, cme);
4145 rb_method_definition_create(VM_METHOD_TYPE_ALIAS, me->def->original_id);
4153 VM_ASSERT(callable_method_entry_p(cme));
4160 return aliased_callable_method_entry(me);
4166 calling->cc = &VM_CC_ON_STACK(
Qundef,
4169 aliased_callable_method_entry(vm_cc_cme(calling->cc)));
4171 return vm_call_method_each_type(ec, cfp, calling);
/* NOTE(review): extraction-garbled fragment — the original file's line
 * numbers are fused into the text and the function-name/brace/return lines
 * are missing from this chunk; code lines kept byte-identical. */
/* Builds a method_missing_reason bitmask from the call-site flags: each
 * VM_CALL_* flag present on the callinfo adds the matching MISSING_* bit
 * on top of the MISSING_NOENTRY base. */
4174static enum method_missing_reason
4177 enum method_missing_reason stat = MISSING_NOENTRY;
4178 if (vm_ci_flag(ci) & VM_CALL_VCALL) stat |= MISSING_VCALL;
4179 if (vm_ci_flag(ci) & VM_CALL_FCALL) stat |= MISSING_FCALL;
4180 if (vm_ci_flag(ci) & VM_CALL_SUPER) stat |= MISSING_SUPER;
4190 ASSUME(calling->argc >= 0);
4192 enum method_missing_reason missing_reason = MISSING_NOENTRY;
4193 int argc = calling->argc;
4194 VALUE recv = calling->recv;
4197 flags |= VM_CALL_OPT_SEND;
4199 if (UNLIKELY(! mid)) {
4200 mid = idMethodMissing;
4201 missing_reason = ci_missing_reason(ci);
4202 ec->method_missing_reason = missing_reason;
4205 if (UNLIKELY(argv_ary = calling->heap_argv)) {
4206 if (rb_method_basic_definition_p(klass, idMethodMissing)) {
4207 rb_ary_unshift(argv_ary, symbol);
4210 int priv = vm_ci_flag(ci) & (VM_CALL_FCALL | VM_CALL_VCALL);
4211 VALUE exc = rb_make_no_method_exception(
4233 CHECK_VM_STACK_OVERFLOW(reg_cfp, 1);
4236 argc = ++calling->argc;
4238 if (rb_method_basic_definition_p(klass, idMethodMissing)) {
4241 int priv = vm_ci_flag(ci) & (VM_CALL_FCALL | VM_CALL_VCALL);
4242 const VALUE *argv = STACK_ADDR_FROM_TOP(argc);
4243 VALUE exc = rb_make_no_method_exception(
4256 .ci = &VM_CI_ON_STACK(mid, flags, argc, vm_ci_kwarg(ci)),
4262 if (!(vm_ci_flag(ci) & VM_CALL_FORWARDING)) {
4263 calling->cd = &new_fcd.cd;
4267 VM_ASSERT((vm_ci_argc(caller_ci), 1));
4268 new_fcd.caller_ci = caller_ci;
4271 calling->cc = &VM_CC_ON_STACK(klass,
4273 { .method_missing_reason = missing_reason },
4274 rb_callable_method_entry_with_refinements(klass, mid, NULL));
4276 if (flags & VM_CALL_FCALL) {
4277 return vm_call_method(ec, reg_cfp, calling);
4281 VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));
4283 if (vm_cc_cme(cc) != NULL) {
4284 switch (METHOD_ENTRY_VISI(vm_cc_cme(cc))) {
4285 case METHOD_VISI_PUBLIC:
4286 return vm_call_method_each_type(ec, reg_cfp, calling);
4287 case METHOD_VISI_PRIVATE:
4288 vm_cc_method_missing_reason_set(cc, MISSING_PRIVATE);
4290 case METHOD_VISI_PROTECTED:
4291 vm_cc_method_missing_reason_set(cc, MISSING_PROTECTED);
4294 VM_UNREACHABLE(vm_call_method);
4296 return vm_call_method_missing(ec, reg_cfp, calling);
4299 return vm_call_method_nome(ec, reg_cfp, calling);
4309 i = calling->argc - 1;
4311 if (calling->argc == 0) {
4312 rb_raise(rb_eArgError,
"no method name given");
4336 return vm_call_symbol(ec, reg_cfp, calling, ci, sym, flags);
4342 RB_DEBUG_COUNTER_INC(ccf_opt_send_complex);
4344 int flags = VM_CALL_FCALL;
4348 CALLER_SETUP_ARG(reg_cfp, calling, ci, ALLOW_HEAP_ARGV);
4349 if (UNLIKELY(argv_ary = calling->heap_argv)) {
4351 flags |= VM_CALL_ARGS_SPLAT;
4352 if (calling->kw_splat) {
4353 VALUE last_hash = rb_ary_last(0, NULL, argv_ary);
4354 ((
struct RHash *)last_hash)->basic.flags |= RHASH_PASS_AS_KEYWORDS;
4355 calling->kw_splat = 0;
4357 return vm_call_symbol(ec, reg_cfp, calling, ci, sym, flags);
4360 if (calling->kw_splat) flags |= VM_CALL_KW_SPLAT;
4361 return vm_call_opt_send0(ec, reg_cfp, calling, flags);
4367 RB_DEBUG_COUNTER_INC(ccf_opt_send_simple);
4368 return vm_call_opt_send0(ec, reg_cfp, calling, vm_ci_flag(calling->cd->ci) | VM_CALL_FCALL);
4374 RB_DEBUG_COUNTER_INC(ccf_opt_send);
4377 int flags = vm_ci_flag(ci);
4379 if (UNLIKELY((flags & VM_CALL_FORWARDING) || (!(flags & VM_CALL_ARGS_SIMPLE) &&
4380 ((calling->argc == 1 && (flags & (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT))) ||
4381 (calling->argc == 2 && (flags & VM_CALL_ARGS_SPLAT) && (flags & VM_CALL_KW_SPLAT)) ||
4382 ((flags & VM_CALL_KWARG) && (vm_ci_kwarg(ci)->keyword_len == calling->argc)))))) {
4383 CC_SET_FASTPATH(calling->cc, vm_call_opt_send_complex, TRUE);
4384 return vm_call_opt_send_complex(ec, reg_cfp, calling);
4387 CC_SET_FASTPATH(calling->cc, vm_call_opt_send_simple, TRUE);
4388 return vm_call_opt_send_simple(ec, reg_cfp, calling);
4393 const struct rb_callinfo *orig_ci,
enum method_missing_reason reason)
4395 RB_DEBUG_COUNTER_INC(ccf_method_missing);
4397 VALUE *argv = STACK_ADDR_FROM_TOP(calling->argc);
4398 unsigned int argc, flag;
4400 flag = VM_CALL_FCALL | VM_CALL_OPT_SEND | vm_ci_flag(orig_ci);
4401 argc = ++calling->argc;
4404 CHECK_VM_STACK_OVERFLOW(reg_cfp, 1);
4405 vm_check_canary(ec, reg_cfp->sp);
4409 argv[0] =
ID2SYM(vm_ci_mid(orig_ci));
4412 ec->method_missing_reason = reason;
4416 .ci = &VM_CI_ON_STACK(idMethodMissing, flag, argc, vm_ci_kwarg(orig_ci)),
4422 if (!(flag & VM_CALL_FORWARDING)) {
4423 calling->cd = &new_fcd.cd;
4427 VM_ASSERT((vm_ci_argc(caller_ci), 1));
4428 new_fcd.caller_ci = caller_ci;
4432 calling->cc = &VM_CC_ON_STACK(
Qundef, vm_call_general, {{ 0 }},
4433 rb_callable_method_entry_without_refinements(
CLASS_OF(calling->recv), idMethodMissing, NULL));
4434 return vm_call_method(ec, reg_cfp, calling);
4440 return vm_call_method_missing_body(ec, reg_cfp, calling, calling->cd->ci, vm_cc_cmethod_missing_reason(calling->cc));
4451 return vm_call_method_nome(ec, cfp, calling);
4453 if (cme->def->type == VM_METHOD_TYPE_REFINED &&
4454 cme->def->body.refined.orig_me) {
4455 cme = refined_method_callable_without_refinement(cme);
4458 calling->cc = &VM_CC_ON_STACK(
Qundef, vm_call_general, {{ 0 }}, cme);
4460 return vm_call_method_each_type(ec, cfp, calling);
/* Looks up the refinement module registered for +klass+ in the +refinements+
 * hash (taken from a cref).  NOTE(review): garbled fragment — the nil-branch
 * body and closing braces are missing from this chunk (presumably
 * "return Qnil;"); confirm against the full file. */
4464find_refinement(
VALUE refinements,
VALUE klass)
4466 if (
NIL_P(refinements)) {
/* rb_hash_lookup yields Qnil when klass has no registered refinement. */
4469 return rb_hash_lookup(refinements, klass);
4478 if (cfp->iseq && ISEQ_BODY(cfp->iseq)->type == ISEQ_TYPE_BLOCK) {
4479 const rb_iseq_t *local_iseq = ISEQ_BODY(cfp->iseq)->local_iseq;
4482 cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
4483 if (RUBY_VM_CONTROL_FRAME_STACK_OVERFLOW_P(ec, cfp)) {
4487 }
while (cfp->iseq != local_iseq);
4498 if (orig_me->defined_class == 0) {
4506 VM_ASSERT(callable_method_entry_p(cme));
4508 if (UNDEFINED_METHOD_ENTRY_P(cme)) {
4518 ID mid = vm_ci_mid(calling->cd->ci);
4519 const rb_cref_t *cref = vm_get_cref(cfp->ep);
4523 for (; cref; cref = CREF_NEXT(cref)) {
4524 const VALUE refinement = find_refinement(CREF_REFINEMENTS(cref), vm_cc_cme(cc)->owner);
4525 if (
NIL_P(refinement))
continue;
4528 rb_callable_method_entry(refinement, mid);
4531 if (vm_cc_call(cc) == vm_call_super_method) {
4534 if (top_me && rb_method_definition_eq(ref_me->def, top_me->def)) {
4539 if (cme->def->type != VM_METHOD_TYPE_REFINED ||
4540 cme->def != ref_me->def) {
4543 if (ref_me->def->type != VM_METHOD_TYPE_REFINED) {
4552 if (vm_cc_cme(cc)->def->body.refined.orig_me) {
4553 return refined_method_callable_without_refinement(vm_cc_cme(cc));
4568 if (calling->cd->cc) {
4569 const struct rb_callcache *cc = calling->cc = vm_cc_new(vm_cc_cme(calling->cc)->defined_class, ref_cme, vm_call_general, cc_type_refinement);
4571 return vm_call_method(ec, cfp, calling);
4574 struct rb_callcache *ref_cc = &VM_CC_ON_STACK(
Qundef, vm_call_general, {{ 0 }}, ref_cme);
4575 calling->cc= ref_cc;
4576 return vm_call_method(ec, cfp, calling);
4580 return vm_call_method_nome(ec, cfp, calling);
4586NOINLINE(
static VALUE
4594 int argc = calling->argc;
4597 if (argc > 0)
MEMMOVE(&TOPN(argc), &TOPN(argc-1),
VALUE, argc);
4600 return vm_invoke_block(ec, reg_cfp, calling, ci,
false, block_handler);
4606 RB_DEBUG_COUNTER_INC(ccf_opt_call);
4609 VALUE procval = calling->recv;
4610 return vm_invoke_block_opt_call(ec, reg_cfp, calling, ci, VM_BH_FROM_PROC(procval));
4616 RB_DEBUG_COUNTER_INC(ccf_opt_block_call);
4618 VALUE block_handler = VM_ENV_BLOCK_HANDLER(VM_CF_LEP(reg_cfp));
4621 if (BASIC_OP_UNREDEFINED_P(BOP_CALL, PROC_REDEFINED_OP_FLAG)) {
4622 return vm_invoke_block_opt_call(ec, reg_cfp, calling, ci, block_handler);
4625 calling->recv = rb_vm_bh_to_procval(ec, block_handler);
4626 calling->cc = rb_vm_search_method_slowpath(ci,
CLASS_OF(calling->recv));
4627 return vm_call_general(ec, reg_cfp, calling);
4634 VALUE recv = calling->recv;
4637 VM_ASSERT(vm_cc_cme(calling->cc)->def->type == VM_METHOD_TYPE_OPTIMIZED);
4638 VM_ASSERT(vm_cc_cme(calling->cc)->def->body.optimized.type == OPTIMIZED_METHOD_TYPE_STRUCT_AREF);
4640 const unsigned int off = vm_cc_cme(calling->cc)->def->body.optimized.index;
4641 return internal_RSTRUCT_GET(recv,
off);
4647 RB_DEBUG_COUNTER_INC(ccf_opt_struct_aref);
4649 VALUE ret = vm_call_opt_struct_aref0(ec, calling);
4657 VALUE recv = calling->recv;
4660 VM_ASSERT(vm_cc_cme(calling->cc)->def->type == VM_METHOD_TYPE_OPTIMIZED);
4661 VM_ASSERT(vm_cc_cme(calling->cc)->def->body.optimized.type == OPTIMIZED_METHOD_TYPE_STRUCT_ASET);
4663 rb_check_frozen(recv);
4665 const unsigned int off = vm_cc_cme(calling->cc)->def->body.optimized.index;
4666 internal_RSTRUCT_SET(recv,
off, val);
4674 RB_DEBUG_COUNTER_INC(ccf_opt_struct_aset);
4676 VALUE ret = vm_call_opt_struct_aset0(ec, calling, *(reg_cfp->sp - 1));
4684#define VM_CALL_METHOD_ATTR(var, func, nohook) \
4685 if (UNLIKELY(ruby_vm_event_flags & (RUBY_EVENT_C_CALL | RUBY_EVENT_C_RETURN))) { \
4686 EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_CALL, calling->recv, vm_cc_cme(cc)->def->original_id, \
4687 vm_ci_mid(ci), vm_cc_cme(cc)->owner, Qundef); \
4689 EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_RETURN, calling->recv, vm_cc_cme(cc)->def->original_id, \
4690 vm_ci_mid(ci), vm_cc_cme(cc)->owner, (var)); \
/* NOTE(review): garbled fragment of the OPTIMIZED-method dispatcher — it
 * switches on the optimized-method subtype recorded in the callable method
 * entry.  Code lines kept byte-identical. */
4701 switch (vm_cc_cme(cc)->def->body.optimized.type) {
/* send / call / block-call delegate to dedicated handlers and install them
 * as the call-cache fastpath so later calls skip this switch. */
4702 case OPTIMIZED_METHOD_TYPE_SEND:
4703 CC_SET_FASTPATH(cc, vm_call_opt_send, TRUE);
4704 return vm_call_opt_send(ec, cfp, calling);
4705 case OPTIMIZED_METHOD_TYPE_CALL:
4706 CC_SET_FASTPATH(cc, vm_call_opt_call, TRUE);
4707 return vm_call_opt_call(ec, cfp, calling);
4708 case OPTIMIZED_METHOD_TYPE_BLOCK_CALL:
4709 CC_SET_FASTPATH(cc, vm_call_opt_block_call, TRUE);
4710 return vm_call_opt_block_call(ec, cfp, calling);
/* Struct reader/writer go through VM_CALL_METHOD_ATTR, which (per its
 * definition earlier in this file) fires C_CALL/C_RETURN event hooks; the
 * fastpath is installed only for VM_CALL_ARGS_SIMPLE call sites. */
4711 case OPTIMIZED_METHOD_TYPE_STRUCT_AREF: {
4712 CALLER_SETUP_ARG(cfp, calling, ci, 0);
4716 VM_CALL_METHOD_ATTR(v,
4717 vm_call_opt_struct_aref(ec, cfp, calling),
4718 set_vm_cc_ivar(cc); \
4719 CC_SET_FASTPATH(cc, vm_call_opt_struct_aref, (vm_ci_flag(ci) & VM_CALL_ARGS_SIMPLE)))
4722 case OPTIMIZED_METHOD_TYPE_STRUCT_ASET: {
4723 CALLER_SETUP_ARG(cfp, calling, ci, 1);
4727 VM_CALL_METHOD_ATTR(v,
4728 vm_call_opt_struct_aset(ec, cfp, calling),
4729 set_vm_cc_ivar(cc); \
4730 CC_SET_FASTPATH(cc, vm_call_opt_struct_aset, (vm_ci_flag(ci) & VM_CALL_ARGS_SIMPLE)))
/* Unknown subtype indicates a VM bug, not a user error. */
4734 rb_bug(
"vm_call_method: unsupported optimized method type (%d)", vm_cc_cme(cc)->def->body.optimized.type);
4746 VM_ASSERT(! METHOD_ENTRY_INVALIDATED(cme));
4748 switch (cme->def->type) {
4749 case VM_METHOD_TYPE_ISEQ:
4750 if (ISEQ_BODY(def_iseq_ptr(cme->def))->param.flags.forwardable) {
4751 CC_SET_FASTPATH(cc, vm_call_iseq_fwd_setup, TRUE);
4752 return vm_call_iseq_fwd_setup(ec, cfp, calling);
4755 CC_SET_FASTPATH(cc, vm_call_iseq_setup, TRUE);
4756 return vm_call_iseq_setup(ec, cfp, calling);
4759 case VM_METHOD_TYPE_NOTIMPLEMENTED:
4760 case VM_METHOD_TYPE_CFUNC:
4761 CC_SET_FASTPATH(cc, vm_call_cfunc, TRUE);
4762 return vm_call_cfunc(ec, cfp, calling);
4764 case VM_METHOD_TYPE_ATTRSET:
4765 CALLER_SETUP_ARG(cfp, calling, ci, 1);
4769 const unsigned int aset_mask = (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT | VM_CALL_KWARG | VM_CALL_FORWARDING);
4771 if (vm_cc_markable(cc)) {
4772 vm_cc_attr_index_initialize(cc, INVALID_SHAPE_ID);
4773 VM_CALL_METHOD_ATTR(v,
4774 vm_call_attrset_direct(ec, cfp, cc, calling->recv),
4775 CC_SET_FASTPATH(cc, vm_call_attrset, !(vm_ci_flag(ci) & aset_mask)));
4781 VM_CALLCACHE_UNMARKABLE |
4782 VM_CALLCACHE_ON_STACK,
4788 .value = vm_pack_shape_and_index(INVALID_SHAPE_ID, ATTR_INDEX_NOT_SET),
4793 VM_CALL_METHOD_ATTR(v,
4794 vm_call_attrset_direct(ec, cfp, cc, calling->recv),
4795 CC_SET_FASTPATH(cc, vm_call_attrset, !(vm_ci_flag(ci) & aset_mask)));
4799 case VM_METHOD_TYPE_IVAR:
4800 CALLER_SETUP_ARG(cfp, calling, ci, 0);
4802 vm_cc_attr_index_initialize(cc, INVALID_SHAPE_ID);
4803 const unsigned int ivar_mask = (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT | VM_CALL_FORWARDING);
4804 VM_CALL_METHOD_ATTR(v,
4805 vm_call_ivar(ec, cfp, calling),
4806 CC_SET_FASTPATH(cc, vm_call_ivar, !(vm_ci_flag(ci) & ivar_mask)));
4809 case VM_METHOD_TYPE_MISSING:
4810 vm_cc_method_missing_reason_set(cc, 0);
4811 CC_SET_FASTPATH(cc, vm_call_method_missing, TRUE);
4812 return vm_call_method_missing(ec, cfp, calling);
4814 case VM_METHOD_TYPE_BMETHOD:
4815 CC_SET_FASTPATH(cc, vm_call_bmethod, TRUE);
4816 return vm_call_bmethod(ec, cfp, calling);
4818 case VM_METHOD_TYPE_ALIAS:
4819 CC_SET_FASTPATH(cc, vm_call_alias, TRUE);
4820 return vm_call_alias(ec, cfp, calling);
4822 case VM_METHOD_TYPE_OPTIMIZED:
4823 return vm_call_optimized(ec, cfp, calling, ci, cc);
4825 case VM_METHOD_TYPE_UNDEF:
4828 case VM_METHOD_TYPE_ZSUPER:
4829 return vm_call_zsuper(ec, cfp, calling, RCLASS_ORIGIN(vm_cc_cme(cc)->defined_class));
4831 case VM_METHOD_TYPE_REFINED:
4834 return vm_call_refined(ec, cfp, calling);
4837 rb_bug(
"vm_call_method: unsupported method type (%d)", vm_cc_cme(cc)->def->type);
4847 const int stat = ci_missing_reason(ci);
4849 if (vm_ci_mid(ci) == idMethodMissing) {
4850 if (UNLIKELY(calling->heap_argv)) {
4855 VALUE *argv = STACK_ADDR_FROM_TOP(calling->argc);
4856 vm_raise_method_missing(ec, calling->argc, argv, calling->recv, stat);
4860 return vm_call_method_missing_body(ec, cfp, calling, ci, stat);
4872 VALUE defined_class = me->defined_class;
4873 VALUE refined_class = RCLASS_REFINED_CLASS(defined_class);
4874 return NIL_P(refined_class) ? defined_class : refined_class;
4883 VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));
4885 if (vm_cc_cme(cc) != NULL) {
4886 switch (METHOD_ENTRY_VISI(vm_cc_cme(cc))) {
4887 case METHOD_VISI_PUBLIC:
4888 return vm_call_method_each_type(ec, cfp, calling);
4890 case METHOD_VISI_PRIVATE:
4891 if (!(vm_ci_flag(ci) & VM_CALL_FCALL)) {
4892 enum method_missing_reason stat = MISSING_PRIVATE;
4893 if (vm_ci_flag(ci) & VM_CALL_VCALL) stat |= MISSING_VCALL;
4895 vm_cc_method_missing_reason_set(cc, stat);
4896 CC_SET_FASTPATH(cc, vm_call_method_missing, TRUE);
4897 return vm_call_method_missing(ec, cfp, calling);
4899 return vm_call_method_each_type(ec, cfp, calling);
4901 case METHOD_VISI_PROTECTED:
4902 if (!(vm_ci_flag(ci) & (VM_CALL_OPT_SEND | VM_CALL_FCALL))) {
4903 VALUE defined_class = vm_defined_class_for_protected_call(vm_cc_cme(cc));
4905 vm_cc_method_missing_reason_set(cc, MISSING_PROTECTED);
4906 return vm_call_method_missing(ec, cfp, calling);
4910 VM_ASSERT(vm_cc_cme(cc) != NULL);
4913 calling->cc = &cc_on_stack;
4914 return vm_call_method_each_type(ec, cfp, calling);
4917 return vm_call_method_each_type(ec, cfp, calling);
4920 rb_bug(
"unreachable");
4924 return vm_call_method_nome(ec, cfp, calling);
4931 RB_DEBUG_COUNTER_INC(ccf_general);
4932 return vm_call_method(ec, reg_cfp, calling);
4938 VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
4939 VM_ASSERT(cc != vm_cc_empty());
4941 *(vm_call_handler *)&cc->call_ = vm_call_general;
4947 RB_DEBUG_COUNTER_INC(ccf_super_method);
4952 if (ec == NULL) rb_bug(
"unreachable");
4955 VM_ASSERT(vm_cc_call(calling->cc) == vm_call_super_method);
4956 return vm_call_method(ec, reg_cfp, calling);
4962vm_search_normal_superclass(
VALUE klass)
4967 klass =
RBASIC(klass)->klass;
4969 klass = RCLASS_ORIGIN(klass);
4973NORETURN(
static void vm_super_outside(
void));
4976vm_super_outside(
void)
4982empty_cc_for_super(
void)
4984 return &vm_empty_cc_for_super;
4990 VALUE current_defined_class;
4997 current_defined_class = vm_defined_class_for_protected_call(me);
5000 reg_cfp->iseq != method_entry_iseqptr(me) &&
5003 RCLASS_INCLUDER(current_defined_class) : current_defined_class;
5007 "self has wrong type to call super in this context: "
5008 "%"PRIsVALUE
" (expected %"PRIsVALUE
")",
5013 if (me->def->type == VM_METHOD_TYPE_BMETHOD && (vm_ci_flag(cd->ci) & VM_CALL_ZSUPER)) {
5015 "implicit argument passing of super from method defined"
5016 " by define_method() is not supported."
5017 " Specify all arguments explicitly.");
5020 ID mid = me->def->original_id;
5022 if (!vm_ci_markable(cd->ci)) {
5023 VM_FORCE_WRITE((
const VALUE *)&cd->ci->mid, (
VALUE)mid);
5027 cd->ci = vm_ci_new_runtime(mid,
5030 vm_ci_kwarg(cd->ci));
5037 VALUE klass = vm_search_normal_superclass(me->defined_class);
5041 cc = vm_cc_new(klass, NULL, vm_call_method_missing, cc_type_super);
5045 cc = vm_search_method_fastpath((
VALUE)reg_cfp->iseq, cd, klass);
5049 if (cached_cme == NULL) {
5051 cd->cc = empty_cc_for_super();
5053 else if (cached_cme->called_id != mid) {
5056 cc = vm_cc_new(klass, cme, vm_call_super_method, cc_type_super);
5060 cd->cc = cc = empty_cc_for_super();
5064 switch (cached_cme->def->type) {
5066 case VM_METHOD_TYPE_REFINED:
5068 case VM_METHOD_TYPE_ATTRSET:
5069 case VM_METHOD_TYPE_IVAR:
5070 vm_cc_call_set(cc, vm_call_super_method);
5078 VM_ASSERT((vm_cc_cme(cc),
true));
5086block_proc_is_lambda(
const VALUE procval)
5091 GetProcPtr(procval, proc);
5092 return proc->is_lambda;
5100block_proc_namespace(
const VALUE procval)
5105 GetProcPtr(procval, proc);
5116 VALUE self,
int argc,
const VALUE *argv,
int kw_splat,
VALUE block_handler,
5119 int is_lambda = FALSE;
5120 VALUE val, arg, blockarg;
5122 const struct vm_ifunc *ifunc = captured->code.ifunc;
5127 else if (argc == 0) {
5134 blockarg = rb_vm_bh_to_procval(ec, block_handler);
5136 frame_flag = VM_FRAME_MAGIC_IFUNC | VM_FRAME_FLAG_CFRAME | (me ? VM_FRAME_FLAG_BMETHOD : 0);
5138 frame_flag |= VM_FRAME_FLAG_CFRAME_KW;
5141 vm_push_frame(ec, (
const rb_iseq_t *)captured->code.ifunc,
5144 VM_GUARDED_PREV_EP(captured->ep),
5146 0, ec->cfp->sp, 0, 0);
5147 val = (*ifunc->func)(arg, (
VALUE)ifunc->data, argc, argv, blockarg);
5148 rb_vm_pop_frame(ec);
5156 return vm_yield_with_cfunc(ec, captured, captured->self, argc, argv, 0, VM_BLOCK_HANDLER_NONE, NULL);
5162 return rb_sym_proc_call(
SYM2ID(symbol), argc, argv, kw_splat, rb_vm_bh_to_procval(ec, block_handler));
5171 CHECK_VM_STACK_OVERFLOW(cfp, ISEQ_BODY(iseq)->param.lead_num);
5173 for (i=0; i<
len && i<ISEQ_BODY(iseq)->param.lead_num; i++) {
5181vm_callee_setup_block_arg_arg0_check(
VALUE *argv)
5183 VALUE ary, arg0 = argv[0];
5188 VM_ASSERT(argv[0] == arg0);
5196 if (rb_simple_iseq_p(iseq)) {
5200 CALLER_SETUP_ARG(cfp, calling, ci, ISEQ_BODY(iseq)->param.lead_num);
5202 if (arg_setup_type == arg_setup_block &&
5203 calling->argc == 1 &&
5204 ISEQ_BODY(iseq)->param.flags.has_lead &&
5205 !ISEQ_BODY(iseq)->param.flags.ambiguous_param0 &&
5206 !
NIL_P(arg0 = vm_callee_setup_block_arg_arg0_check(argv))) {
5207 calling->argc = vm_callee_setup_block_arg_arg0_splat(cfp, iseq, argv, arg0);
5210 if (calling->argc != ISEQ_BODY(iseq)->param.lead_num) {
5211 if (arg_setup_type == arg_setup_block) {
5212 if (calling->argc < ISEQ_BODY(iseq)->param.lead_num) {
5214 CHECK_VM_STACK_OVERFLOW(cfp, ISEQ_BODY(iseq)->param.lead_num);
5215 for (i=calling->argc; i<ISEQ_BODY(iseq)->param.lead_num; i++) argv[i] =
Qnil;
5216 calling->argc = ISEQ_BODY(iseq)->param.lead_num;
5218 else if (calling->argc > ISEQ_BODY(iseq)->param.lead_num) {
5219 calling->argc = ISEQ_BODY(iseq)->param.lead_num;
5223 argument_arity_error(ec, iseq, NULL, calling->argc, ISEQ_BODY(iseq)->param.lead_num, ISEQ_BODY(iseq)->param.lead_num);
5230 return setup_parameters_complex(ec, iseq, calling, ci, argv, arg_setup_type);
5239 calling = &calling_entry;
5240 calling->argc = argc;
5241 calling->block_handler = block_handler;
5242 calling->kw_splat = (flags & VM_CALL_KW_SPLAT) ? 1 : 0;
5244 calling->heap_argv = 0;
5246 struct rb_callinfo dummy_ci = VM_CI_ON_STACK(0, flags, 0, 0);
5248 return vm_callee_setup_block_arg(ec, calling, &dummy_ci, iseq, argv, arg_setup_type);
5256 bool is_lambda,
VALUE block_handler)
5259 const rb_iseq_t *iseq = rb_iseq_check(captured->code.iseq);
5260 const int arg_size = ISEQ_BODY(iseq)->param.size;
5261 VALUE *
const rsp = GET_SP() - calling->argc;
5262 VALUE *
const argv = rsp;
5263 int opt_pc = vm_callee_setup_block_arg(ec, calling, ci, iseq, argv, is_lambda ? arg_setup_method : arg_setup_block);
5264 int frame_flag = VM_FRAME_MAGIC_BLOCK | (is_lambda ? VM_FRAME_FLAG_LAMBDA : 0);
5268 if (calling->proc_ns) {
5269 frame_flag |= VM_FRAME_FLAG_NS_SWITCH;
5272 vm_push_frame(ec, iseq,
5275 VM_GUARDED_PREV_EP(captured->ep), 0,
5276 ISEQ_BODY(iseq)->iseq_encoded + opt_pc,
5278 ISEQ_BODY(iseq)->local_table_size - arg_size, ISEQ_BODY(iseq)->stack_max);
5286 MAYBE_UNUSED(
bool is_lambda),
VALUE block_handler)
5288 VALUE symbol = VM_BH_TO_SYMBOL(block_handler);
5289 int flags = vm_ci_flag(ci);
5291 if (UNLIKELY(!(flags & VM_CALL_ARGS_SIMPLE) &&
5292 ((calling->argc == 0) ||
5293 (calling->argc == 1 && (flags & (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT))) ||
5294 (calling->argc == 2 && (flags & VM_CALL_ARGS_SPLAT) && (flags & VM_CALL_KW_SPLAT)) ||
5295 ((flags & VM_CALL_KWARG) && (vm_ci_kwarg(ci)->keyword_len == calling->argc))))) {
5296 CALLER_SETUP_ARG(reg_cfp, calling, ci, ALLOW_HEAP_ARGV);
5298 if (UNLIKELY(calling->heap_argv)) {
5299#if VM_ARGC_STACK_MAX < 0
5301 rb_raise(rb_eArgError,
"no receiver given");
5307 reg_cfp->sp[-1] = reg_cfp->sp[-2];
5308 reg_cfp->sp[-2] = calling->recv;
5309 flags |= VM_CALL_ARGS_SPLAT;
5312 if (calling->argc < 1) {
5313 rb_raise(rb_eArgError,
"no receiver given");
5315 calling->recv = TOPN(--calling->argc);
5317 if (calling->kw_splat) {
5318 flags |= VM_CALL_KW_SPLAT;
5322 if (calling->argc < 1) {
5323 rb_raise(rb_eArgError,
"no receiver given");
5325 calling->recv = TOPN(--calling->argc);
5328 return vm_call_symbol(ec, reg_cfp, calling, ci, symbol, flags);
5334 MAYBE_UNUSED(
bool is_lambda),
VALUE block_handler)
5339 CALLER_SETUP_ARG(ec->cfp, calling, ci, ALLOW_HEAP_ARGV_KEEP_KWSPLAT);
5340 argc = calling->argc;
5341 val = vm_yield_with_cfunc(ec, captured, captured->self, CALLING_ARGC(calling), calling->heap_argv ?
RARRAY_CONST_PTR(calling->heap_argv) : STACK_ADDR_FROM_TOP(argc), calling->kw_splat, calling->block_handler, NULL);
/* Converts a Proc's underlying rb_block into a tagged block-handler VALUE,
 * dispatching on the block's representation (iseq / ifunc / symbol / proc).
 * NOTE(review): garbled fragment — return type and braces are missing here. */
5347vm_proc_to_block_handler(
VALUE procval)
5349 const struct rb_block *block = vm_proc_block(procval);
5351 switch (vm_block_type(block)) {
5352 case block_type_iseq:
5353 return VM_BH_FROM_ISEQ_BLOCK(&block->as.captured);
5354 case block_type_ifunc:
5355 return VM_BH_FROM_IFUNC_BLOCK(&block->as.captured);
5356 case block_type_symbol:
5357 return VM_BH_FROM_SYMBOL(block->as.symbol);
5358 case block_type_proc:
5359 return VM_BH_FROM_PROC(block->as.proc);
/* All block types are covered above; reaching here is a VM bug. */
5361 VM_UNREACHABLE(vm_yield_with_proc);
5368 bool is_lambda,
VALUE block_handler)
5370 while (vm_block_handler_type(block_handler) == block_handler_type_proc) {
5371 VALUE proc = VM_BH_TO_PROC(block_handler);
5372 if (!calling->proc_ns) {
5373 calling->proc_ns = block_proc_namespace(proc);
5375 is_lambda = block_proc_is_lambda(proc);
5376 block_handler = vm_proc_to_block_handler(proc);
5379 return vm_invoke_block(ec, reg_cfp, calling, ci, is_lambda, block_handler);
5385 bool is_lambda,
VALUE block_handler)
5389 bool is_lambda,
VALUE block_handler);
/* NOTE(review): garbled tail of vm_invoke_block — selects the invoke
 * function matching the block-handler's representation, then calls it with
 * the same argument list.  Code lines kept byte-identical. */
5391 switch (vm_block_handler_type(block_handler)) {
5392 case block_handler_type_iseq: func = vm_invoke_iseq_block;
break;
5393 case block_handler_type_ifunc: func = vm_invoke_ifunc_block;
break;
5394 case block_handler_type_proc: func = vm_invoke_proc_block;
break;
5395 case block_handler_type_symbol: func = vm_invoke_symbol_block;
break;
/* A handler outside the four known types is a VM bug. */
5396 default: rb_bug(
"vm_invoke_block: unreachable");
5399 return func(ec, reg_cfp, calling, ci, is_lambda, block_handler);
5403vm_make_proc_with_iseq(
const rb_iseq_t *blockiseq)
5410 rb_bug(
"vm_make_proc_with_iseq: unreachable");
5413 captured = VM_CFP_TO_CAPTURED_BLOCK(cfp);
5414 captured->code.iseq = blockiseq;
5416 return rb_vm_make_proc(ec, captured,
rb_cProc);
5420vm_once_exec(
VALUE iseq)
5427vm_once_clear(
VALUE data)
5430 is->once.running_thread = NULL;
5442 args[0] = obj; args[1] =
Qfalse;
5444 if (!UNDEF_P(r) &&
RTEST(r)) {
5456 enum defined_type
type = (
enum defined_type)op_type;
5463 return rb_gvar_defined(
SYM2ID(obj));
5465 case DEFINED_CVAR: {
5466 const rb_cref_t *cref = vm_get_cref(GET_EP());
5467 klass = vm_get_cvar_base(cref, GET_CFP(), 0);
5472 case DEFINED_CONST_FROM: {
5473 bool allow_nil =
type == DEFINED_CONST;
5475 return vm_get_ev_const(ec, klass,
SYM2ID(obj), allow_nil,
true);
5480 return rb_ec_obj_respond_to(ec, v,
SYM2ID(obj), TRUE);
5482 case DEFINED_METHOD:{
5487 switch (METHOD_ENTRY_VISI(me)) {
5488 case METHOD_VISI_PRIVATE:
5490 case METHOD_VISI_PROTECTED:
5494 case METHOD_VISI_PUBLIC:
5498 rb_bug(
"vm_defined: unreachable: %u", (
unsigned int)METHOD_ENTRY_VISI(me));
5502 return check_respond_to_missing(obj, v);
5507 if (GET_BLOCK_HANDLER() != VM_BLOCK_HANDLER_NONE) {
5511 case DEFINED_ZSUPER:
5516 VALUE klass = vm_search_normal_superclass(me->defined_class);
5517 if (!klass)
return false;
5519 ID id = me->def->original_id;
5526 return RTEST(vm_backref_defined(ec, GET_LEP(),
FIX2INT(obj)));
5528 rb_bug(
"unimplemented defined? type (VM)");
5538 return vm_defined(ec, reg_cfp, op_type, obj, v);
/* Walks +lv+ levels up the environment-pointer chain starting from +reg_ep+,
 * one GET_PREV_EP hop per level.  NOTE(review): garbled fragment — the
 * declaration of +i+ and the final "return ep;" are missing from this chunk. */
5542vm_get_ep(
const VALUE *
const reg_ep, rb_num_t lv)
5545 const VALUE *ep = reg_ep;
5546 for (i = 0; i < lv; i++) {
5547 ep = GET_PREV_EP(ep);
/* Returns the VM-special object named by +type+ for the putspecialobject
 * instruction: the frozen core object, the cref base (cbase), or the
 * constant-definition base.  NOTE(review): garbled fragment — the switch
 * keyword line and braces are missing from this chunk. */
5553vm_get_special_object(
const VALUE *
const reg_ep,
5554 enum vm_special_object_type
type)
5557 case VM_SPECIAL_OBJECT_VMCORE:
5558 return rb_mRubyVMFrozenCore;
5559 case VM_SPECIAL_OBJECT_CBASE:
5560 return vm_get_cbase(reg_ep);
5561 case VM_SPECIAL_OBJECT_CONST_BASE:
5562 return vm_get_const_base(reg_ep);
/* An unknown value_type here means the bytecode compiler emitted a bad
 * operand — a bug, not a user error. */
5564 rb_bug(
"putspecialobject insn: unknown value_type %d",
type);
5571rb_vm_get_special_object(
const VALUE *reg_ep,
enum vm_special_object_type
type)
5573 return vm_get_special_object(reg_ep,
type);
5579 const VALUE ary2 = ary2st;
5580 VALUE tmp1 = rb_check_to_array(ary1);
5581 VALUE tmp2 = rb_check_to_array(ary2);
5602 const VALUE ary2 = ary2st;
5604 if (
NIL_P(ary2))
return ary1;
5606 VALUE tmp2 = rb_check_to_array(ary2);
5621 return vm_concat_array(ary1, ary2st);
5625rb_vm_concat_to_array(
VALUE ary1,
VALUE ary2st)
5627 return vm_concat_to_array(ary1, ary2st);
5636 VALUE tmp = rb_check_to_array(ary);
5640 else if (
RTEST(flag)) {
5653 return vm_splat_array(flag, ary);
5659 enum vm_check_match_type
type = ((int)flag) & VM_CHECKMATCH_TYPE_MASK;
5661 if (flag & VM_CHECKMATCH_ARRAY) {
5665 for (i = 0; i < n; i++) {
5667 VALUE c = check_match(ec, v, target,
type);
5676 return check_match(ec, pattern, target,
type);
5683 return vm_check_match(ec, target, pattern, flag);
5687vm_check_keyword(lindex_t bits, lindex_t idx,
const VALUE *ep)
5689 const VALUE kw_bits = *(ep - bits);
5692 unsigned int b = (
unsigned int)
FIX2ULONG(kw_bits);
5693 if ((idx < KW_SPECIFIED_BITS_MAX) && (b & (0x01 << idx)))
5706 if (RUBY_DTRACE_METHOD_ENTRY_ENABLED() ||
5707 RUBY_DTRACE_METHOD_RETURN_ENABLED() ||
5708 RUBY_DTRACE_CMETHOD_ENTRY_ENABLED() ||
5709 RUBY_DTRACE_CMETHOD_RETURN_ENABLED()) {
5713 RUBY_DTRACE_METHOD_ENTRY_HOOK(ec, 0, 0);
5716 RUBY_DTRACE_CMETHOD_ENTRY_HOOK(ec, 0, 0);
5719 RUBY_DTRACE_METHOD_RETURN_HOOK(ec, 0, 0);
5722 RUBY_DTRACE_CMETHOD_RETURN_HOOK(ec, 0, 0);
5729vm_const_get_under(
ID id, rb_num_t flags,
VALUE cbase)
5734 else if (VM_DEFINECLASS_SCOPED_P(flags)) {
5735 return rb_public_const_get_at(cbase,
id);
5743vm_check_if_class(
ID id, rb_num_t flags,
VALUE super,
VALUE klass)
5748 else if (VM_DEFINECLASS_HAS_SUPERCLASS_P(flags)) {
5753 "superclass mismatch for class %"PRIsVALUE
"",
5766vm_check_if_module(
ID id,
VALUE mod)
5785vm_declare_class(
ID id, rb_num_t flags,
VALUE cbase,
VALUE super)
5788 VALUE s = VM_DEFINECLASS_HAS_SUPERCLASS_P(flags) ? super : rb_cObject;
5796vm_declare_module(
ID id,
VALUE cbase)
5802NORETURN(
static void unmatched_redefinition(
const char *
type,
VALUE cbase,
ID id,
VALUE old));
5806 VALUE name = rb_id2str(
id);
5807 VALUE message = rb_sprintf(
"%"PRIsVALUE
" is not a %s",
5809 VALUE location = rb_const_source_location_at(cbase,
id);
5810 if (!
NIL_P(location)) {
5811 rb_str_catf(message,
"\n%"PRIsVALUE
":%"PRIsVALUE
":"
5812 " previous definition of %"PRIsVALUE
" was here",
5819vm_define_class(
ID id, rb_num_t flags,
VALUE cbase,
VALUE super)
5823 if (VM_DEFINECLASS_HAS_SUPERCLASS_P(flags) && !
RB_TYPE_P(super,
T_CLASS)) {
5825 "superclass must be an instance of Class (given an instance of %"PRIsVALUE
")",
5829 vm_check_if_namespace(cbase);
5834 if ((klass = vm_const_get_under(
id, flags, cbase)) != 0) {
5835 if (!vm_check_if_class(
id, flags, super, klass))
5836 unmatched_redefinition(
"class", cbase,
id, klass);
5840 return vm_declare_class(
id, flags, cbase, super);
5845vm_define_module(
ID id, rb_num_t flags,
VALUE cbase)
5849 vm_check_if_namespace(cbase);
5850 if ((mod = vm_const_get_under(
id, flags, cbase)) != 0) {
5851 if (!vm_check_if_module(
id, mod))
5852 unmatched_redefinition(
"module", cbase,
id, mod);
5856 return vm_declare_module(
id, cbase);
5861vm_find_or_create_class_by_id(
ID id,
5866 rb_vm_defineclass_type_t
type = VM_DEFINECLASS_TYPE(flags);
5869 case VM_DEFINECLASS_TYPE_CLASS:
5871 return vm_define_class(
id, flags, cbase, super);
5873 case VM_DEFINECLASS_TYPE_SINGLETON_CLASS:
5877 case VM_DEFINECLASS_TYPE_MODULE:
5879 return vm_define_module(
id, flags, cbase);
5882 rb_bug(
"unknown defineclass type: %d", (
int)
type);
5886static rb_method_visibility_t
5891 if (!vm_env_cref_by_cref(cfp->ep)) {
5892 return METHOD_VISI_PUBLIC;
5895 return CREF_SCOPE_VISI(vm_ec_cref(ec))->method_visi;
5904 if (!vm_env_cref_by_cref(cfp->ep)) {
5908 return CREF_SCOPE_VISI(vm_ec_cref(ec))->module_func;
5916 rb_method_visibility_t visi;
5921 visi = METHOD_VISI_PUBLIC;
5924 klass = CREF_CLASS_FOR_DEFINITION(cref);
5925 visi = vm_scope_visibility_get(ec);
5932 rb_add_method_iseq(klass,
id, (
const rb_iseq_t *)iseqval, cref, visi);
5935 RCLASS_SET_MAX_IV_COUNT(klass, rb_estimate_iv_count(klass, (
const rb_iseq_t *)iseqval));
5938 if (!is_singleton && vm_scope_module_func_check(ec)) {
5940 rb_add_method_iseq(klass,
id, (
const rb_iseq_t *)iseqval, cref, METHOD_VISI_PUBLIC);
5950 VALUE block_handler = VM_CF_BLOCK_HANDLER(GET_CFP());
5952 if (block_handler == VM_BLOCK_HANDLER_NONE) {
5953 rb_vm_localjump_error(
"no block given (yield)",
Qnil, 0);
5956 return vm_invoke_block(ec, GET_CFP(), calling, ci,
false, block_handler);
5960enum method_explorer_type {
5962 mexp_search_invokeblock,
5971 VALUE block_handler,
5972 enum method_explorer_type method_explorer
5977 int argc = vm_ci_argc(ci);
5978 VALUE recv = TOPN(argc);
5980 .block_handler = block_handler,
5981 .kw_splat = IS_ARGS_KW_SPLAT(ci) > 0,
5987 switch (method_explorer) {
5988 case mexp_search_method:
5989 calling.cc = cc = vm_search_method_fastpath((
VALUE)reg_cfp->iseq, cd,
CLASS_OF(recv));
5990 val = vm_cc_call(cc)(ec, GET_CFP(), &calling);
5992 case mexp_search_super:
5993 calling.cc = cc = vm_search_super_method(reg_cfp, cd, recv);
5994 val = vm_cc_call(cc)(ec, GET_CFP(), &calling);
5996 case mexp_search_invokeblock:
5997 val = vm_invokeblock_i(ec, GET_CFP(), &calling);
6014 if (vm_ci_flag(cd->ci) & VM_CALL_FORWARDING) {
6015 bh = vm_caller_setup_fwd_args(GET_EC(), GET_CFP(), cd, blockiseq,
false, &adjusted_cd, &adjusted_ci);
6017 val = vm_sendish(ec, GET_CFP(), &adjusted_cd.cd, bh, mexp_search_method);
6019 if (cd->cc != adjusted_cd.cd.cc && vm_cc_markable(adjusted_cd.cd.cc)) {
6024 bh = vm_caller_setup_arg_block(ec, GET_CFP(), cd->ci, blockiseq,
false);
6025 val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_method);
6036 VALUE bh = VM_BLOCK_HANDLER_NONE;
6037 VALUE val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_method);
6052 if (vm_ci_flag(cd->ci) & VM_CALL_FORWARDING) {
6053 bh = vm_caller_setup_fwd_args(GET_EC(), GET_CFP(), cd, blockiseq,
true, &adjusted_cd, &adjusted_ci);
6055 val = vm_sendish(ec, GET_CFP(), &adjusted_cd.cd, bh, mexp_search_super);
6057 if (cd->cc != adjusted_cd.cd.cc && vm_cc_markable(adjusted_cd.cd.cc)) {
6062 bh = vm_caller_setup_arg_block(ec, GET_CFP(), cd->ci, blockiseq,
true);
6063 val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_super);
6074 VALUE bh = VM_BLOCK_HANDLER_NONE;
6075 VALUE val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_invokeblock);
6103 if (check_method_basic_definition(vm_cc_cme(cc))) {
6112 if (check_cfunc(vm_cc_cme(cc), rb_mod_to_s)) {
6118 val = rb_mod_to_s(recv);
6124 if (check_cfunc(vm_cc_cme(cc), rb_nil_to_s)) {
6125 return rb_nil_to_s(recv);
6129 if (check_cfunc(vm_cc_cme(cc), rb_true_to_s)) {
6130 return rb_true_to_s(recv);
6134 if (check_cfunc(vm_cc_cme(cc), rb_false_to_s)) {
6135 return rb_false_to_s(recv);
6139 if (check_cfunc(vm_cc_cme(cc), rb_int_to_s)) {
6140 return rb_fix_to_s(recv);
6148vm_opt_ary_freeze(
VALUE ary,
int bop,
ID id)
6150 if (BASIC_OP_UNREDEFINED_P(bop, ARRAY_REDEFINED_OP_FLAG)) {
6159vm_opt_hash_freeze(
VALUE hash,
int bop,
ID id)
6161 if (BASIC_OP_UNREDEFINED_P(bop, HASH_REDEFINED_OP_FLAG)) {
6170vm_opt_str_freeze(
VALUE str,
int bop,
ID id)
6172 if (BASIC_OP_UNREDEFINED_P(bop, STRING_REDEFINED_OP_FLAG)) {
6186 if (BASIC_OP_UNREDEFINED_P(BOP_INCLUDE_P, ARRAY_REDEFINED_OP_FLAG)) {
6190 VALUE args[1] = {target};
6193 RUBY_DTRACE_CREATE_HOOK(ARRAY,
RARRAY_LEN(ary));
6196 return rb_vm_call_with_refinements(ec, dupary, idIncludeP, 1, args,
RB_NO_KEYWORDS);
6203 return vm_opt_duparray_include_p(ec, ary, target);
6209 if (BASIC_OP_UNREDEFINED_P(BOP_MAX, ARRAY_REDEFINED_OP_FLAG)) {
6214 VALUE result = *ptr;
6215 rb_snum_t i = num - 1;
6217 const VALUE v = *++ptr;
6218 if (OPTIMIZED_CMP(v, result) > 0) {
6233 return vm_opt_newarray_max(ec, num, ptr);
6239 if (BASIC_OP_UNREDEFINED_P(BOP_MIN, ARRAY_REDEFINED_OP_FLAG)) {
6244 VALUE result = *ptr;
6245 rb_snum_t i = num - 1;
6247 const VALUE v = *++ptr;
6248 if (OPTIMIZED_CMP(v, result) < 0) {
6263 return vm_opt_newarray_min(ec, num, ptr);
6270 if (BASIC_OP_UNREDEFINED_P(BOP_HASH, ARRAY_REDEFINED_OP_FLAG)) {
6271 return rb_ary_hash_values(num, ptr);
6281 return vm_opt_newarray_hash(ec, num, ptr);
6290 if (BASIC_OP_UNREDEFINED_P(BOP_INCLUDE_P, ARRAY_REDEFINED_OP_FLAG)) {
6292 VALUE ary = rb_setup_fake_ary(&fake_ary,
ptr, num);
6296 VALUE args[1] = {target};
6304 return vm_opt_newarray_include_p(ec, num,
ptr, target);
6310 if (BASIC_OP_UNREDEFINED_P(BOP_PACK, ARRAY_REDEFINED_OP_FLAG)) {
6312 VALUE ary = rb_setup_fake_ary(&fake_ary,
ptr, num);
6313 return rb_ec_pack_ary(ec,
ary, fmt, (UNDEF_P(buffer) ?
Qnil : buffer));
6323 if (!UNDEF_P(buffer)) {
6324 args[1] = rb_hash_new_with_size(1);
6325 rb_hash_aset(args[1],
ID2SYM(idBuffer), buffer);
6330 return rb_vm_call_with_refinements(ec,
rb_ary_new4(num,
ptr), idPack, argc, args, kw_splat);
6337 return vm_opt_newarray_pack_buffer(ec, num,
ptr, fmt, buffer);
6343 return vm_opt_newarray_pack_buffer(ec, num,
ptr, fmt,
Qundef);
6349vm_track_constant_cache(
ID id,
void *ic)
6352 struct rb_id_table *const_cache = vm->constant_cache;
6353 VALUE lookup_result;
6356 if (rb_id_table_lookup(const_cache,
id, &lookup_result)) {
6360 ics = set_init_numtable();
6361 rb_id_table_insert(const_cache,
id, (
VALUE)ics);
6376 vm->inserting_constant_cache_id = id;
6378 set_insert(ics, (st_data_t)ic);
6380 vm->inserting_constant_cache_id = (
ID)0;
6387 for (
int i = 0; segments[i]; i++) {
6388 ID id = segments[i];
6389 if (
id == idNULL)
continue;
6390 vm_track_constant_cache(
id, ic);
6399 if ((flags & IMEMO_CONST_CACHE_SHAREABLE) || rb_ractor_main_p()) {
6400 VM_ASSERT(ractor_incidental_shareable_p(flags & IMEMO_CONST_CACHE_SHAREABLE, value));
6402 return (ic_cref == NULL ||
6403 ic_cref == vm_get_cref(reg_ep));
6411 VM_ASSERT(IMEMO_TYPE_P(ice, imemo_constcache));
6412 return vm_inlined_ic_hit_p(ice->flags, ice->value, ice->ic_cref, reg_ep);
6417rb_vm_ic_hit_p(
IC ic,
const VALUE *reg_ep)
6419 return ic->entry && vm_ic_hit_p(ic->entry, reg_ep);
6425 if (ruby_vm_const_missing_count > 0) {
6426 ruby_vm_const_missing_count = 0;
6433 ice->ic_cref = vm_get_const_key_cref(reg_ep);
6438 unsigned pos = (unsigned)(pc - ISEQ_BODY(iseq)->iseq_encoded);
6439 rb_yjit_constant_ic_update(iseq, ic, pos);
6448 if (ice && vm_ic_hit_p(ice, GET_EP())) {
6451 VM_ASSERT(val == vm_get_ev_const_chain(ec, segments));
6454 ruby_vm_constant_cache_misses++;
6455 val = vm_get_ev_const_chain(ec, segments);
6456 vm_ic_track_const_chain(GET_CFP(), ic, segments);
6459 vm_ic_update(GET_ISEQ(), ic, val, GET_EP(), GET_PC() - 2);
6471 if (is->once.running_thread == RUNNING_THREAD_ONCE_DONE) {
6472 return is->once.value;
6474 else if (is->once.running_thread == NULL) {
6476 is->once.running_thread = th;
6480 is->once.running_thread = RUNNING_THREAD_ONCE_DONE;
6483 else if (is->once.running_thread == th) {
6485 return vm_once_exec((
VALUE)iseq);
6489 RUBY_VM_CHECK_INTS(ec);
6496vm_case_dispatch(CDHASH hash, OFFSET else_offset,
VALUE key)
6498 switch (OBJ_BUILTIN_TYPE(key)) {
6504 if (BASIC_OP_UNREDEFINED_P(BOP_EQQ,
6505 SYMBOL_REDEFINED_OP_FLAG |
6506 INTEGER_REDEFINED_OP_FLAG |
6507 FLOAT_REDEFINED_OP_FLAG |
6508 NIL_REDEFINED_OP_FLAG |
6509 TRUE_REDEFINED_OP_FLAG |
6510 FALSE_REDEFINED_OP_FLAG |
6511 STRING_REDEFINED_OP_FLAG)) {
6515 if (!isinf(kval) && modf(kval, &kval) == 0.0) {
6519 if (rb_hash_stlike_lookup(hash, key, &val)) {
6539 const ptrdiff_t nsp = VM_SP_CNT(ec, cfp->sp);
6540 const ptrdiff_t nbp = VM_SP_CNT(ec, bp);
6541 static const char stack_consistency_error[] =
6542 "Stack consistency error (sp: %"PRIdPTRDIFF
", bp: %"PRIdPTRDIFF
")";
6543#if defined RUBY_DEVEL
6544 VALUE mesg = rb_sprintf(stack_consistency_error, nsp, nbp);
6549 rb_bug(stack_consistency_error, nsp, nbp);
6556 if (FIXNUM_2_P(recv, obj) &&
6557 BASIC_OP_UNREDEFINED_P(BOP_PLUS, INTEGER_REDEFINED_OP_FLAG)) {
6558 return rb_fix_plus_fix(recv, obj);
6560 else if (FLONUM_2_P(recv, obj) &&
6561 BASIC_OP_UNREDEFINED_P(BOP_PLUS, FLOAT_REDEFINED_OP_FLAG)) {
6569 BASIC_OP_UNREDEFINED_P(BOP_PLUS, FLOAT_REDEFINED_OP_FLAG)) {
6574 BASIC_OP_UNREDEFINED_P(BOP_PLUS, STRING_REDEFINED_OP_FLAG)) {
6575 return rb_str_opt_plus(recv, obj);
6579 BASIC_OP_UNREDEFINED_P(BOP_PLUS, ARRAY_REDEFINED_OP_FLAG)) {
6590 if (FIXNUM_2_P(recv, obj) &&
6591 BASIC_OP_UNREDEFINED_P(BOP_MINUS, INTEGER_REDEFINED_OP_FLAG)) {
6592 return rb_fix_minus_fix(recv, obj);
6594 else if (FLONUM_2_P(recv, obj) &&
6595 BASIC_OP_UNREDEFINED_P(BOP_MINUS, FLOAT_REDEFINED_OP_FLAG)) {
6603 BASIC_OP_UNREDEFINED_P(BOP_MINUS, FLOAT_REDEFINED_OP_FLAG)) {
6614 if (FIXNUM_2_P(recv, obj) &&
6615 BASIC_OP_UNREDEFINED_P(BOP_MULT, INTEGER_REDEFINED_OP_FLAG)) {
6616 return rb_fix_mul_fix(recv, obj);
6618 else if (FLONUM_2_P(recv, obj) &&
6619 BASIC_OP_UNREDEFINED_P(BOP_MULT, FLOAT_REDEFINED_OP_FLAG)) {
6627 BASIC_OP_UNREDEFINED_P(BOP_MULT, FLOAT_REDEFINED_OP_FLAG)) {
6638 if (FIXNUM_2_P(recv, obj) &&
6639 BASIC_OP_UNREDEFINED_P(BOP_DIV, INTEGER_REDEFINED_OP_FLAG)) {
6640 return (
FIX2LONG(obj) == 0) ?
Qundef : rb_fix_div_fix(recv, obj);
6642 else if (FLONUM_2_P(recv, obj) &&
6643 BASIC_OP_UNREDEFINED_P(BOP_DIV, FLOAT_REDEFINED_OP_FLAG)) {
6644 return rb_flo_div_flo(recv, obj);
6651 BASIC_OP_UNREDEFINED_P(BOP_DIV, FLOAT_REDEFINED_OP_FLAG)) {
6652 return rb_flo_div_flo(recv, obj);
6662 if (FIXNUM_2_P(recv, obj) &&
6663 BASIC_OP_UNREDEFINED_P(BOP_MOD, INTEGER_REDEFINED_OP_FLAG)) {
6664 return (
FIX2LONG(obj) == 0) ?
Qundef : rb_fix_mod_fix(recv, obj);
6666 else if (FLONUM_2_P(recv, obj) &&
6667 BASIC_OP_UNREDEFINED_P(BOP_MOD, FLOAT_REDEFINED_OP_FLAG)) {
6675 BASIC_OP_UNREDEFINED_P(BOP_MOD, FLOAT_REDEFINED_OP_FLAG)) {
6686 if (vm_method_cfunc_is(iseq, cd, recv, rb_obj_not_equal)) {
6687 VALUE val = opt_equality(iseq, recv, obj, cd_eq);
6689 if (!UNDEF_P(val)) {
6690 return RBOOL(!
RTEST(val));
6700 if (FIXNUM_2_P(recv, obj) &&
6701 BASIC_OP_UNREDEFINED_P(BOP_LT, INTEGER_REDEFINED_OP_FLAG)) {
6704 else if (FLONUM_2_P(recv, obj) &&
6705 BASIC_OP_UNREDEFINED_P(BOP_LT, FLOAT_REDEFINED_OP_FLAG)) {
6713 BASIC_OP_UNREDEFINED_P(BOP_LT, FLOAT_REDEFINED_OP_FLAG)) {
6725 if (FIXNUM_2_P(recv, obj) &&
6726 BASIC_OP_UNREDEFINED_P(BOP_LE, INTEGER_REDEFINED_OP_FLAG)) {
6729 else if (FLONUM_2_P(recv, obj) &&
6730 BASIC_OP_UNREDEFINED_P(BOP_LE, FLOAT_REDEFINED_OP_FLAG)) {
6738 BASIC_OP_UNREDEFINED_P(BOP_LE, FLOAT_REDEFINED_OP_FLAG)) {
6750 if (FIXNUM_2_P(recv, obj) &&
6751 BASIC_OP_UNREDEFINED_P(BOP_GT, INTEGER_REDEFINED_OP_FLAG)) {
6754 else if (FLONUM_2_P(recv, obj) &&
6755 BASIC_OP_UNREDEFINED_P(BOP_GT, FLOAT_REDEFINED_OP_FLAG)) {
6763 BASIC_OP_UNREDEFINED_P(BOP_GT, FLOAT_REDEFINED_OP_FLAG)) {
6775 if (FIXNUM_2_P(recv, obj) &&
6776 BASIC_OP_UNREDEFINED_P(BOP_GE, INTEGER_REDEFINED_OP_FLAG)) {
6779 else if (FLONUM_2_P(recv, obj) &&
6780 BASIC_OP_UNREDEFINED_P(BOP_GE, FLOAT_REDEFINED_OP_FLAG)) {
6788 BASIC_OP_UNREDEFINED_P(BOP_GE, FLOAT_REDEFINED_OP_FLAG)) {
6805 BASIC_OP_UNREDEFINED_P(BOP_LTLT, STRING_REDEFINED_OP_FLAG)) {
6814 BASIC_OP_UNREDEFINED_P(BOP_LTLT, ARRAY_REDEFINED_OP_FLAG)) {
6832 BASIC_OP_UNREDEFINED_P(BOP_AND, INTEGER_REDEFINED_OP_FLAG)) {
6843 if (FIXNUM_2_P(recv, obj) &&
6844 BASIC_OP_UNREDEFINED_P(BOP_OR, INTEGER_REDEFINED_OP_FLAG)) {
6856 if (FIXNUM_2_P(recv, obj) &&
6857 BASIC_OP_UNREDEFINED_P(BOP_AREF, INTEGER_REDEFINED_OP_FLAG)) {
6858 return rb_fix_aref(recv, obj);
6863 BASIC_OP_UNREDEFINED_P(BOP_AREF, ARRAY_REDEFINED_OP_FLAG)) {
6865 return rb_ary_entry_internal(recv,
FIX2LONG(obj));
6868 return rb_ary_aref1(recv, obj);
6872 BASIC_OP_UNREDEFINED_P(BOP_AREF, HASH_REDEFINED_OP_FLAG)) {
6873 return rb_hash_aref(recv, obj);
6887 BASIC_OP_UNREDEFINED_P(BOP_ASET, ARRAY_REDEFINED_OP_FLAG) &&
6893 BASIC_OP_UNREDEFINED_P(BOP_ASET, HASH_REDEFINED_OP_FLAG)) {
6894 rb_hash_aset(recv, obj, set);
6906 BASIC_OP_UNREDEFINED_P(BOP_AREF, HASH_REDEFINED_OP_FLAG) &&
6907 rb_hash_compare_by_id_p(recv) ==
Qfalse &&
6908 !
FL_TEST(recv, RHASH_PROC_DEFAULT)) {
6909 return rb_hash_aref(recv, key);
6919 return vm_opt_aref_with(recv, key);
6926 BASIC_OP_UNREDEFINED_P(BOP_ASET, HASH_REDEFINED_OP_FLAG) &&
6927 rb_hash_compare_by_id_p(recv) ==
Qfalse) {
6928 return rb_hash_aset(recv, key, val);
6938 return vm_opt_aset_with(recv, key, value);
6942vm_opt_length(
VALUE recv,
int bop)
6948 BASIC_OP_UNREDEFINED_P(bop, STRING_REDEFINED_OP_FLAG)) {
6949 if (bop == BOP_EMPTY_P) {
6950 return LONG2NUM(RSTRING_LEN(recv));
6957 BASIC_OP_UNREDEFINED_P(bop, ARRAY_REDEFINED_OP_FLAG)) {
6961 BASIC_OP_UNREDEFINED_P(bop, HASH_REDEFINED_OP_FLAG)) {
6970vm_opt_empty_p(
VALUE recv)
6972 switch (vm_opt_length(recv, BOP_EMPTY_P)) {
6985 BASIC_OP_UNREDEFINED_P(BOP_NIL_P, NIL_REDEFINED_OP_FLAG)) {
6988 else if (vm_method_cfunc_is(iseq, cd, recv, rb_false)) {
7004 case RSHIFT(~0UL, 1):
7007 return rb_uint2big(1UL << (SIZEOF_LONG * CHAR_BIT - 2));
7025vm_opt_succ(
VALUE recv)
7028 BASIC_OP_UNREDEFINED_P(BOP_SUCC, INTEGER_REDEFINED_OP_FLAG)) {
7029 return fix_succ(recv);
7035 BASIC_OP_UNREDEFINED_P(BOP_SUCC, STRING_REDEFINED_OP_FLAG)) {
7046 if (vm_method_cfunc_is(iseq, cd, recv, rb_obj_not)) {
7047 return RBOOL(!
RTEST(recv));
7062 BASIC_OP_UNREDEFINED_P(BOP_MATCH, STRING_REDEFINED_OP_FLAG)) {
7066 BASIC_OP_UNREDEFINED_P(BOP_MATCH, REGEXP_REDEFINED_OP_FLAG)) {
7084 VALUE self = GET_SELF();
7086 VM_ASSERT(rb_popcount64((uint64_t)event) == 1);
7088 if (event & global_hooks->events) {
7091 vm_dtrace(event, ec);
7092 rb_exec_event_hook_orig(ec, global_hooks, event, self, 0, 0, 0 , val, 0);
7098 if (local_hooks != NULL) {
7099 if (event & local_hooks->events) {
7102 rb_exec_event_hook_orig(ec, local_hooks, event, self, 0, 0, 0 , val, 0);
7108#define VM_TRACE_HOOK(target_event, val) do { \
7109 if ((pc_events & (target_event)) & enabled_flags) { \
7110 vm_trace_hook(ec, reg_cfp, pc, pc_events, (target_event), global_hooks, local_hooks_ptr, (val)); \
7117 VM_ASSERT(VM_FRAME_RUBYFRAME_P(cfp));
7118 VM_ASSERT(ISEQ_BODY(cfp->iseq)->type == ISEQ_TYPE_RESCUE);
7119 return cfp->ep[VM_ENV_INDEX_LAST_LVAR];
7125 const VALUE *pc = reg_cfp->pc;
7126 rb_event_flag_t enabled_flags = ruby_vm_event_flags & ISEQ_TRACE_EVENTS;
7129 if (enabled_flags == 0 && ruby_vm_event_local_num == 0) {
7135 size_t pos = pc - ISEQ_BODY(iseq)->iseq_encoded;
7138 rb_hook_list_t *
const *local_hooks_ptr = &iseq->aux.exec.local_hooks;
7139 rb_event_flag_t iseq_local_events = local_hooks != NULL ? local_hooks->events : 0;
7143 const bool bmethod_frame = VM_FRAME_BMETHOD_P(reg_cfp);
7144 enabled_flags |= iseq_local_events;
7146 VM_ASSERT((iseq_local_events & ~ISEQ_TRACE_EVENTS) == 0);
7148 if (bmethod_frame) {
7150 VM_ASSERT(me->def->type == VM_METHOD_TYPE_BMETHOD);
7151 bmethod_local_hooks = me->def->body.bmethod.hooks;
7152 bmethod_local_hooks_ptr = &me->def->body.bmethod.hooks;
7153 if (bmethod_local_hooks) {
7154 bmethod_local_events = bmethod_local_hooks->events;
7159 if ((pc_events & enabled_flags) == 0 && !bmethod_frame) {
7163 rb_iseq_trace_set(iseq, vm_event_flags & ISEQ_TRACE_EVENTS);
7171 else if (ec->trace_arg != NULL) {
7179 rb_event_flag_t bmethod_events = global_events | bmethod_local_events;
7182 ruby_debug_printf(
"vm_trace>>%4d (%4x) - %s:%d %s\n",
7185 RSTRING_PTR(rb_iseq_path(iseq)),
7186 (
int)rb_iseq_line_no(iseq, pos),
7187 RSTRING_PTR(rb_iseq_label(iseq)));
7189 VM_ASSERT(reg_cfp->pc == pc);
7190 VM_ASSERT(pc_events != 0);
7200 VM_TRACE_HOOK(RUBY_EVENT_COVERAGE_LINE,
Qundef);
7201 VM_TRACE_HOOK(RUBY_EVENT_COVERAGE_BRANCH,
Qundef);
7219#if VM_CHECK_MODE > 0
7220NORETURN( NOINLINE( COLDFUNC
7221void rb_vm_canary_is_found_dead(
enum ruby_vminsn_type i,
VALUE c)));
7224Init_vm_stack_canary(
void)
7227 int n = ruby_fill_random_bytes(&vm_stack_canary,
sizeof vm_stack_canary,
false);
7228 vm_stack_canary |= 0x01;
7230 vm_stack_canary_was_born =
true;
7235rb_vm_canary_is_found_dead(
enum ruby_vminsn_type i,
VALUE c)
7239 const char *insn = rb_insns_name(i);
7243 rb_bug(
"dead canary found at %s: %s", insn, str);
7247void Init_vm_stack_canary(
void) { }
7279 return (*(rb_invoke_funcptr0_t)funcptr)(ec, self);
7286 return (*(rb_invoke_funcptr1_t)funcptr)(ec, self, argv[0]);
7293 return (*(rb_invoke_funcptr2_t)funcptr)(ec, self, argv[0], argv[1]);
7300 return (*(rb_invoke_funcptr3_t)funcptr)(ec, self, argv[0], argv[1], argv[2]);
7307 return (*(rb_invoke_funcptr4_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3]);
7314 return (*(rb_invoke_funcptr5_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4]);
7321 return (*(rb_invoke_funcptr6_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
7328 return (*(rb_invoke_funcptr7_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
7335 return (*(rb_invoke_funcptr8_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
7341 typedef VALUE (*rb_invoke_funcptr9_t)(
rb_execution_context_t *ec,
VALUE self,
VALUE v1,
VALUE v2,
VALUE v3,
VALUE v4,
VALUE v5,
VALUE v6,
VALUE v7,
VALUE v8,
VALUE v9);
7342 return (*(rb_invoke_funcptr9_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
7348 typedef VALUE (*rb_invoke_funcptr10_t)(
rb_execution_context_t *ec,
VALUE self,
VALUE v1,
VALUE v2,
VALUE v3,
VALUE v4,
VALUE v5,
VALUE v6,
VALUE v7,
VALUE v8,
VALUE v9,
VALUE v10);
7349 return (*(rb_invoke_funcptr10_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
7355 typedef VALUE (*rb_invoke_funcptr11_t)(
rb_execution_context_t *ec,
VALUE self,
VALUE v1,
VALUE v2,
VALUE v3,
VALUE v4,
VALUE v5,
VALUE v6,
VALUE v7,
VALUE v8,
VALUE v9,
VALUE v10,
VALUE v11);
7356 return (*(rb_invoke_funcptr11_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
7362 typedef VALUE (*rb_invoke_funcptr12_t)(
rb_execution_context_t *ec,
VALUE self,
VALUE v1,
VALUE v2,
VALUE v3,
VALUE v4,
VALUE v5,
VALUE v6,
VALUE v7,
VALUE v8,
VALUE v9,
VALUE v10,
VALUE v11,
VALUE v12);
7363 return (*(rb_invoke_funcptr12_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
7369 typedef VALUE (*rb_invoke_funcptr13_t)(
rb_execution_context_t *ec,
VALUE self,
VALUE v1,
VALUE v2,
VALUE v3,
VALUE v4,
VALUE v5,
VALUE v6,
VALUE v7,
VALUE v8,
VALUE v9,
VALUE v10,
VALUE v11,
VALUE v12,
VALUE v13);
7370 return (*(rb_invoke_funcptr13_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
7376 typedef VALUE (*rb_invoke_funcptr14_t)(
rb_execution_context_t *ec,
VALUE self,
VALUE v1,
VALUE v2,
VALUE v3,
VALUE v4,
VALUE v5,
VALUE v6,
VALUE v7,
VALUE v8,
VALUE v9,
VALUE v10,
VALUE v11,
VALUE v12,
VALUE v13,
VALUE v14);
7377 return (*(rb_invoke_funcptr14_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
7383 typedef VALUE (*rb_invoke_funcptr15_t)(
rb_execution_context_t *ec,
VALUE self,
VALUE v1,
VALUE v2,
VALUE v3,
VALUE v4,
VALUE v5,
VALUE v6,
VALUE v7,
VALUE v8,
VALUE v9,
VALUE v10,
VALUE v11,
VALUE v12,
VALUE v13,
VALUE v14,
VALUE v15);
7384 return (*(rb_invoke_funcptr15_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
7389static builtin_invoker
7390lookup_builtin_invoker(
int argc)
7392 static const builtin_invoker invokers[] = {
7411 return invokers[argc];
7417 const bool canary_p = ISEQ_BODY(reg_cfp->iseq)->builtin_attrs & BUILTIN_ATTR_LEAF;
7418 SETUP_CANARY(canary_p);
7419 rb_insn_func_t func_ptr = (rb_insn_func_t)(uintptr_t)bf->func_ptr;
7420 VALUE ret = (*lookup_builtin_invoker(bf->argc))(ec, reg_cfp->self, argv, func_ptr);
7421 CHECK_CANARY(canary_p, BIN(invokebuiltin));
7428 return invoke_bf(ec, cfp, bf, argv);
7435 fputs(
"vm_invoke_builtin_delegate: passing -> ", stderr);
7436 for (
int i=0; i<bf->argc; i++) {
7437 ruby_debug_printf(
":%s ", rb_id2name(ISEQ_BODY(cfp->iseq)->local_table[i+start_index]));
7439 ruby_debug_printf(
"\n" "%s %s(%d):%p\n", RUBY_FUNCTION_NAME_STRING, bf->name, bf->argc,
7440 (
void *)(uintptr_t)bf->func_ptr);
7443 if (bf->argc == 0) {
7444 return invoke_bf(ec, cfp, bf, NULL);
7447 const VALUE *argv = cfp->ep - ISEQ_BODY(cfp->iseq)->local_table_size - VM_ENV_DATA_SIZE + 1 + start_index;
7448 return invoke_bf(ec, cfp, bf, argv);
7458 return cfp->ep[index];
#define RUBY_ASSERT(...)
Asserts that the given expression is truthy if and only if RUBY_DEBUG is truthy.
#define RUBY_EVENT_END
Encountered an end of a class clause.
#define RUBY_EVENT_C_CALL
A method, written in C, is called.
#define RUBY_EVENT_B_RETURN
Encountered a next statement.
#define RUBY_EVENT_CLASS
Encountered a new class.
#define RUBY_EVENT_LINE
Encountered a new line.
#define RUBY_EVENT_RETURN
Encountered a return statement.
#define RUBY_EVENT_C_RETURN
Return from a method, written in C.
#define RUBY_EVENT_B_CALL
Encountered an yield statement.
uint32_t rb_event_flag_t
Represents event(s).
#define RUBY_EVENT_CALL
A method, written in Ruby, is called.
#define RUBY_EVENT_RESCUE
Encountered a rescue statement.
VALUE rb_singleton_class(VALUE obj)
Finds or creates the singleton class of the passed object.
VALUE rb_module_new(void)
Creates a new, anonymous module.
VALUE rb_class_inherited(VALUE super, VALUE klass)
Calls Class::inherited.
VALUE rb_define_class_id(ID id, VALUE super)
This is a very badly designed API that creates an anonymous class.
#define TYPE(_)
Old name of rb_type.
#define REALLOC_N
Old name of RB_REALLOC_N.
#define ALLOC
Old name of RB_ALLOC.
#define RFLOAT_VALUE
Old name of rb_float_value.
#define T_STRING
Old name of RUBY_T_STRING.
#define Qundef
Old name of RUBY_Qundef.
#define INT2FIX
Old name of RB_INT2FIX.
#define T_NIL
Old name of RUBY_T_NIL.
#define T_FLOAT
Old name of RUBY_T_FLOAT.
#define T_IMEMO
Old name of RUBY_T_IMEMO.
#define ID2SYM
Old name of RB_ID2SYM.
#define T_BIGNUM
Old name of RUBY_T_BIGNUM.
#define SPECIAL_CONST_P
Old name of RB_SPECIAL_CONST_P.
#define T_STRUCT
Old name of RUBY_T_STRUCT.
#define T_FIXNUM
Old name of RUBY_T_FIXNUM.
#define SYM2ID
Old name of RB_SYM2ID.
#define CLASS_OF
Old name of rb_class_of.
#define rb_ary_new4
Old name of rb_ary_new_from_values.
#define FIXABLE
Old name of RB_FIXABLE.
#define LONG2FIX
Old name of RB_INT2FIX.
#define FIX2INT
Old name of RB_FIX2INT.
#define T_MODULE
Old name of RUBY_T_MODULE.
#define STATIC_SYM_P
Old name of RB_STATIC_SYM_P.
#define ASSUME
Old name of RBIMPL_ASSUME.
#define FIX2ULONG
Old name of RB_FIX2ULONG.
#define T_TRUE
Old name of RUBY_T_TRUE.
#define T_ICLASS
Old name of RUBY_T_ICLASS.
#define T_HASH
Old name of RUBY_T_HASH.
#define ALLOC_N
Old name of RB_ALLOC_N.
#define FL_TEST_RAW
Old name of RB_FL_TEST_RAW.
#define rb_ary_new3
Old name of rb_ary_new_from_args.
#define LONG2NUM
Old name of RB_LONG2NUM.
#define rb_exc_new3
Old name of rb_exc_new_str.
#define T_FALSE
Old name of RUBY_T_FALSE.
#define Qtrue
Old name of RUBY_Qtrue.
#define Qnil
Old name of RUBY_Qnil.
#define Qfalse
Old name of RUBY_Qfalse.
#define FIX2LONG
Old name of RB_FIX2LONG.
#define T_ARRAY
Old name of RUBY_T_ARRAY.
#define T_OBJECT
Old name of RUBY_T_OBJECT.
#define NIL_P
Old name of RB_NIL_P.
#define T_SYMBOL
Old name of RUBY_T_SYMBOL.
#define DBL2NUM
Old name of rb_float_new.
#define T_CLASS
Old name of RUBY_T_CLASS.
#define BUILTIN_TYPE
Old name of RB_BUILTIN_TYPE.
#define FL_TEST
Old name of RB_FL_TEST.
#define FIXNUM_P
Old name of RB_FIXNUM_P.
#define FL_USHIFT
Old name of RUBY_FL_USHIFT.
#define FL_SET_RAW
Old name of RB_FL_SET_RAW.
#define SYMBOL_P
Old name of RB_SYMBOL_P.
void rb_notimplement(void)
void rb_exc_raise(VALUE mesg)
Raises an exception in the current thread.
#define ruby_verbose
This variable controls whether the interpreter is in debug mode.
VALUE rb_eTypeError
TypeError exception.
VALUE rb_eFatal
fatal exception.
VALUE rb_eNoMethodError
NoMethodError exception.
void rb_exc_fatal(VALUE mesg)
Raises a fatal error in the current thread.
VALUE rb_eRuntimeError
RuntimeError exception.
void rb_warn(const char *fmt,...)
Identical to rb_warning(), except it reports unless $VERBOSE is nil.
void rb_error_frozen_object(VALUE frozen_obj)
Identical to rb_error_frozen(), except it takes arbitrary Ruby object instead of C's string.
VALUE rb_exc_new_str(VALUE etype, VALUE str)
Identical to rb_exc_new_cstr(), except it takes a Ruby's string instead of C's.
@ RB_WARN_CATEGORY_STRICT_UNUSED_BLOCK
Warning is for checking unused block strictly.
VALUE rb_cClass
Class class.
VALUE rb_cArray
Array class.
VALUE rb_obj_alloc(VALUE klass)
Allocates an instance of the given class.
VALUE rb_cRegexp
Regexp class.
VALUE rb_obj_frozen_p(VALUE obj)
Just calls RB_OBJ_FROZEN() inside.
VALUE rb_cHash
Hash class.
VALUE rb_obj_class(VALUE obj)
Queries the class of an object.
VALUE rb_inspect(VALUE obj)
Generates a human-readable textual representation of the given object.
VALUE rb_cBasicObject
BasicObject class.
VALUE rb_cModule
Module class.
VALUE rb_class_real(VALUE klass)
Finds a "real" class.
VALUE rb_obj_is_kind_of(VALUE obj, VALUE klass)
Queries if the given object is an instance (of possibly descendants) of the given class.
VALUE rb_cFloat
Float class.
VALUE rb_cProc
Proc class.
VALUE rb_cString
String class.
#define RB_OBJ_WRITTEN(old, oldv, young)
Identical to RB_OBJ_WRITE(), except it doesn't write any values, but only a WB declaration.
#define RB_OBJ_WRITE(old, slot, young)
Declaration of a "back" pointer.
VALUE rb_ary_concat(VALUE lhs, VALUE rhs)
Destructively appends the contents of latter into the end of former.
VALUE rb_ary_shift(VALUE ary)
Destructively deletes an element from the beginning of the passed array and returns what was deleted.
VALUE rb_ary_resurrect(VALUE ary)
I guess there is no use case of this function in extension libraries, but this is a routine identical...
VALUE rb_ary_dup(VALUE ary)
Duplicates an array.
VALUE rb_ary_includes(VALUE ary, VALUE elem)
Queries if the passed array has the passed entry.
VALUE rb_ary_plus(VALUE lhs, VALUE rhs)
Creates a new array, concatenating the former to the latter.
VALUE rb_ary_cat(VALUE ary, const VALUE *train, long len)
Destructively appends multiple elements at the end of the array.
VALUE rb_check_array_type(VALUE obj)
Try converting an object to its array representation using its to_ary method, if any.
VALUE rb_ary_new(void)
Allocates a new, empty array.
VALUE rb_ary_pop(VALUE ary)
Destructively deletes an element from the end of the passed array and returns what was deleted.
VALUE rb_ary_hidden_new(long capa)
Allocates a hidden (no class) empty array.
VALUE rb_ary_push(VALUE ary, VALUE elem)
Special case of rb_ary_cat() that it adds only one element.
VALUE rb_ary_entry(VALUE ary, long off)
Queries an element of an array.
void rb_ary_store(VALUE ary, long key, VALUE val)
Destructively stores the passed value to the passed array's passed index.
#define UNLIMITED_ARGUMENTS
This macro is used in conjunction with rb_check_arity().
static int rb_check_arity(int argc, int min, int max)
Ensures that the passed integer is in the passed range.
VALUE rb_proc_call_with_block(VALUE recv, int argc, const VALUE *argv, VALUE proc)
Identical to rb_proc_call(), except you can additionally pass another proc object,...
VALUE rb_reg_last_match(VALUE md)
This just returns the argument, stringified.
VALUE rb_reg_match(VALUE re, VALUE str)
This is the match operator.
VALUE rb_reg_nth_match(int n, VALUE md)
Queries the nth captured substring.
VALUE rb_reg_match_post(VALUE md)
The portion of the original string after the given match.
VALUE rb_reg_nth_defined(int n, VALUE md)
Identical to rb_reg_nth_match(), except it just returns Boolean.
VALUE rb_reg_match_pre(VALUE md)
The portion of the original string before the given match.
VALUE rb_reg_match_last(VALUE md)
The portion of the original string that captured at the very last.
VALUE rb_str_append(VALUE dst, VALUE src)
Identical to rb_str_buf_append(), except it converts the right hand side before concatenating.
VALUE rb_str_succ(VALUE orig)
Searches for the "successor" of a string.
VALUE rb_str_buf_append(VALUE dst, VALUE src)
Identical to rb_str_cat_cstr(), except it takes Ruby's string instead of C's.
VALUE rb_str_concat(VALUE dst, VALUE src)
Identical to rb_str_append(), except it also accepts an integer as a codepoint.
#define rb_str_cat_cstr(buf, str)
Identical to rb_str_cat(), except it assumes the passed pointer is a pointer to a C string.
VALUE rb_str_length(VALUE)
Identical to rb_str_strlen(), except it returns the value in rb_cInteger.
VALUE rb_str_intern(VALUE str)
Identical to rb_to_symbol(), except it assumes the receiver being an instance of RString.
void rb_thread_schedule(void)
Tries to switch to another thread.
VALUE rb_const_get(VALUE space, ID name)
Identical to rb_const_defined(), except it returns the actual defined value.
VALUE rb_ivar_set(VALUE obj, ID name, VALUE val)
Identical to rb_iv_set(), except it accepts the name as an ID instead of a C string.
void rb_cvar_set(VALUE klass, ID name, VALUE val)
Assigns a value to a class variable.
VALUE rb_cvar_find(VALUE klass, ID name, VALUE *front)
Identical to rb_cvar_get(), except it takes additional "front" pointer.
VALUE rb_ivar_get(VALUE obj, ID name)
Identical to rb_iv_get(), except it accepts the name as an ID instead of a C string.
void rb_const_set(VALUE space, ID name, VALUE val)
Names a constant.
VALUE rb_autoload_load(VALUE space, ID name)
Kicks the autoload procedure as if it was "touched".
VALUE rb_mod_name(VALUE mod)
Queries the name of a module.
VALUE rb_const_get_at(VALUE space, ID name)
Identical to rb_const_defined_at(), except it returns the actual defined value.
void rb_set_class_path_string(VALUE klass, VALUE space, VALUE name)
Identical to rb_set_class_path(), except it accepts the name as Ruby's string instead of C's.
VALUE rb_ivar_defined(VALUE obj, ID name)
Queries if the instance variable is defined at the object.
int rb_const_defined_at(VALUE space, ID name)
Identical to rb_const_defined(), except it doesn't look for parent classes.
VALUE rb_cvar_defined(VALUE klass, ID name)
Queries if the given class has the given class variable.
VALUE rb_class_path(VALUE mod)
Identical to rb_mod_name(), except it returns #<Class: ...> style inspection for anonymous modules.
int rb_const_defined(VALUE space, ID name)
Queries if the constant is defined at the namespace.
VALUE rb_check_funcall(VALUE recv, ID mid, int argc, const VALUE *argv)
Identical to rb_funcallv(), except it returns RUBY_Qundef instead of raising rb_eNoMethodError.
rb_alloc_func_t rb_get_alloc_func(VALUE klass)
Queries the allocator function of a class.
void rb_define_alloc_func(VALUE klass, rb_alloc_func_t func)
Sets the allocator function of a class.
int rb_method_boundp(VALUE klass, ID id, int ex)
Queries if the klass has this method.
ID rb_check_id(volatile VALUE *namep)
Detects if the given name is already interned or not.
VALUE rb_sym2str(VALUE symbol)
Obtain a frozen string representation of a symbol (not including the leading colon).
int off
Offset inside of ptr.
int len
Length of the buffer.
static bool rb_ractor_shareable_p(VALUE obj)
Queries if multiple Ractors can share the passed object or not.
#define RB_OBJ_SHAREABLE_P(obj)
Queries if the passed object has previously classified as shareable or not.
#define MEMCPY(p1, p2, type, n)
Handy macro to call memcpy.
#define ALLOCA_N(type, n)
#define RB_GC_GUARD(v)
Prevents premature destruction of local objects.
#define MEMMOVE(p1, p2, type, n)
Handy macro to call memmove.
VALUE type(ANYARGS)
ANYARGS-ed function type.
VALUE rb_ensure(type *q, VALUE w, type *e, VALUE r)
An equivalent of ensure clause.
#define RARRAY_LEN
Just another name of rb_array_len.
static int RARRAY_LENINT(VALUE ary)
Identical to rb_array_len(), except it differs for the return type.
static VALUE * RARRAY_PTR(VALUE ary)
Wild use of a C pointer.
#define RARRAY_AREF(a, i)
#define RARRAY_CONST_PTR
Just another name of rb_array_const_ptr.
static VALUE RBASIC_CLASS(VALUE obj)
Queries the class of an object.
#define RBASIC(obj)
Convenient casting macro.
#define RCLASS_SUPER
Just another name of rb_class_get_superclass.
#define RHASH_SIZE(h)
Queries the size of the hash.
#define RHASH_EMPTY_P(h)
Checks if the hash is empty.
static VALUE * ROBJECT_FIELDS(VALUE obj)
Queries the instance variables.
#define StringValueCStr(v)
Identical to StringValuePtr, except it additionally checks the contents for viability as a C string (i.e. contains no interior NUL bytes).
#define RB_PASS_KEYWORDS
Pass keywords, final argument should be a hash of keywords.
#define RB_NO_KEYWORDS
Do not pass keywords.
static bool RB_SPECIAL_CONST_P(VALUE obj)
Checks if the given object is of enum ruby_special_consts.
#define RTEST
This is an old name of RB_TEST.
#define ANYARGS
Functions declared using this macro take arbitrary arguments, including void.
const VALUE ary[1]
Embedded elements.
const VALUE * ptr
Pointer to the C array that holds the elements of the array.
const ID * segments
A null-terminated list of ids, used to represent a constant's path; idNULL is used to represent the "::" prefix of a top-level constant lookup.
rb_cref_t * cref
class reference, should be marked
const rb_iseq_t * iseqptr
iseq pointer, should be separated from iseqval
Internal header for Namespace.
IFUNC (Internal FUNCtion)
const VALUE cref_or_me
class reference or rb_method_entry_t
intptr_t SIGNED_VALUE
A signed integer type that has the same width with VALUE.
uintptr_t ID
Type that represents a Ruby identifier such as a variable name.
#define SIZEOF_VALUE
Identical to sizeof(VALUE), except it is a macro that can also be used inside of preprocessor directives such as #if.
uintptr_t VALUE
Type that represents a Ruby object.
static enum ruby_value_type RB_BUILTIN_TYPE(VALUE obj)
Queries the type of the object.
static bool RB_FLOAT_TYPE_P(VALUE obj)
Queries if the object is an instance of rb_cFloat.
static bool RB_TYPE_P(VALUE obj, enum ruby_value_type t)
Queries if the given object is of given type.