11#include "ruby/internal/config.h"
15#ifdef HAVE_STDATOMIC_H
16 #include <stdatomic.h>
20#include "debug_counter.h"
22#include "internal/class.h"
23#include "internal/compar.h"
24#include "internal/hash.h"
25#include "internal/numeric.h"
26#include "internal/proc.h"
27#include "internal/random.h"
28#include "internal/variable.h"
29#include "internal/set_table.h"
30#include "internal/struct.h"
35#include "insns_info.inc"
41 int argc,
const VALUE *argv,
int priv);
51ruby_vm_special_exception_copy(
VALUE exc)
54 rb_obj_copy_ivar(e, exc);
62 VALUE mesg = rb_ec_vm_ptr(ec)->special_exceptions[ruby_error_sysstack];
63 ec->raised_flag = RAISED_STACKOVERFLOW;
65 VALUE at = rb_ec_backtrace_object(ec);
66 mesg = ruby_vm_special_exception_copy(mesg);
71 EC_JUMP_TAG(ec, TAG_RAISE);
74NORETURN(
static void vm_stackoverflow(
void));
79 ec_stack_overflow(GET_EC(), TRUE);
92 rb_bug(
"system stack overflow during GC. Faulty native extension?");
95 ec->raised_flag = RAISED_STACKOVERFLOW;
96 ec->errinfo = rb_ec_vm_ptr(ec)->special_exceptions[ruby_error_stackfatal];
97 EC_JUMP_TAG(ec, TAG_RAISE);
99 ec_stack_overflow(ec, crit == 0);
106callable_class_p(
VALUE klass)
108#if VM_CHECK_MODE >= 2
109 if (!klass)
return FALSE;
137 VM_ASSERT(IMEMO_TYPE_P((
VALUE)cme, imemo_ment),
"imemo_type:%s", rb_imemo_name(imemo_type((
VALUE)cme)));
139 if (callable_class_p(cme->defined_class)) {
149vm_check_frame_detail(
VALUE type,
int req_block,
int req_me,
int req_cref,
VALUE specval,
VALUE cref_or_me,
int is_cframe,
const rb_iseq_t *iseq)
151 unsigned int magic = (
unsigned int)(
type & VM_FRAME_MAGIC_MASK);
152 enum imemo_type cref_or_me_type = imemo_env;
155 cref_or_me_type = imemo_type(cref_or_me);
157 if (
type & VM_FRAME_FLAG_BMETHOD) {
161 if (req_block && (
type & VM_ENV_FLAG_LOCAL) == 0) {
162 rb_bug(
"vm_push_frame: specval (%p) should be a block_ptr on %x frame", (
void *)specval, magic);
164 if (!req_block && (
type & VM_ENV_FLAG_LOCAL) != 0) {
165 rb_bug(
"vm_push_frame: specval (%p) should not be a block_ptr on %x frame", (
void *)specval, magic);
169 if (cref_or_me_type != imemo_ment) {
170 rb_bug(
"vm_push_frame: (%s) should be method entry on %x frame", rb_obj_info(cref_or_me), magic);
174 if (req_cref && cref_or_me_type != imemo_cref) {
175 rb_bug(
"vm_push_frame: (%s) should be CREF on %x frame", rb_obj_info(cref_or_me), magic);
178 if (cref_or_me !=
Qfalse && cref_or_me_type != imemo_cref) {
179 if (((
type & VM_FRAME_FLAG_LAMBDA) || magic == VM_FRAME_MAGIC_IFUNC) && (cref_or_me_type == imemo_ment)) {
183 rb_bug(
"vm_push_frame: (%s) should be false or cref on %x frame", rb_obj_info(cref_or_me), magic);
189 if (cref_or_me_type == imemo_ment) {
192 if (!callable_method_entry_p(me)) {
193 rb_bug(
"vm_push_frame: ment (%s) should be callable on %x frame.", rb_obj_info(cref_or_me), magic);
197 if ((
type & VM_FRAME_MAGIC_MASK) == VM_FRAME_MAGIC_DUMMY) {
198 VM_ASSERT(iseq == NULL ||
200 RUBY_VM_NORMAL_ISEQ_P(iseq)
204 VM_ASSERT(is_cframe == !RUBY_VM_NORMAL_ISEQ_P(iseq));
214 VALUE given_magic =
type & VM_FRAME_MAGIC_MASK;
217#define CHECK(magic, req_block, req_me, req_cref, is_cframe) \
219 vm_check_frame_detail(type, req_block, req_me, req_cref, \
220 specval, cref_or_me, is_cframe, iseq); \
222 switch (given_magic) {
224 CHECK(VM_FRAME_MAGIC_METHOD, TRUE, TRUE, FALSE, FALSE);
225 CHECK(VM_FRAME_MAGIC_CLASS, TRUE, FALSE, TRUE, FALSE);
226 CHECK(VM_FRAME_MAGIC_TOP, TRUE, FALSE, TRUE, FALSE);
227 CHECK(VM_FRAME_MAGIC_CFUNC, TRUE, TRUE, FALSE, TRUE);
228 CHECK(VM_FRAME_MAGIC_BLOCK, FALSE, FALSE, FALSE, FALSE);
229 CHECK(VM_FRAME_MAGIC_IFUNC, FALSE, FALSE, FALSE, TRUE);
230 CHECK(VM_FRAME_MAGIC_EVAL, FALSE, FALSE, FALSE, FALSE);
231 CHECK(VM_FRAME_MAGIC_RESCUE, FALSE, FALSE, FALSE, FALSE);
232 CHECK(VM_FRAME_MAGIC_DUMMY, TRUE, FALSE, FALSE, FALSE);
234 rb_bug(
"vm_push_frame: unknown type (%x)", (
unsigned int)given_magic);
239static VALUE vm_stack_canary;
240static bool vm_stack_canary_was_born =
false;
247 unsigned int pos = 0;
248 while (pos < ISEQ_BODY(iseq)->iseq_size) {
249 int opcode = rb_vm_insn_addr2opcode((
void *)ISEQ_BODY(iseq)->iseq_encoded[pos]);
250 unsigned int next_pos = pos + insn_len(opcode);
251 if (ISEQ_BODY(iseq)->iseq_encoded + next_pos == pc) {
256 rb_bug(
"failed to find the previous insn");
265 if (! LIKELY(vm_stack_canary_was_born)) {
268 else if ((
VALUE *)reg_cfp == ec->vm_stack + ec->vm_stack_size) {
272 else if (! (iseq = GET_ISEQ())) {
275 else if (LIKELY(sp[0] != vm_stack_canary)) {
284 const VALUE *orig = rb_iseq_original_iseq(iseq);
285 const VALUE iseqw = rb_iseqw_new(iseq);
287 const char *stri = rb_str_to_cstr(inspection);
288 const VALUE disasm = rb_iseq_disasm(iseq);
289 const char *strd = rb_str_to_cstr(disasm);
290 const ptrdiff_t pos = previous_insn_index(iseq, GET_PC());
291 const enum ruby_vminsn_type insn = (
enum ruby_vminsn_type)orig[pos];
292 const char *name = insn_name(insn);
298 "We are killing the stack canary set by %s, "
299 "at %s@pc=%"PRIdPTR
"\n"
300 "watch out the C stack trace.\n"
302 name, stri, pos, strd);
303 rb_bug(
"see above.");
305#define vm_check_canary(ec, sp) rb_vm_check_canary(ec, sp)
308#define vm_check_canary(ec, sp)
309#define vm_check_frame(a, b, c, d)
314vm_push_frame_debug_counter_inc(
321 RB_DEBUG_COUNTER_INC(frame_push);
323 if (RUBY_VM_END_CONTROL_FRAME(ec) != prev_cfp) {
324 const bool curr = VM_FRAME_RUBYFRAME_P(reg_cfp);
325 const bool prev = VM_FRAME_RUBYFRAME_P(prev_cfp);
328 RB_DEBUG_COUNTER_INC(frame_R2R);
331 RB_DEBUG_COUNTER_INC(frame_R2C);
336 RB_DEBUG_COUNTER_INC(frame_C2R);
339 RB_DEBUG_COUNTER_INC(frame_C2C);
344 switch (
type & VM_FRAME_MAGIC_MASK) {
345 case VM_FRAME_MAGIC_METHOD: RB_DEBUG_COUNTER_INC(frame_push_method);
return;
346 case VM_FRAME_MAGIC_BLOCK: RB_DEBUG_COUNTER_INC(frame_push_block);
return;
347 case VM_FRAME_MAGIC_CLASS: RB_DEBUG_COUNTER_INC(frame_push_class);
return;
348 case VM_FRAME_MAGIC_TOP: RB_DEBUG_COUNTER_INC(frame_push_top);
return;
349 case VM_FRAME_MAGIC_CFUNC: RB_DEBUG_COUNTER_INC(frame_push_cfunc);
return;
350 case VM_FRAME_MAGIC_IFUNC: RB_DEBUG_COUNTER_INC(frame_push_ifunc);
return;
351 case VM_FRAME_MAGIC_EVAL: RB_DEBUG_COUNTER_INC(frame_push_eval);
return;
352 case VM_FRAME_MAGIC_RESCUE: RB_DEBUG_COUNTER_INC(frame_push_rescue);
return;
353 case VM_FRAME_MAGIC_DUMMY: RB_DEBUG_COUNTER_INC(frame_push_dummy);
return;
356 rb_bug(
"unreachable");
359#define vm_push_frame_debug_counter_inc(ec, cfp, t)
364rb_vm_stack_canary(
void)
367 return vm_stack_canary;
373STATIC_ASSERT(VM_ENV_DATA_INDEX_ME_CREF, VM_ENV_DATA_INDEX_ME_CREF == -2);
374STATIC_ASSERT(VM_ENV_DATA_INDEX_SPECVAL, VM_ENV_DATA_INDEX_SPECVAL == -1);
375STATIC_ASSERT(VM_ENV_DATA_INDEX_FLAGS, VM_ENV_DATA_INDEX_FLAGS == -0);
391 vm_check_frame(
type, specval, cref_or_me, iseq);
392 VM_ASSERT(local_size >= 0);
395 CHECK_VM_STACK_OVERFLOW0(cfp, sp, local_size + stack_max);
396 vm_check_canary(ec, sp);
401 for (
int i=0; i < local_size; i++) {
428 #if defined HAVE_DECL_ATOMIC_SIGNAL_FENCE && HAVE_DECL_ATOMIC_SIGNAL_FENCE
429 atomic_signal_fence(memory_order_seq_cst);
437 vm_push_frame_debug_counter_inc(ec, cfp,
type);
445 if (VMDEBUG == 2) SDR();
447 ec->cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
454 VALUE flags = ep[VM_ENV_DATA_INDEX_FLAGS];
456 if (VMDEBUG == 2) SDR();
458 RUBY_VM_CHECK_INTS(ec);
459 ec->cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
461 return flags & VM_FRAME_FLAG_FINISH;
467 vm_pop_frame(ec, ec->cfp, ec->cfp->ep);
475 rb_iseq_t *dmy_iseq = rb_iseq_alloc_with_dummy_path(fname);
479 VM_FRAME_MAGIC_DUMMY | VM_ENV_FLAG_LOCAL | VM_FRAME_FLAG_FINISH,
481 VM_BLOCK_HANDLER_NONE,
488 return (
VALUE)dmy_iseq;
493rb_arity_error_new(
int argc,
int min,
int max)
495 VALUE err_mess = rb_sprintf(
"wrong number of arguments (given %d, expected %d", argc, min);
503 rb_str_catf(err_mess,
"..%d", max);
510rb_error_arity(
int argc,
int min,
int max)
517NOINLINE(
static void vm_env_write_slowpath(
const VALUE *ep,
int index,
VALUE v));
520vm_env_write_slowpath(
const VALUE *ep,
int index,
VALUE v)
523 rb_gc_writebarrier_remember(VM_ENV_ENVVAL(ep));
524 VM_FORCE_WRITE(&ep[index], v);
525 VM_ENV_FLAGS_UNSET(ep, VM_ENV_FLAG_WB_REQUIRED);
526 RB_DEBUG_COUNTER_INC(lvar_set_slowpath);
531vm_env_write(
const VALUE *ep,
int index,
VALUE v)
533 VALUE flags = ep[VM_ENV_DATA_INDEX_FLAGS];
534 if (LIKELY((flags & VM_ENV_FLAG_WB_REQUIRED) == 0)) {
535 VM_STACK_ENV_WRITE(ep, index, v);
538 vm_env_write_slowpath(ep, index, v);
543rb_vm_env_write(
const VALUE *ep,
int index,
VALUE v)
545 vm_env_write(ep, index, v);
551 if (block_handler == VM_BLOCK_HANDLER_NONE) {
555 switch (vm_block_handler_type(block_handler)) {
556 case block_handler_type_iseq:
557 case block_handler_type_ifunc:
558 return rb_vm_make_proc(ec, VM_BH_TO_CAPT_BLOCK(block_handler),
rb_cProc);
559 case block_handler_type_symbol:
560 return rb_sym_to_proc(VM_BH_TO_SYMBOL(block_handler));
561 case block_handler_type_proc:
562 return VM_BH_TO_PROC(block_handler);
564 VM_UNREACHABLE(rb_vm_bh_to_procval);
573vm_svar_valid_p(
VALUE svar)
576 switch (imemo_type(svar)) {
585 rb_bug(
"vm_svar_valid_p: unknown type: %s", rb_obj_info(svar));
595 if (lep && (ec == NULL || ec->root_lep != lep)) {
596 svar = lep[VM_ENV_DATA_INDEX_ME_CREF];
599 svar = ec->root_svar;
602 VM_ASSERT(svar ==
Qfalse || vm_svar_valid_p(svar));
610 VM_ASSERT(vm_svar_valid_p((
VALUE)svar));
612 if (lep && (ec == NULL || ec->root_lep != lep)) {
613 vm_env_write(lep, VM_ENV_DATA_INDEX_ME_CREF, (
VALUE)svar);
616 RB_OBJ_WRITE(rb_ec_thread_ptr(ec)->self, &ec->root_svar, svar);
623 const struct vm_svar *svar = lep_svar(ec, lep);
628 case VM_SVAR_LASTLINE:
629 return svar->lastline;
630 case VM_SVAR_BACKREF:
631 return svar->backref;
633 const VALUE ary = svar->others;
648 struct vm_svar *svar = IMEMO_NEW(
struct vm_svar, imemo_svar, obj);
659 struct vm_svar *svar = lep_svar(ec, lep);
662 lep_svar_write(ec, lep, svar = svar_new((
VALUE)svar));
666 case VM_SVAR_LASTLINE:
669 case VM_SVAR_BACKREF:
673 VALUE ary = svar->others;
689 val = lep_svar_get(ec, lep, key);
692 VALUE backref = lep_svar_get(ec, lep, VM_SVAR_BACKREF);
709 rb_bug(
"unexpected back-ref");
722 VALUE backref = lep_svar_get(ec, lep, VM_SVAR_BACKREF);
732 return rb_reg_last_defined(backref);
734 rb_bug(
"unexpected back-ref");
738 nth = (int)(
type >> 1);
745check_method_entry(
VALUE obj,
int can_be_svar)
747 if (obj ==
Qfalse)
return NULL;
750 if (!
RB_TYPE_P(obj,
T_IMEMO)) rb_bug(
"check_method_entry: unknown type: %s", rb_obj_info(obj));
753 switch (imemo_type(obj)) {
764 rb_bug(
"check_method_entry: svar should not be there:");
773 const VALUE *ep = cfp->ep;
776 while (!VM_ENV_LOCAL_P(ep)) {
777 if ((me = check_method_entry(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) != NULL)
return me;
778 ep = VM_ENV_PREV_EP(ep);
781 return check_method_entry(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
787 switch (me->def->type) {
788 case VM_METHOD_TYPE_ISEQ:
789 return me->def->body.iseq.
iseqptr;
798 switch (me->def->type) {
799 case VM_METHOD_TYPE_ISEQ:
800 return me->def->body.iseq.
cref;
806#if VM_CHECK_MODE == 0
810check_cref(
VALUE obj,
int can_be_svar)
812 if (obj ==
Qfalse)
return NULL;
815 if (!
RB_TYPE_P(obj,
T_IMEMO)) rb_bug(
"check_cref: unknown type: %s", rb_obj_info(obj));
818 switch (imemo_type(obj)) {
829 rb_bug(
"check_method_entry: svar should not be there:");
836vm_env_cref(
const VALUE *ep)
840 while (!VM_ENV_LOCAL_P(ep)) {
841 if ((cref = check_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) != NULL)
return cref;
842 ep = VM_ENV_PREV_EP(ep);
845 return check_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
849is_cref(
const VALUE v,
int can_be_svar)
852 switch (imemo_type(v)) {
865vm_env_cref_by_cref(
const VALUE *ep)
867 while (!VM_ENV_LOCAL_P(ep)) {
868 if (is_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE))
return TRUE;
869 ep = VM_ENV_PREV_EP(ep);
871 return is_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
875cref_replace_with_duplicated_cref_each_frame(
const VALUE *vptr,
int can_be_svar,
VALUE parent)
877 const VALUE v = *vptr;
881 switch (imemo_type(v)) {
884 new_cref = vm_cref_dup(cref);
889 VM_FORCE_WRITE(vptr, (
VALUE)new_cref);
894 return cref_replace_with_duplicated_cref_each_frame(&((
struct vm_svar *)v)->
cref_or_me, FALSE, v);
898 rb_bug(
"cref_replace_with_duplicated_cref_each_frame: unreachable");
907vm_cref_replace_with_duplicated_cref(
const VALUE *ep)
909 if (vm_env_cref_by_cref(ep)) {
913 while (!VM_ENV_LOCAL_P(ep)) {
914 envval = VM_ENV_ESCAPED_P(ep) ? VM_ENV_ENVVAL(ep) :
Qfalse;
915 if ((cref = cref_replace_with_duplicated_cref_each_frame(&ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE, envval)) != NULL) {
918 ep = VM_ENV_PREV_EP(ep);
920 envval = VM_ENV_ESCAPED_P(ep) ? VM_ENV_ENVVAL(ep) :
Qfalse;
921 return cref_replace_with_duplicated_cref_each_frame(&ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE, envval);
924 rb_bug(
"vm_cref_dup: unreachable");
929vm_get_cref(
const VALUE *ep)
937 rb_bug(
"vm_get_cref: unreachable");
942rb_vm_get_cref(
const VALUE *ep)
944 return vm_get_cref(ep);
955 return vm_get_cref(cfp->ep);
959vm_get_const_key_cref(
const VALUE *ep)
965 if (RCLASS_SINGLETON_P(CREF_CLASS(cref)) ||
966 RCLASS_CLONED_P(CREF_CLASS(cref)) ) {
969 cref = CREF_NEXT(cref);
982 if (CREF_CLASS(cref) == old_klass) {
983 new_cref = vm_cref_new_use_prev(new_klass, METHOD_VISI_UNDEF, FALSE, cref, FALSE);
984 *new_cref_ptr = new_cref;
987 new_cref = vm_cref_new_use_prev(CREF_CLASS(cref), METHOD_VISI_UNDEF, FALSE, cref, FALSE);
988 cref = CREF_NEXT(cref);
989 *new_cref_ptr = new_cref;
990 new_cref_ptr = &new_cref->next;
992 *new_cref_ptr = NULL;
1001 prev_cref = vm_env_cref(ep);
1007 prev_cref = vm_env_cref(cfp->ep);
1011 return vm_cref_new(klass, METHOD_VISI_PUBLIC, FALSE, prev_cref, pushed_by_eval, singleton);
1015vm_get_cbase(
const VALUE *ep)
1017 const rb_cref_t *cref = vm_get_cref(ep);
1019 return CREF_CLASS_FOR_DEFINITION(cref);
1023vm_get_const_base(
const VALUE *ep)
1025 const rb_cref_t *cref = vm_get_cref(ep);
1028 if (!CREF_PUSHED_BY_EVAL(cref)) {
1029 return CREF_CLASS_FOR_DEFINITION(cref);
1031 cref = CREF_NEXT(cref);
1038vm_check_if_namespace(
VALUE klass)
1041 rb_raise(
rb_eTypeError,
"%+"PRIsVALUE
" is not a class/module", klass);
1046vm_ensure_not_refinement_module(
VALUE self)
1049 rb_warn(
"not defined at the refinement, but at the outer class/module");
1065 if (
NIL_P(orig_klass) && allow_nil) {
1067 const rb_cref_t *root_cref = vm_get_cref(ec->cfp->ep);
1071 while (root_cref && CREF_PUSHED_BY_EVAL(root_cref)) {
1072 root_cref = CREF_NEXT(root_cref);
1075 while (cref && CREF_NEXT(cref)) {
1076 if (CREF_PUSHED_BY_EVAL(cref)) {
1080 klass = CREF_CLASS(cref);
1082 cref = CREF_NEXT(cref);
1084 if (!
NIL_P(klass)) {
1088 if ((ce = rb_const_lookup(klass,
id))) {
1089 rb_const_warn_if_deprecated(ce, klass,
id);
1092 if (am == klass)
break;
1094 if (is_defined)
return 1;
1095 if (rb_autoloading_value(klass,
id, &av, NULL))
return av;
1097 goto search_continue;
1104 if (UNLIKELY(!rb_ractor_main_p())) {
1106 rb_raise(rb_eRactorIsolationError,
1107 "can not access non-shareable objects in constant %"PRIsVALUE
"::%s by non-main ractor.",
rb_class_path(klass), rb_id2name(
id));
1118 if (root_cref && !
NIL_P(CREF_CLASS(root_cref))) {
1119 klass = vm_get_iclass(ec->cfp, CREF_CLASS(root_cref));
1133 vm_check_if_namespace(orig_klass);
1135 return rb_public_const_defined_from(orig_klass,
id);
1138 return rb_public_const_get_from(orig_klass,
id);
1146 return vm_get_ev_const(ec, orig_klass,
id, allow_nil ==
Qtrue, 0);
1154 int allow_nil = TRUE;
1155 if (segments[0] == idNULL) {
1160 while (segments[idx]) {
1161 ID id = segments[idx++];
1162 val = vm_get_ev_const(ec, val,
id, allow_nil, 0);
1175 rb_bug(
"vm_get_cvar_base: no cref");
1178 while (CREF_NEXT(cref) &&
1179 (
NIL_P(CREF_CLASS(cref)) || RCLASS_SINGLETON_P(CREF_CLASS(cref)) ||
1180 CREF_PUSHED_BY_EVAL(cref) || CREF_SINGLETON(cref))) {
1181 cref = CREF_NEXT(cref);
1183 if (top_level_raise && !CREF_NEXT(cref)) {
1187 klass = vm_get_iclass(cfp, CREF_CLASS(cref));
1195ALWAYS_INLINE(
static void fill_ivar_cache(
const rb_iseq_t *iseq,
IVC ic,
const struct rb_callcache *cc,
int is_attr, attr_index_t index, shape_id_t shape_id));
1197fill_ivar_cache(
const rb_iseq_t *iseq,
IVC ic,
const struct rb_callcache *cc,
int is_attr, attr_index_t index, shape_id_t shape_id)
1200 vm_cc_attr_index_set(cc, index, shape_id);
1203 vm_ic_attr_index_set(iseq, ic, index, shape_id);
1207#define ractor_incidental_shareable_p(cond, val) \
1208 (!(cond) || rb_ractor_shareable_p(val))
1209#define ractor_object_incidental_shareable_p(obj, val) \
1210 ractor_incidental_shareable_p(rb_ractor_shareable_p(obj), val)
1221 return default_value;
1224 shape_id_t shape_id = RBASIC_SHAPE_ID_FOR_READ(obj);
1234 if (UNLIKELY(!rb_ractor_main_p())) {
1242 if (default_value ==
Qundef) {
1250 ivar_list = RCLASS_PRIME_FIELDS(obj);
1256 rb_gen_fields_tbl_get(obj,
id, &fields_tbl);
1257 ivar_list = fields_tbl->as.shape.fields;
1260 return default_value;
1264 shape_id_t cached_id;
1268 vm_cc_atomic_shape_and_index(cc, &cached_id, &index);
1271 vm_ic_atomic_shape_and_index(ic, &cached_id, &index);
1274 if (LIKELY(cached_id == shape_id)) {
1277 if (index == ATTR_INDEX_NOT_SET) {
1278 return default_value;
1281 val = ivar_list[index];
1282#if USE_DEBUG_COUNTER
1283 RB_DEBUG_COUNTER_INC(ivar_get_ic_hit);
1286 RB_DEBUG_COUNTER_INC(ivar_get_obj_hit);
1292#if USE_DEBUG_COUNTER
1294 if (cached_id != INVALID_SHAPE_ID) {
1295 RB_DEBUG_COUNTER_INC(ivar_get_cc_miss_set);
1298 RB_DEBUG_COUNTER_INC(ivar_get_cc_miss_unset);
1302 if (cached_id != INVALID_SHAPE_ID) {
1303 RB_DEBUG_COUNTER_INC(ivar_get_ic_miss_set);
1306 RB_DEBUG_COUNTER_INC(ivar_get_ic_miss_unset);
1309 RB_DEBUG_COUNTER_INC(ivar_get_ic_miss);
1312 RB_DEBUG_COUNTER_INC(ivar_get_obj_miss);
1316 if (rb_shape_too_complex_p(shape_id)) {
1321 table = (
st_table *)RCLASS_FIELDS_HASH(obj);
1325 table = ROBJECT_FIELDS_HASH(obj);
1330 if (rb_gen_fields_tbl_get(obj, 0, &fields_tbl)) {
1331 table = fields_tbl->as.complex.table;
1337 if (!table || !st_lookup(table,
id, &val)) {
1338 val = default_value;
1342 shape_id_t previous_cached_id = cached_id;
1343 if (rb_shape_get_iv_index_with_hint(shape_id,
id, &index, &cached_id)) {
1346 if (cached_id != previous_cached_id) {
1347 fill_ivar_cache(iseq, ic, cc, is_attr, index, cached_id);
1350 if (index == ATTR_INDEX_NOT_SET) {
1351 val = default_value;
1355 val = ivar_list[index];
1361 vm_cc_attr_index_initialize(cc, shape_id);
1364 vm_ic_attr_index_initialize(ic, shape_id);
1367 val = default_value;
1373 if (!UNDEF_P(default_value)) {
1381 RB_DEBUG_COUNTER_INC(ivar_get_ic_miss);
1384 return rb_attr_get(obj,
id);
1392populate_cache(attr_index_t index, shape_id_t next_shape_id,
ID id,
const rb_iseq_t *iseq,
IVC ic,
const struct rb_callcache *cc,
bool is_attr)
1394 RUBY_ASSERT(!rb_shape_too_complex_p(next_shape_id));
1398 vm_cc_attr_index_set(cc, index, next_shape_id);
1401 vm_ic_attr_index_set(iseq, ic, index, next_shape_id);
1413 RB_DEBUG_COUNTER_INC(ivar_set_ic_miss);
1416 rb_check_frozen(obj);
1418 attr_index_t index = rb_obj_ivar_set(obj,
id, val);
1420 shape_id_t next_shape_id = RBASIC_SHAPE_ID(obj);
1422 if (!rb_shape_too_complex_p(next_shape_id)) {
1423 populate_cache(index, next_shape_id,
id, iseq, ic, cc, is_attr);
1426 RB_DEBUG_COUNTER_INC(ivar_set_obj_miss);
1436 return vm_setivar_slowpath(obj,
id, val, iseq, ic, NULL,
false);
1442 return vm_setivar_slowpath(obj,
id, val, NULL, NULL, cc,
true);
1445NOINLINE(
static VALUE vm_setivar_default(
VALUE obj,
ID id,
VALUE val, shape_id_t dest_shape_id, attr_index_t index));
1447vm_setivar_default(
VALUE obj,
ID id,
VALUE val, shape_id_t dest_shape_id, attr_index_t index)
1449 shape_id_t shape_id = RBASIC_SHAPE_ID(obj);
1454 if (shape_id == dest_shape_id) {
1455 RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
1457 else if (dest_shape_id != INVALID_SHAPE_ID) {
1458 rb_shape_t *dest_shape = RSHAPE(dest_shape_id);
1460 if (shape_id == dest_shape->parent_id && dest_shape->edge_name ==
id && RSHAPE_CAPACITY(shape_id) == RSHAPE_CAPACITY(dest_shape_id)) {
1461 RUBY_ASSERT(index < RSHAPE_CAPACITY(dest_shape_id));
1471 rb_gen_fields_tbl_get(obj, 0, &fields_tbl);
1473 if (shape_id != dest_shape_id) {
1474 RBASIC_SET_SHAPE_ID(obj, dest_shape_id);
1477 RB_OBJ_WRITE(obj, &fields_tbl->as.shape.fields[index], val);
1479 RB_DEBUG_COUNTER_INC(ivar_set_ic_hit);
1485vm_setivar(
VALUE obj,
ID id,
VALUE val, shape_id_t dest_shape_id, attr_index_t index)
1493 shape_id_t shape_id = RBASIC_SHAPE_ID(obj);
1494 RUBY_ASSERT(dest_shape_id == INVALID_SHAPE_ID || !rb_shape_too_complex_p(dest_shape_id));
1496 if (LIKELY(shape_id == dest_shape_id)) {
1497 RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
1500 else if (dest_shape_id != INVALID_SHAPE_ID) {
1501 rb_shape_t *dest_shape = RSHAPE(dest_shape_id);
1502 shape_id_t source_shape_id = dest_shape->parent_id;
1504 if (shape_id == source_shape_id && dest_shape->edge_name ==
id && RSHAPE_CAPACITY(shape_id) == RSHAPE_CAPACITY(dest_shape_id)) {
1505 RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
1507 RBASIC_SET_SHAPE_ID(obj, dest_shape_id);
1509 RUBY_ASSERT(rb_shape_get_next_iv_shape(source_shape_id,
id) == dest_shape_id);
1510 RUBY_ASSERT(index < RSHAPE_CAPACITY(dest_shape_id));
1525 RB_DEBUG_COUNTER_INC(ivar_set_ic_hit);
1526 RB_DEBUG_COUNTER_INC(ivar_set_obj_hit);
1532 RB_DEBUG_COUNTER_INC(ivar_set_ic_miss_noobject);
1544 VALUE defined_class = 0;
1548 defined_class =
RBASIC(defined_class)->klass;
1551 struct rb_id_table *rb_cvc_tbl = RCLASS_CVC_TBL(defined_class);
1553 rb_bug(
"the cvc table should be set");
1557 if (!rb_id_table_lookup(rb_cvc_tbl,
id, &ent_data)) {
1558 rb_bug(
"should have cvar cache entry");
1563 ent->global_cvar_state = GET_GLOBAL_CVAR_STATE();
1579 cref = vm_get_cref(GET_EP());
1581 if (ic->entry && ic->entry->global_cvar_state == GET_GLOBAL_CVAR_STATE() && ic->entry->cref == cref && LIKELY(rb_ractor_main_p())) {
1582 RB_DEBUG_COUNTER_INC(cvar_read_inline_hit);
1584 VALUE v = rb_ivar_lookup(ic->entry->class_value,
id,
Qundef);
1590 VALUE klass = vm_get_cvar_base(cref, reg_cfp, 1);
1592 return update_classvariable_cache(iseq, klass,
id, cref, ic);
1598 return vm_getclassvariable(iseq, cfp,
id, ic);
1605 cref = vm_get_cref(GET_EP());
1607 if (ic->entry && ic->entry->global_cvar_state == GET_GLOBAL_CVAR_STATE() && ic->entry->cref == cref && LIKELY(rb_ractor_main_p())) {
1608 RB_DEBUG_COUNTER_INC(cvar_write_inline_hit);
1610 rb_class_ivar_set(ic->entry->class_value,
id, val);
1614 VALUE klass = vm_get_cvar_base(cref, reg_cfp, 1);
1618 update_classvariable_cache(iseq, klass,
id, cref, ic);
1624 vm_setclassvariable(iseq, cfp,
id, val, ic);
1630 return vm_getivar(obj,
id, iseq, ic, NULL, FALSE,
Qnil);
1641 shape_id_t dest_shape_id;
1643 vm_ic_atomic_shape_and_index(ic, &dest_shape_id, &index);
1645 if (UNLIKELY(UNDEF_P(vm_setivar(obj,
id, val, dest_shape_id, index)))) {
1652 if (!UNDEF_P(vm_setivar_default(obj,
id, val, dest_shape_id, index))) {
1656 vm_setivar_slowpath_ivar(obj,
id, val, iseq, ic);
1663 vm_setinstancevariable(iseq, obj,
id, val, ic);
1672 ec->tag->state = RUBY_TAG_FATAL;
1675 ec->tag->state = TAG_THROW;
1677 else if (THROW_DATA_P(err)) {
1678 ec->tag->state = THROW_DATA_STATE((
struct vm_throw_data *)err);
1681 ec->tag->state = TAG_RAISE;
1688 const int flag,
const VALUE throwobj)
1696 else if (state == TAG_BREAK) {
1698 const VALUE *ep = GET_EP();
1699 const rb_iseq_t *base_iseq = GET_ISEQ();
1700 escape_cfp = reg_cfp;
1702 while (ISEQ_BODY(base_iseq)->
type != ISEQ_TYPE_BLOCK) {
1703 if (ISEQ_BODY(escape_cfp->iseq)->type == ISEQ_TYPE_CLASS) {
1704 escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
1705 ep = escape_cfp->ep;
1706 base_iseq = escape_cfp->iseq;
1709 ep = VM_ENV_PREV_EP(ep);
1710 base_iseq = ISEQ_BODY(base_iseq)->parent_iseq;
1711 escape_cfp = rb_vm_search_cf_from_ep(ec, escape_cfp, ep);
1712 VM_ASSERT(escape_cfp->iseq == base_iseq);
1716 if (VM_FRAME_LAMBDA_P(escape_cfp)) {
1722 ep = VM_ENV_PREV_EP(ep);
1724 while (escape_cfp < eocfp) {
1725 if (escape_cfp->ep == ep) {
1726 const rb_iseq_t *
const iseq = escape_cfp->iseq;
1727 const VALUE epc = escape_cfp->pc - ISEQ_BODY(iseq)->iseq_encoded;
1732 for (i=0; i < ct->size; i++) {
1734 UNALIGNED_MEMBER_PTR(ct, entries[i]);
1736 if (entry->type == CATCH_TYPE_BREAK &&
1737 entry->iseq == base_iseq &&
1738 entry->start < epc && entry->end >= epc) {
1739 if (entry->cont == epc) {
1748 escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
1753 rb_vm_localjump_error(
"break from proc-closure", throwobj, TAG_BREAK);
1756 else if (state == TAG_RETRY) {
1757 const VALUE *ep = VM_ENV_PREV_EP(GET_EP());
1759 escape_cfp = rb_vm_search_cf_from_ep(ec, reg_cfp, ep);
1761 else if (state == TAG_RETURN) {
1762 const VALUE *current_ep = GET_EP();
1763 const VALUE *target_ep = NULL, *target_lep, *ep = current_ep;
1764 int in_class_frame = 0;
1766 escape_cfp = reg_cfp;
1769 while (!VM_ENV_LOCAL_P(ep)) {
1770 if (VM_ENV_FLAGS(ep, VM_FRAME_FLAG_LAMBDA) && target_ep == NULL) {
1773 ep = VM_ENV_PREV_EP(ep);
1777 while (escape_cfp < eocfp) {
1778 const VALUE *lep = VM_CF_LEP(escape_cfp);
1784 if (lep == target_lep &&
1785 VM_FRAME_RUBYFRAME_P(escape_cfp) &&
1786 ISEQ_BODY(escape_cfp->iseq)->type == ISEQ_TYPE_CLASS) {
1791 if (lep == target_lep) {
1792 if (VM_FRAME_LAMBDA_P(escape_cfp)) {
1794 if (in_class_frame) {
1799 const VALUE *tep = current_ep;
1801 while (target_lep != tep) {
1802 if (escape_cfp->ep == tep) {
1804 if (tep == target_ep) {
1808 goto unexpected_return;
1811 tep = VM_ENV_PREV_EP(tep);
1815 else if (VM_FRAME_RUBYFRAME_P(escape_cfp)) {
1816 switch (ISEQ_BODY(escape_cfp->iseq)->type) {
1818 case ISEQ_TYPE_MAIN:
1820 if (in_class_frame)
goto unexpected_return;
1821 if (target_ep == NULL) {
1825 goto unexpected_return;
1829 case ISEQ_TYPE_EVAL: {
1831 enum rb_iseq_type t = ISEQ_BODY(is)->type;
1832 while (t == ISEQ_TYPE_RESCUE || t == ISEQ_TYPE_ENSURE || t == ISEQ_TYPE_EVAL) {
1833 if (!(is = ISEQ_BODY(is)->parent_iseq))
break;
1834 t = ISEQ_BODY(is)->type;
1836 toplevel = t == ISEQ_TYPE_TOP || t == ISEQ_TYPE_MAIN;
1839 case ISEQ_TYPE_CLASS:
1848 if (escape_cfp->ep == target_lep && ISEQ_BODY(escape_cfp->iseq)->type == ISEQ_TYPE_METHOD) {
1849 if (target_ep == NULL) {
1853 goto unexpected_return;
1857 escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
1860 rb_vm_localjump_error(
"unexpected return", throwobj, TAG_RETURN);
1866 rb_bug(
"isns(throw): unsupported throw type");
1869 ec->tag->state = state;
1870 return (
VALUE)THROW_DATA_NEW(throwobj, escape_cfp, state);
1875 rb_num_t throw_state,
VALUE throwobj)
1877 const int state = (int)(throw_state & VM_THROW_STATE_MASK);
1878 const int flag = (int)(throw_state & VM_THROW_NO_ESCAPE_FLAG);
1881 return vm_throw_start(ec, reg_cfp, state, flag, throwobj);
1884 return vm_throw_continue(ec, throwobj);
1891 return vm_throw(ec, reg_cfp, throw_state, throwobj);
1897 int is_splat = flag & 0x01;
1900 const VALUE obj = ary;
1912 if (num + is_splat == 0) {
1915 else if (flag & 0x02) {
1920 for (i = 0; i < num -
len; i++) {
1925 for (j = 0; i < num; i++, j++) {
1947 for (; i < num -
len; i++) {
1951 for (rb_num_t j = 0; i < num; i++, j++) {
1952 *cfp->sp++ = ptr[
len - j - 1];
1956 for (rb_num_t j = 0; j < num; j++) {
1957 *cfp->sp++ = ptr[num - j - 1];
1973#if VM_CHECK_MODE > 0
1974 ccs->debug_sig = ~(
VALUE)ccs;
1980 ccs->entries = NULL;
1982 rb_id_table_insert(cc_tbl, mid, (
VALUE)ccs);
1990 if (! vm_cc_markable(cc)) {
1994 if (UNLIKELY(ccs->len == ccs->capa)) {
1995 if (ccs->capa == 0) {
1997 ccs->entries =
ALLOC_N(
struct rb_class_cc_entries_entry, ccs->capa);
2001 REALLOC_N(ccs->entries,
struct rb_class_cc_entries_entry, ccs->capa);
2004 VM_ASSERT(ccs->len < ccs->capa);
2006 const int pos = ccs->len++;
2007 ccs->entries[pos].argc = vm_ci_argc(ci);
2008 ccs->entries[pos].flag = vm_ci_flag(ci);
2011 if (RB_DEBUG_COUNTER_SETMAX(ccs_maxlen, ccs->len)) {
2017#if VM_CHECK_MODE > 0
2021 ruby_debug_printf(
"ccs:%p (%d,%d)\n", (
void *)ccs, ccs->len, ccs->capa);
2022 for (
int i=0; i<ccs->len; i++) {
2023 ruby_debug_printf(
"CCS CI ID:flag:%x argc:%u\n",
2024 ccs->entries[i].flag,
2025 ccs->entries[i].argc);
2026 rp(ccs->entries[i].cc);
2033 VM_ASSERT(vm_ccs_p(ccs));
2034 VM_ASSERT(ccs->len <= ccs->capa);
2036 for (
int i=0; i<ccs->len; i++) {
2039 VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
2040 VM_ASSERT(vm_cc_class_check(cc, klass));
2041 VM_ASSERT(vm_cc_check_cme(cc, ccs->cme));
2042 VM_ASSERT(!vm_cc_super_p(cc));
2043 VM_ASSERT(!vm_cc_refinement_p(cc));
2054 const ID mid = vm_ci_mid(ci);
2055 struct rb_id_table *cc_tbl = RCLASS_WRITABLE_CC_TBL(klass);
2062 if (rb_id_table_lookup(cc_tbl, mid, &ccs_data)) {
2064 const int ccs_len = ccs->len;
2066 if (UNLIKELY(METHOD_ENTRY_INVALIDATED(ccs->cme))) {
2067 rb_vm_ccs_free(ccs);
2068 rb_id_table_delete(cc_tbl, mid);
2072 VM_ASSERT(vm_ccs_verify(ccs, mid, klass));
2077 unsigned int argc = vm_ci_argc(ci);
2078 unsigned int flag = vm_ci_flag(ci);
2080 for (
int i=0; i<ccs_len; i++) {
2081 unsigned int ccs_ci_argc = ccs->entries[i].argc;
2082 unsigned int ccs_ci_flag = ccs->entries[i].flag;
2083 const struct rb_callcache *ccs_cc = ccs->entries[i].cc;
2085 VM_ASSERT(IMEMO_TYPE_P(ccs_cc, imemo_callcache));
2087 if (ccs_ci_argc == argc && ccs_ci_flag == flag) {
2088 RB_DEBUG_COUNTER_INC(cc_found_in_ccs);
2090 VM_ASSERT(vm_cc_cme(ccs_cc)->called_id == mid);
2091 VM_ASSERT(ccs_cc->klass == klass);
2092 VM_ASSERT(!METHOD_ENTRY_INVALIDATED(vm_cc_cme(ccs_cc)));
2101 cc_tbl = rb_id_table_create(2);
2102 RCLASS_WRITE_CC_TBL(klass, cc_tbl);
2105 RB_DEBUG_COUNTER_INC(cc_not_found_in_ccs);
2111 cme = UNDEFINED_METHOD_ENTRY_P(cme) ? NULL : cme;
2113 VM_ASSERT(cme == rb_callable_method_entry(klass, mid));
2116 cme = rb_callable_method_entry(klass, mid);
2119 VM_ASSERT(cme == NULL || IMEMO_TYPE_P(cme, imemo_ment));
2123 VM_ASSERT(vm_cc_cme(&vm_empty_cc) == NULL);
2124 return &vm_empty_cc;
2127 VM_ASSERT(cme == rb_callable_method_entry(klass, mid));
2132 VM_ASSERT(cc_tbl != NULL);
2134 if (LIKELY(rb_id_table_lookup(cc_tbl, mid, &ccs_data))) {
2140 ccs = vm_ccs_create(klass, cc_tbl, mid, cme);
2144 cme = rb_check_overloaded_cme(cme, ci);
2146 const struct rb_callcache *cc = vm_cc_new(klass, cme, vm_call_general, cc_type_normal);
2147 vm_ccs_push(klass, ccs, ci, cc);
2149 VM_ASSERT(vm_cc_cme(cc) != NULL);
2150 VM_ASSERT(cme->called_id == mid);
2151 VM_ASSERT(vm_cc_cme(cc)->called_id == mid);
2164 cc = vm_search_cc(klass, ci);
2167 VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
2168 VM_ASSERT(cc == vm_cc_empty() || cc->klass == klass);
2169 VM_ASSERT(cc == vm_cc_empty() || callable_method_entry_p(vm_cc_cme(cc)));
2170 VM_ASSERT(cc == vm_cc_empty() || !METHOD_ENTRY_INVALIDATED(vm_cc_cme(cc)));
2171 VM_ASSERT(cc == vm_cc_empty() || vm_cc_cme(cc)->called_id == vm_ci_mid(ci));
2180#if USE_DEBUG_COUNTER
2184 const struct rb_callcache *cc = rb_vm_search_method_slowpath(cd->ci, klass);
2186#if OPT_INLINE_METHOD_CACHE
2190 if (cd_owner && cc != empty_cc) {
2194#if USE_DEBUG_COUNTER
2195 if (!old_cc || old_cc == empty_cc) {
2197 RB_DEBUG_COUNTER_INC(mc_inline_miss_empty);
2199 else if (old_cc == cc) {
2200 RB_DEBUG_COUNTER_INC(mc_inline_miss_same_cc);
2202 else if (vm_cc_cme(old_cc) == vm_cc_cme(cc)) {
2203 RB_DEBUG_COUNTER_INC(mc_inline_miss_same_cme);
2205 else if (vm_cc_cme(old_cc) && vm_cc_cme(cc) &&
2206 vm_cc_cme(old_cc)->def == vm_cc_cme(cc)->def) {
2207 RB_DEBUG_COUNTER_INC(mc_inline_miss_same_def);
2210 RB_DEBUG_COUNTER_INC(mc_inline_miss_diff);
2215 VM_ASSERT(vm_cc_cme(cc) == NULL ||
2216 vm_cc_cme(cc)->called_id == vm_ci_mid(cd->ci));
2227#if OPT_INLINE_METHOD_CACHE
2228 if (LIKELY(vm_cc_class_check(cc, klass))) {
2229 if (LIKELY(!METHOD_ENTRY_INVALIDATED(vm_cc_cme(cc)))) {
2230 VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));
2231 RB_DEBUG_COUNTER_INC(mc_inline_hit);
2232 VM_ASSERT(vm_cc_cme(cc) == NULL ||
2233 (vm_ci_flag(cd->ci) & VM_CALL_SUPER) ||
2234 vm_cc_cme(cc)->called_id == vm_ci_mid(cd->ci));
2238 RB_DEBUG_COUNTER_INC(mc_inline_miss_invalidated);
2241 RB_DEBUG_COUNTER_INC(mc_inline_miss_klass);
2245 return vm_search_method_slowpath0(cd_owner, cd, klass);
2252 VM_ASSERT(klass !=
Qfalse);
2255 return vm_search_method_fastpath(cd_owner, cd, klass);
2258#if __has_attribute(transparent_union)
2271 VALUE (*f10)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE);
2272 VALUE (*f11)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE);
2273 VALUE (*f12)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE);
2274 VALUE (*f13)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE);
2275 VALUE (*f14)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE);
2276 VALUE (*f15)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE);
2279# define make_cfunc_type(f) (cfunc_type){.anyargs = (VALUE (*)(ANYARGS))(f)}
2282# define make_cfunc_type(f) (cfunc_type)(f)
2292 VM_ASSERT(IMEMO_TYPE_P(me, imemo_ment));
2293 VM_ASSERT(callable_method_entry_p(me));
2295 if (me->def->type != VM_METHOD_TYPE_CFUNC) {
2299#if __has_attribute(transparent_union)
2300 return me->def->body.cfunc.func == func.anyargs;
2302 return me->def->body.cfunc.func == func;
2311 return me && METHOD_ENTRY_BASIC(me);
2317 VM_ASSERT(iseq != NULL);
2319 return check_cfunc(vm_cc_cme(cc), func);
2322#define check_cfunc(me, func) check_cfunc(me, make_cfunc_type(func))
2323#define vm_method_cfunc_is(iseq, cd, recv, func) vm_method_cfunc_is(iseq, cd, recv, make_cfunc_type(func))
2325#define EQ_UNREDEFINED_P(t) BASIC_OP_UNREDEFINED_P(BOP_EQ, t##_REDEFINED_OP_FLAG)
2357opt_equality_specialized(
VALUE recv,
VALUE obj)
2359 if (FIXNUM_2_P(recv, obj) && EQ_UNREDEFINED_P(INTEGER)) {
2360 goto compare_by_identity;
2362 else if (FLONUM_2_P(recv, obj) && EQ_UNREDEFINED_P(FLOAT)) {
2363 goto compare_by_identity;
2366 goto compare_by_identity;
2375#if MSC_VERSION_BEFORE(1300)
2379 else if (isnan(b)) {
2384 return RBOOL(a == b);
2391 return rb_str_eql_internal(obj, recv);
2396 compare_by_identity:
2397 return RBOOL(recv == obj);
2403 VM_ASSERT(cd_owner != NULL);
2405 VALUE val = opt_equality_specialized(recv, obj);
2406 if (!UNDEF_P(val))
return val;
2408 if (!vm_method_cfunc_is(cd_owner, cd, recv, rb_obj_equal)) {
2412 return RBOOL(recv == obj);
2416#undef EQ_UNREDEFINED_P
2419NOINLINE(
static VALUE opt_equality_by_mid_slowpath(
VALUE recv,
VALUE obj,
ID mid));
2422opt_equality_by_mid_slowpath(
VALUE recv,
VALUE obj,
ID mid)
2424 const struct rb_callcache *cc = gccct_method_search(GET_EC(), recv, mid, &VM_CI_ON_STACK(mid, 0, 1, NULL));
2426 if (cc && check_cfunc(vm_cc_cme(cc), rb_obj_equal)) {
2427 return RBOOL(recv == obj);
2437 VALUE val = opt_equality_specialized(recv, obj);
2438 if (!UNDEF_P(val)) {
2442 return opt_equality_by_mid_slowpath(recv, obj, mid);
2449 return opt_equality_by_mid(obj1, obj2, idEq);
2455 return opt_equality_by_mid(obj1, obj2, idEqlP);
2465 case VM_CHECKMATCH_TYPE_WHEN:
2467 case VM_CHECKMATCH_TYPE_RESCUE:
2469 rb_raise(
rb_eTypeError,
"class or module required for rescue clause");
2472 case VM_CHECKMATCH_TYPE_CASE: {
2473 return rb_vm_call_with_refinements(ec, pattern, idEqq, 1, &target,
RB_NO_KEYWORDS);
2476 rb_bug(
"check_match: unreachable");
2481#if MSC_VERSION_BEFORE(1300)
2482#define CHECK_CMP_NAN(a, b) if (isnan(a) || isnan(b)) return Qfalse;
2484#define CHECK_CMP_NAN(a, b)
2488double_cmp_lt(
double a,
double b)
2490 CHECK_CMP_NAN(a, b);
2491 return RBOOL(a < b);
2495double_cmp_le(
double a,
double b)
2497 CHECK_CMP_NAN(a, b);
2498 return RBOOL(a <= b);
2502double_cmp_gt(
double a,
double b)
2504 CHECK_CMP_NAN(a, b);
2505 return RBOOL(a > b);
2509double_cmp_ge(
double a,
double b)
2511 CHECK_CMP_NAN(a, b);
2512 return RBOOL(a >= b);
2516static inline VALUE *
2521 if (cfp->iseq && VM_FRAME_RUBYFRAME_P(cfp)) {
2522 VALUE *bp = prev_cfp->sp + ISEQ_BODY(cfp->iseq)->local_table_size + VM_ENV_DATA_SIZE;
2524 if (ISEQ_BODY(cfp->iseq)->param.flags.forwardable && VM_ENV_LOCAL_P(cfp->ep)) {
2525 int lts = ISEQ_BODY(cfp->iseq)->local_table_size;
2526 int params = ISEQ_BODY(cfp->iseq)->param.size;
2529 bp += vm_ci_argc(ci);
2532 if (ISEQ_BODY(cfp->iseq)->type == ISEQ_TYPE_METHOD || VM_FRAME_BMETHOD_P(cfp)) {
2536#if VM_DEBUG_BP_CHECK
2537 if (bp != cfp->bp_check) {
2538 ruby_debug_printf(
"bp_check: %ld, bp: %ld\n",
2539 (
long)(cfp->bp_check - GET_EC()->vm_stack),
2540 (
long)(bp - GET_EC()->vm_stack));
2541 rb_bug(
"vm_base_ptr: unreachable");
2554 return vm_base_ptr(cfp);
2569static vm_call_handler vm_call_iseq_setup_func(
const struct rb_callinfo *ci,
const int param_size,
const int local_size);
2574 RB_DEBUG_COUNTER_INC(ccf_iseq_setup_tailcall_0start);
2576 return vm_call_iseq_setup_tailcall(ec, cfp, calling, 0);
2582 RB_DEBUG_COUNTER_INC(ccf_iseq_setup_0start);
2585 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2586 int param = ISEQ_BODY(iseq)->param.size;
2587 int local = ISEQ_BODY(iseq)->local_table_size;
2588 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);
2594 return ISEQ_BODY(iseq)->param.flags.has_opt == FALSE &&
2595 ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
2596 ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
2597 ISEQ_BODY(iseq)->param.flags.has_kw == FALSE &&
2598 ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
2599 ISEQ_BODY(iseq)->param.flags.accepts_no_kwarg == FALSE &&
2600 ISEQ_BODY(iseq)->param.flags.forwardable == FALSE &&
2601 ISEQ_BODY(iseq)->param.flags.has_block == FALSE;
2605rb_iseq_only_optparam_p(
const rb_iseq_t *iseq)
2607 return ISEQ_BODY(iseq)->param.flags.has_opt == TRUE &&
2608 ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
2609 ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
2610 ISEQ_BODY(iseq)->param.flags.has_kw == FALSE &&
2611 ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
2612 ISEQ_BODY(iseq)->param.flags.accepts_no_kwarg == FALSE &&
2613 ISEQ_BODY(iseq)->param.flags.forwardable == FALSE &&
2614 ISEQ_BODY(iseq)->param.flags.has_block == FALSE;
2618rb_iseq_only_kwparam_p(
const rb_iseq_t *iseq)
2620 return ISEQ_BODY(iseq)->param.flags.has_opt == FALSE &&
2621 ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
2622 ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
2623 ISEQ_BODY(iseq)->param.flags.has_kw == TRUE &&
2624 ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
2625 ISEQ_BODY(iseq)->param.flags.forwardable == FALSE &&
2626 ISEQ_BODY(iseq)->param.flags.has_block == FALSE;
2629#define ALLOW_HEAP_ARGV (-2)
2630#define ALLOW_HEAP_ARGV_KEEP_KWSPLAT (-3)
2635 vm_check_canary(GET_EC(), cfp->sp);
2641 int argc = calling->argc;
2643 if (UNLIKELY(max_args <= ALLOW_HEAP_ARGV && len + argc > VM_ARGC_STACK_MAX)) {
2647 VALUE *argv = cfp->sp - argc;
2651 cfp->sp -= argc - 1;
2652 cfp->sp[-1] = argv_ary;
2654 calling->heap_argv = argv_ary;
2660 if (max_args >= 0 &&
len + argc > max_args) {
2668 calling->argc +=
len - (max_args - argc + 1);
2669 len = max_args - argc + 1;
2678 calling->heap_argv = 0;
2680 CHECK_VM_STACK_OVERFLOW(cfp,
len);
2682 for (i = 0; i <
len; i++) {
2683 *cfp->sp++ = ptr[i];
2695 const VALUE *
const passed_keywords = vm_ci_kwarg(ci)->keywords;
2696 const int kw_len = vm_ci_kwarg(ci)->keyword_len;
2697 const VALUE h = rb_hash_new_with_size(kw_len);
2698 VALUE *sp = cfp->sp;
2701 for (i=0; i<kw_len; i++) {
2702 rb_hash_aset(h, passed_keywords[i], (sp - kw_len)[i]);
2706 cfp->sp -= kw_len - 1;
2707 calling->argc -= kw_len - 1;
2708 calling->kw_splat = 1;
2712vm_caller_setup_keyword_hash(
const struct rb_callinfo *ci,
VALUE keyword_hash)
2715 if (keyword_hash !=
Qnil) {
2717 keyword_hash = rb_hash_dup(rb_to_hash_type(keyword_hash));
2720 else if (!IS_ARGS_KW_SPLAT_MUT(ci) && !
RHASH_EMPTY_P(keyword_hash)) {
2726 keyword_hash = rb_hash_dup(keyword_hash);
2728 return keyword_hash;
2734 const struct rb_callinfo *restrict ci,
int max_args)
2736 if (UNLIKELY(IS_ARGS_SPLAT(ci))) {
2737 if (IS_ARGS_KW_SPLAT(ci)) {
2739 VM_ASSERT(calling->kw_splat == 1);
2743 VALUE ary = cfp->sp[0];
2744 VALUE kwh = vm_caller_setup_keyword_hash(ci, cfp->sp[1]);
2747 if (vm_caller_setup_arg_splat(cfp, calling, ary, max_args))
return;
2751 if (UNLIKELY(calling->heap_argv)) {
2753 ((
struct RHash *)kwh)->basic.flags |= RHASH_PASS_AS_KEYWORDS;
2754 if (max_args != ALLOW_HEAP_ARGV_KEEP_KWSPLAT) {
2755 calling->kw_splat = 0;
2763 VM_ASSERT(calling->kw_splat == 1);
2767 calling->kw_splat = 0;
2772 VM_ASSERT(calling->kw_splat == 0);
2776 VALUE ary = cfp->sp[0];
2778 if (vm_caller_setup_arg_splat(cfp, calling, ary, max_args)) {
2783 VALUE last_hash, argv_ary;
2784 if (UNLIKELY(argv_ary = calling->heap_argv)) {
2785 if (!IS_ARGS_KEYWORD(ci) &&
2788 (((
struct RHash *)last_hash)->basic.flags & RHASH_PASS_AS_KEYWORDS)) {
2793 calling->kw_splat = 1;
2799 if (!IS_ARGS_KEYWORD(ci) &&
2800 calling->argc > 0 &&
2802 (((
struct RHash *)last_hash)->basic.flags & RHASH_PASS_AS_KEYWORDS)) {
2809 cfp->sp[-1] = rb_hash_dup(last_hash);
2810 calling->kw_splat = 1;
2816 else if (UNLIKELY(IS_ARGS_KW_SPLAT(ci))) {
2818 VM_ASSERT(calling->kw_splat == 1);
2819 VALUE kwh = vm_caller_setup_keyword_hash(ci, cfp->sp[-1]);
2824 calling->kw_splat = 0;
2830 else if (UNLIKELY(IS_ARGS_KEYWORD(ci))) {
2832 VM_ASSERT(calling->kw_splat == 0);
2838 vm_caller_setup_arg_kw(cfp, calling, ci);
2842#define USE_OPT_HIST 0
2845#define OPT_HIST_MAX 64
2846static int opt_hist[OPT_HIST_MAX+1];
2850opt_hist_show_results_at_exit(
void)
2852 for (
int i=0; i<OPT_HIST_MAX; i++) {
2853 ruby_debug_printf(
"opt_hist\t%d\t%d\n", i, opt_hist[i]);
2863 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2864 const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
2865 const int opt = calling->argc - lead_num;
2866 const int opt_num = ISEQ_BODY(iseq)->param.opt_num;
2867 const int opt_pc = (int)ISEQ_BODY(iseq)->param.opt_table[opt];
2868 const int param = ISEQ_BODY(iseq)->param.size;
2869 const int local = ISEQ_BODY(iseq)->local_table_size;
2870 const int delta = opt_num - opt;
2872 RB_DEBUG_COUNTER_INC(ccf_iseq_opt);
2875 if (opt_pc < OPT_HIST_MAX) {
2879 opt_hist[OPT_HIST_MAX]++;
2883 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), opt_pc, param - delta, local);
2891 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2892 const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
2893 const int opt = calling->argc - lead_num;
2894 const int opt_pc = (int)ISEQ_BODY(iseq)->param.opt_table[opt];
2896 RB_DEBUG_COUNTER_INC(ccf_iseq_opt);
2899 if (opt_pc < OPT_HIST_MAX) {
2903 opt_hist[OPT_HIST_MAX]++;
2907 return vm_call_iseq_setup_tailcall(ec, cfp, calling, opt_pc);
2912 VALUE *
const passed_values,
const int passed_keyword_len,
const VALUE *
const passed_keywords,
2913 VALUE *
const locals);
2920 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2921 int param_size = ISEQ_BODY(iseq)->param.size;
2922 int local_size = ISEQ_BODY(iseq)->local_table_size;
2925 VM_ASSERT(ISEQ_BODY(iseq)->param.flags.forwardable);
2927 local_size = local_size + vm_ci_argc(calling->cd->ci);
2928 param_size = param_size + vm_ci_argc(calling->cd->ci);
2930 cfp->sp[0] = (
VALUE)calling->cd->ci;
2932 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param_size, local_size);
2942 VM_ASSERT(vm_ci_flag(ci) & VM_CALL_KWARG);
2943 RB_DEBUG_COUNTER_INC(ccf_iseq_kw1);
2945 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2946 const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;
2948 const int ci_kw_len = kw_arg->keyword_len;
2949 const VALUE *
const ci_keywords = kw_arg->keywords;
2950 VALUE *argv = cfp->sp - calling->argc;
2951 VALUE *
const klocals = argv + kw_param->bits_start - kw_param->num;
2952 const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
2954 MEMCPY(ci_kws, argv + lead_num,
VALUE, ci_kw_len);
2955 args_setup_kw_parameters(ec, iseq, ci_kws, ci_kw_len, ci_keywords, klocals);
2957 int param = ISEQ_BODY(iseq)->param.size;
2958 int local = ISEQ_BODY(iseq)->local_table_size;
2959 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);
2966 const struct rb_callinfo *MAYBE_UNUSED(ci) = calling->cd->ci;
2969 VM_ASSERT((vm_ci_flag(ci) & VM_CALL_KWARG) == 0);
2970 RB_DEBUG_COUNTER_INC(ccf_iseq_kw2);
2972 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2973 const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;
2974 VALUE *
const argv = cfp->sp - calling->argc;
2975 VALUE *
const klocals = argv + kw_param->bits_start - kw_param->num;
2978 for (i=0; i<kw_param->num; i++) {
2979 klocals[i] = kw_param->default_values[i];
2986 int param = ISEQ_BODY(iseq)->param.size;
2987 int local = ISEQ_BODY(iseq)->local_table_size;
2988 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);
2998 cfp->sp -= (calling->argc + 1);
2999 rb_insn_func_t func_ptr = (rb_insn_func_t)(uintptr_t)bf->func_ptr;
3000 return builtin_invoker0(ec, calling->recv, NULL, func_ptr);
3009 set_table *dup_check_table = vm->unused_block_warning_table;
3019 .v = (
VALUE)cme->def,
3023 if (!strict_unused_block) {
3024 key = (st_data_t)cme->def->original_id;
3026 if (set_lookup(dup_check_table, key)) {
3036 key |= (st_data_t)(k1.b[i] ^ k2.b[
SIZEOF_VALUE-1-i]) << (8 * i);
3041 fprintf(stderr,
"pc:%p def:%p\n", pc, (
void *)cme->def);
3042 fprintf(stderr,
"key:%p\n", (
void *)key);
3046 if (set_insert(dup_check_table, key)) {
3051 VALUE name = rb_gen_method_name(cme->defined_class, ISEQ_BODY(iseq)->location.base_label);
3053 if (!
NIL_P(m_loc)) {
3054 rb_warn(
"the block passed to '%"PRIsVALUE
"' defined at %"PRIsVALUE
":%"PRIsVALUE
" may be ignored",
3058 rb_warn(
"the block may be ignored because '%"PRIsVALUE
"' does not use a block", name);
3065 const rb_iseq_t *iseq,
VALUE *argv,
int param_size,
int local_size)
3070 VM_ASSERT((vm_ci_argc(ci), 1));
3071 VM_ASSERT(vm_cc_cme(cc) != NULL);
3073 if (UNLIKELY(!ISEQ_BODY(iseq)->param.flags.use_block &&
3074 calling->block_handler != VM_BLOCK_HANDLER_NONE &&
3075 !(vm_ci_flag(calling->cd->ci) & (VM_CALL_OPT_SEND | VM_CALL_SUPER)))) {
3076 warn_unused_block(vm_cc_cme(cc), iseq, (
void *)ec->cfp->pc);
3079 if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_KW_SPLAT))) {
3080 if (LIKELY(rb_simple_iseq_p(iseq))) {
3082 int lead_num = ISEQ_BODY(iseq)->param.lead_num;
3083 CALLER_SETUP_ARG(cfp, calling, ci, lead_num);
3085 if (calling->argc != lead_num) {
3086 argument_arity_error(ec, iseq, calling->argc, lead_num, lead_num);
3090 VM_ASSERT(cc == calling->cc);
3092 if (vm_call_iseq_optimizable_p(ci, cc)) {
3093 if ((iseq->body->builtin_attrs & BUILTIN_ATTR_SINGLE_NOARG_LEAF) &&
3095 VM_ASSERT(iseq->body->builtin_attrs & BUILTIN_ATTR_LEAF);
3096 vm_cc_bf_set(cc, (
void *)iseq->body->iseq_encoded[1]);
3097 CC_SET_FASTPATH(cc, vm_call_single_noarg_leaf_builtin,
true);
3100 CC_SET_FASTPATH(cc, vm_call_iseq_setup_func(ci, param_size, local_size),
true);
3105 else if (rb_iseq_only_optparam_p(iseq)) {
3108 const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
3109 const int opt_num = ISEQ_BODY(iseq)->param.opt_num;
3111 CALLER_SETUP_ARG(cfp, calling, ci, lead_num + opt_num);
3112 const int argc = calling->argc;
3113 const int opt = argc - lead_num;
3115 if (opt < 0 || opt > opt_num) {
3116 argument_arity_error(ec, iseq, argc, lead_num, lead_num + opt_num);
3119 if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_TAILCALL))) {
3120 CC_SET_FASTPATH(cc, vm_call_iseq_setup_normal_opt_start,
3121 !IS_ARGS_SPLAT(ci) && !IS_ARGS_KEYWORD(ci) &&
3122 vm_call_cacheable(ci, cc));
3125 CC_SET_FASTPATH(cc, vm_call_iseq_setup_tailcall_opt_start,
3126 !IS_ARGS_SPLAT(ci) && !IS_ARGS_KEYWORD(ci) &&
3127 vm_call_cacheable(ci, cc));
3131 VM_ASSERT((
int)ISEQ_BODY(iseq)->param.size == lead_num + opt_num);
3132 for (
int i=argc; i<lead_num + opt_num; i++) {
3135 return (
int)ISEQ_BODY(iseq)->param.opt_table[opt];
3137 else if (rb_iseq_only_kwparam_p(iseq) && !IS_ARGS_SPLAT(ci)) {
3138 const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
3139 const int argc = calling->argc;
3140 const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;
3142 if (vm_ci_flag(ci) & VM_CALL_KWARG) {
3145 if (argc - kw_arg->keyword_len == lead_num) {
3146 const int ci_kw_len = kw_arg->keyword_len;
3147 const VALUE *
const ci_keywords = kw_arg->keywords;
3149 MEMCPY(ci_kws, argv + lead_num,
VALUE, ci_kw_len);
3151 VALUE *
const klocals = argv + kw_param->bits_start - kw_param->num;
3152 args_setup_kw_parameters(ec, iseq, ci_kws, ci_kw_len, ci_keywords, klocals);
3154 CC_SET_FASTPATH(cc, vm_call_iseq_setup_kwparm_kwarg,
3155 vm_call_cacheable(ci, cc));
3160 else if (argc == lead_num) {
3162 VALUE *
const klocals = argv + kw_param->bits_start - kw_param->num;
3163 args_setup_kw_parameters(ec, iseq, NULL, 0, NULL, klocals);
3165 if (klocals[kw_param->num] ==
INT2FIX(0)) {
3167 CC_SET_FASTPATH(cc, vm_call_iseq_setup_kwparm_nokwarg,
3168 vm_call_cacheable(ci, cc));
3194 if (ISEQ_BODY(iseq)->param.flags.forwardable) {
3195 bool can_fastpath =
true;
3197 if ((vm_ci_flag(ci) & VM_CALL_FORWARDING)) {
3199 if (vm_ci_argc(ci) != vm_ci_argc(forward_cd->caller_ci)) {
3200 ci = vm_ci_new_runtime(
3207 ci = forward_cd->caller_ci;
3209 can_fastpath =
false;
3213 if (!vm_ci_markable(ci)) {
3214 ci = vm_ci_new_runtime(
3219 can_fastpath =
false;
3221 argv[param_size - 1] = (
VALUE)ci;
3222 CC_SET_FASTPATH(cc, vm_call_iseq_forwardable, can_fastpath);
3226 return setup_parameters_complex(ec, iseq, calling, ci, argv, arg_setup_method);
3253 const VALUE * lep = VM_CF_LEP(cfp);
3259 if (VM_ENV_FLAGS(lep, VM_ENV_FLAG_ESCAPED)) {
3264 iseq = rb_vm_search_cf_from_ep(ec, cfp, lep)->iseq;
3268 int local_size = ISEQ_BODY(iseq)->local_table_size + argc;
3270 const VALUE * from = lep - (local_size + VM_ENV_DATA_SIZE - 1);
3271 VALUE * to = cfp->sp - 1;
3275 CHECK_VM_STACK_OVERFLOW0(cfp, to,
RARRAY_LEN(splat));
3280 CHECK_VM_STACK_OVERFLOW0(cfp, to, argc);
3282 cfp->sp = to + argc;
3301 RB_DEBUG_COUNTER_INC(ccf_iseq_setup);
3304 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
3305 int param_size = ISEQ_BODY(iseq)->param.size;
3306 int local_size = ISEQ_BODY(iseq)->local_table_size;
3308 RUBY_ASSERT(!ISEQ_BODY(iseq)->param.flags.forwardable);
3310 const int opt_pc = vm_callee_setup_arg(ec, calling, iseq, cfp->sp - calling->argc, param_size, local_size);
3311 return vm_call_iseq_setup_2(ec, cfp, calling, opt_pc, param_size, local_size);
3317 RB_DEBUG_COUNTER_INC(ccf_iseq_setup);
3320 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
3321 int param_size = ISEQ_BODY(iseq)->param.size;
3322 int local_size = ISEQ_BODY(iseq)->local_table_size;
3324 RUBY_ASSERT(ISEQ_BODY(iseq)->param.flags.forwardable);
3327 local_size = local_size + vm_ci_argc(calling->cd->ci);
3328 param_size = param_size + vm_ci_argc(calling->cd->ci);
3330 const int opt_pc = vm_callee_setup_arg(ec, calling, iseq, cfp->sp - calling->argc, param_size, local_size);
3331 return vm_call_iseq_setup_2(ec, cfp, calling, opt_pc, param_size, local_size);
3336 int opt_pc,
int param_size,
int local_size)
3341 if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_TAILCALL))) {
3342 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), opt_pc, param_size, local_size);
3345 return vm_call_iseq_setup_tailcall(ec, cfp, calling, opt_pc);
3351 int opt_pc,
int param_size,
int local_size)
3353 const rb_iseq_t *iseq = def_iseq_ptr(me->def);
3354 VALUE *argv = cfp->sp - calling->argc;
3355 VALUE *sp = argv + param_size;
3356 cfp->sp = argv - 1 ;
3358 vm_push_frame(ec, iseq, VM_FRAME_MAGIC_METHOD | VM_ENV_FLAG_LOCAL, calling->recv,
3359 calling->block_handler, (
VALUE)me,
3360 ISEQ_BODY(iseq)->iseq_encoded + opt_pc, sp,
3361 local_size - param_size,
3362 ISEQ_BODY(iseq)->stack_max);
3371 VALUE *argv = cfp->sp - calling->argc;
3373 const rb_iseq_t *iseq = def_iseq_ptr(me->def);
3374 VALUE *src_argv = argv;
3375 VALUE *sp_orig, *sp;
3376 VALUE finish_flag = VM_FRAME_FINISHED_P(cfp) ? VM_FRAME_FLAG_FINISH : 0;
3378 if (VM_BH_FROM_CFP_P(calling->block_handler, cfp)) {
3379 struct rb_captured_block *dst_captured = VM_CFP_TO_CAPTURED_BLOCK(RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp));
3380 const struct rb_captured_block *src_captured = VM_BH_TO_CAPT_BLOCK(calling->block_handler);
3381 dst_captured->code.val = src_captured->code.val;
3382 if (VM_BH_ISEQ_BLOCK_P(calling->block_handler)) {
3383 calling->block_handler = VM_BH_FROM_ISEQ_BLOCK(dst_captured);
3386 calling->block_handler = VM_BH_FROM_IFUNC_BLOCK(dst_captured);
3390 vm_pop_frame(ec, cfp, cfp->ep);
3393 sp_orig = sp = cfp->sp;
3396 sp[0] = calling->recv;
3400 for (i=0; i < ISEQ_BODY(iseq)->param.size; i++) {
3401 *sp++ = src_argv[i];
3404 vm_push_frame(ec, iseq, VM_FRAME_MAGIC_METHOD | VM_ENV_FLAG_LOCAL | finish_flag,
3405 calling->recv, calling->block_handler, (
VALUE)me,
3406 ISEQ_BODY(iseq)->iseq_encoded + opt_pc, sp,
3407 ISEQ_BODY(iseq)->local_table_size - ISEQ_BODY(iseq)->param.size,
3408 ISEQ_BODY(iseq)->stack_max);
3416ractor_unsafe_check(
void)
3418 if (!rb_ractor_main_p()) {
3419 rb_raise(rb_eRactorUnsafeError,
"ractor unsafe method called from not main ractor");
3426 ractor_unsafe_check();
3434 ractor_unsafe_check();
3436 return (*f)(argc, argv, recv);
3442 ractor_unsafe_check();
3450 ractor_unsafe_check();
3452 return (*f)(recv, argv[0]);
3458 ractor_unsafe_check();
3460 return (*f)(recv, argv[0], argv[1]);
3466 ractor_unsafe_check();
3468 return (*f)(recv, argv[0], argv[1], argv[2]);
3474 ractor_unsafe_check();
3476 return (*f)(recv, argv[0], argv[1], argv[2], argv[3]);
3482 ractor_unsafe_check();
3483 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3484 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4]);
3490 ractor_unsafe_check();
3491 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3492 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
3498 ractor_unsafe_check();
3499 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3500 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
3506 ractor_unsafe_check();
3507 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3508 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
3514 ractor_unsafe_check();
3515 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3516 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
3522 ractor_unsafe_check();
3523 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3524 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
3530 ractor_unsafe_check();
3531 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3532 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
3538 ractor_unsafe_check();
3539 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3540 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
3546 ractor_unsafe_check();
3547 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3548 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
3554 ractor_unsafe_check();
3555 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3556 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
3562 ractor_unsafe_check();
3563 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3564 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
3578 return (*f)(argc, argv, recv);
3592 return (*f)(recv, argv[0]);
3599 return (*f)(recv, argv[0], argv[1]);
3606 return (*f)(recv, argv[0], argv[1], argv[2]);
3613 return (*f)(recv, argv[0], argv[1], argv[2], argv[3]);
3619 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3620 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4]);
3626 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3627 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
3633 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3634 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
3640 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3641 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
3647 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3648 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
3654 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3655 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
3661 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3662 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
3668 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3669 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
3675 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3676 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
3682 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3683 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
3689 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3690 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
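/* Commentary: the functions above are the arity-specialized cfunc invokers.
 * One invoker exists per arity (0..15, plus the argc/argv form) so that a
 * C method's argv array can be spread into an ordinary C call without
 * varargs tricks; the ractor_unsafe_* family differs from the
 * ractor_safe_* family only in calling ractor_unsafe_check() first.
 * A minimal sketch of the shape of one such invoker (illustrative only;
 * sketch_call_cfunc_2 is a hypothetical name, not part of the VM):
 *
 *   static VALUE
 *   sketch_call_cfunc_2(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
 *   {
 *       VALUE (*f)(VALUE, VALUE, VALUE) = (VALUE (*)(VALUE, VALUE, VALUE))func;
 *       return f(recv, argv[0], argv[1]);   // recv + 2 args, taken from the VM stack
 *   }
 */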
3696 const int ov_flags = RAISED_STACKOVERFLOW;
3697 if (LIKELY(reg_cfp == ec->cfp + 1)) return TRUE;
3698 if (rb_ec_raised_p(ec, ov_flags)) {
3699 rb_ec_raised_reset(ec, ov_flags);
3705#define CHECK_CFP_CONSISTENCY(func) \
3706 (LIKELY(vm_cfp_consistent_p(ec, reg_cfp)) ? (void)0 : \
3707 rb_bug(func ": cfp consistency error (%p, %p)", (void *)reg_cfp, (void *)(ec->cfp+1)))
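/* Commentary: vm_cfp_consistent_p / CHECK_CFP_CONSISTENCY verify that,
 * after a cfunc invocation, the saved control frame pointer is exactly
 * one frame above ec->cfp, i.e. the callee pushed and popped frames in a
 * balanced way. A pending RAISED_STACKOVERFLOW is treated as an excusable
 * imbalance: an overflow may unwind frames behind the VM's back, so the
 * flag is reset and the check passes instead of reporting a bug. */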
3713#if VM_DEBUG_VERIFY_METHOD_CACHE
3714 switch (me->def->type) {
3715 case VM_METHOD_TYPE_CFUNC:
3716 case VM_METHOD_TYPE_NOTIMPLEMENTED:
3718# define METHOD_BUG(t) case VM_METHOD_TYPE_##t: rb_bug("wrong method type: " #t)
3720 METHOD_BUG(ATTRSET);
3722 METHOD_BUG(BMETHOD);
3725 METHOD_BUG(OPTIMIZED);
3726 METHOD_BUG(MISSING);
3727 METHOD_BUG(REFINED);
3731 rb_bug("wrong method type: %d", me->def->type);
3734 return UNALIGNED_MEMBER_PTR(me->def, body.cfunc);
3741 RB_DEBUG_COUNTER_INC(ccf_cfunc_with_frame);
3748 VALUE recv = calling->recv;
3749 VALUE block_handler = calling->block_handler;
3750 VALUE frame_type = VM_FRAME_MAGIC_CFUNC | VM_FRAME_FLAG_CFRAME | VM_ENV_FLAG_LOCAL;
3752 if (UNLIKELY(calling->kw_splat)) {
3753 frame_type |= VM_FRAME_FLAG_CFRAME_KW;
3756 VM_ASSERT(reg_cfp == ec->cfp);
3758 RUBY_DTRACE_CMETHOD_ENTRY_HOOK(ec, me->owner, me->def->original_id);
3761 vm_push_frame(ec, NULL, frame_type, recv,
3762 block_handler, (VALUE)me,
3763 0, ec->cfp->sp, 0, 0);
3765 int len = cfunc->argc;
3768 reg_cfp->sp = stack_bottom;
3769 val = (*cfunc->invoker)(recv, argc, argv, cfunc->func);
3771 CHECK_CFP_CONSISTENCY("vm_call_cfunc");
3773 rb_vm_pop_frame(ec);
3775 VM_ASSERT(ec->cfp->sp == stack_bottom);
3777 EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_RETURN, recv, me->def->original_id, vm_ci_mid(ci), me->owner, val);
3778 RUBY_DTRACE_CMETHOD_RETURN_HOOK(ec, me->owner, me->def->original_id);
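/* Commentary: the calling convention for C-implemented methods, as used
 * above, is: push a CFUNC frame (so backtraces, GC, and hooks see the
 * call), fire c-call/dtrace entry hooks, lower sp to stack_bottom so the
 * argument slots are handed over to the callee, run the arity-matched
 * invoker, check frame consistency, pop the frame, then fire the
 * c-return hooks in reverse order. */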
3788 VM_ASSERT(cme->def->type == VM_METHOD_TYPE_CFUNC);
3790 VALUE *sp = ec->cfp->sp;
3791 VALUE recv = *(sp - recv_idx - 1);
3792 VALUE frame_type = VM_FRAME_MAGIC_CFUNC | VM_FRAME_FLAG_CFRAME | VM_ENV_FLAG_LOCAL;
3793 VALUE block_handler = VM_BLOCK_HANDLER_NONE;
3794#if VM_CHECK_MODE > 0
3796 *(GET_EC()->cfp->sp) = Qfalse;
3798 vm_push_frame(ec, NULL, frame_type, recv, block_handler, (VALUE)cme, 0, ec->cfp->sp, 0, 0);
3803 rb_splat_or_kwargs_p(const struct rb_callinfo *restrict ci)
3805 return IS_ARGS_SPLAT(ci) || IS_ARGS_KW_OR_KW_SPLAT(ci);
3811 int argc = calling->argc;
3812 VALUE *stack_bottom = reg_cfp->sp - argc - 1;
3813 VALUE *argv = &stack_bottom[1];
3815 return vm_call_cfunc_with_frame_(ec, reg_cfp, calling, argc, argv, stack_bottom);
3822 RB_DEBUG_COUNTER_INC(ccf_cfunc_other);
3824 CALLER_SETUP_ARG(reg_cfp, calling, ci, ALLOW_HEAP_ARGV_KEEP_KWSPLAT);
3826 if (UNLIKELY(argv_ary = calling->heap_argv)) {
3827 VM_ASSERT(!IS_ARGS_KEYWORD(ci));
3830 VALUE *stack_bottom = reg_cfp->sp - 2;
3832 VM_ASSERT(calling->argc == 1);
3836 return vm_call_cfunc_with_frame_(ec, reg_cfp, calling, argc, argv, stack_bottom);
3839 CC_SET_FASTPATH(calling->cc, vm_call_cfunc_with_frame, !rb_splat_or_kwargs_p(ci) && !calling->kw_splat && !(vm_ci_flag(ci) & VM_CALL_FORWARDING));
3841 return vm_call_cfunc_with_frame(ec, reg_cfp, calling);
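/* Commentary: CC_SET_FASTPATH memoizes the chosen handler in the call
 * cache. The first call through a site pays for argument-shape analysis;
 * later calls jump straight to vm_call_cfunc_with_frame while the cache
 * stays valid. The guard expression (no splat, no kw-splat, no
 * forwarding) ensures the memoized handler is only reused for argument
 * shapes it can actually handle. */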
3848 VALUE argv_ary = reg_cfp->sp[-1 - stack_offset];
3851 if (UNLIKELY(argc > VM_ARGC_STACK_MAX)) {
3852 return vm_call_cfunc_other(ec, reg_cfp, calling);
3856 calling->kw_splat = 0;
3858 VALUE *stack_bottom = reg_cfp->sp - 2 - stack_offset;
3859 VALUE *sp = stack_bottom;
3860 CHECK_VM_STACK_OVERFLOW(reg_cfp, argc);
3861 for(i = 0; i < argc; i++) {
3866 return vm_call_cfunc_with_frame_(ec, reg_cfp, calling, argc, stack_bottom+1, stack_bottom);
3872 RB_DEBUG_COUNTER_INC(ccf_cfunc_only_splat);
3873 VALUE argv_ary = reg_cfp->sp[-1];
3877 int argc_offset = 0;
3879 if (UNLIKELY(argc > 0 &&
3881 (((struct RHash *)last_hash)->basic.flags & RHASH_PASS_AS_KEYWORDS))) {
3883 return vm_call_cfunc_other(ec, reg_cfp, calling);
3887 return vm_call_cfunc_array_argv(ec, reg_cfp, calling, 0, argc_offset);
3893 RB_DEBUG_COUNTER_INC(ccf_cfunc_only_splat_kw);
3894 VALUE keyword_hash = reg_cfp->sp[-1];
3897 return vm_call_cfunc_array_argv(ec, reg_cfp, calling, 1, 0);
3900 return vm_call_cfunc_other(ec, reg_cfp, calling);
3907 RB_DEBUG_COUNTER_INC(ccf_cfunc);
3909 if (IS_ARGS_SPLAT(ci) && !(vm_ci_flag(ci) & VM_CALL_FORWARDING)) {
3910 if (!IS_ARGS_KW_SPLAT(ci) && vm_ci_argc(ci) == 1) {
3912 CC_SET_FASTPATH(calling->cc, vm_call_cfunc_only_splat, TRUE);
3913 return vm_call_cfunc_only_splat(ec, reg_cfp, calling);
3915 if (IS_ARGS_KW_SPLAT(ci) && vm_ci_argc(ci) == 2) {
3917 CC_SET_FASTPATH(calling->cc, vm_call_cfunc_only_splat_kw, TRUE);
3918 return vm_call_cfunc_only_splat_kw(ec, reg_cfp, calling);
3922 CC_SET_FASTPATH(calling->cc, vm_call_cfunc_other, TRUE);
3923 return vm_call_cfunc_other(ec, reg_cfp, calling);
3930 RB_DEBUG_COUNTER_INC(ccf_ivar);
3932 VALUE ivar = vm_getivar(calling->recv, vm_cc_cme(cc)->def->body.attr.id, NULL, NULL, cc, TRUE, Qnil);
3939 RB_DEBUG_COUNTER_INC(ccf_attrset);
3940 VALUE val = *(cfp->sp - 1);
3943 shape_id_t dest_shape_id;
3944 vm_cc_atomic_shape_and_index(cc, &dest_shape_id, &index);
3945 ID id = vm_cc_cme(cc)->def->body.attr.id;
3946 rb_check_frozen(obj);
3947 VALUE res = vm_setivar(obj, id, val, dest_shape_id, index);
3956 res = vm_setivar_default(obj, id, val, dest_shape_id, index);
3957 if (!UNDEF_P(res)) {
3962 res = vm_setivar_slowpath_attr(obj, id, val, cc);
3970 return vm_call_attrset_direct(ec, cfp, calling->cc, calling->recv);
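/* Commentary: attribute writers resolve through object shapes, as seen
 * above. vm_call_attrset_direct first tries vm_setivar with the shape id
 * and attr index cached in the call cache, falls back to
 * vm_setivar_default for objects whose ivars live in the generic table,
 * and only then takes vm_setivar_slowpath_attr, which does the full
 * lookup and can refresh the cached shape/index pair. */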
3980 VALUE procv = cme->def->body.bmethod.proc;
3983 cme->def->body.bmethod.defined_ractor != rb_ractor_self(rb_ec_ractor_ptr(ec))) {
3984 rb_raise(rb_eRuntimeError, "defined with an un-shareable Proc in a different Ractor");
3988 GetProcPtr(procv, proc);
3989 val = vm_invoke_bmethod(ec, proc, calling->recv, CALLING_ARGC(calling), argv, calling->kw_splat, calling->block_handler, vm_cc_cme(cc));
3999 RB_DEBUG_COUNTER_INC(ccf_iseq_bmethod);
4003 VALUE procv = cme->def->body.bmethod.proc;
4006 cme->def->body.bmethod.defined_ractor != rb_ractor_self(rb_ec_ractor_ptr(ec))) {
4007 rb_raise(rb_eRuntimeError, "defined with an un-shareable Proc in a different Ractor");
4011 GetProcPtr(procv, proc);
4012 const struct rb_block *block = &proc->block;
4014 while (vm_block_type(block) == block_type_proc) {
4015 block = vm_proc_block(block->as.proc);
4017 VM_ASSERT(vm_block_type(block) == block_type_iseq);
4020 const rb_iseq_t *iseq = rb_iseq_check(captured->code.iseq);
4021 VALUE *const argv = cfp->sp - calling->argc;
4022 const int arg_size = ISEQ_BODY(iseq)->param.size;
4025 if (vm_ci_flag(calling->cd->ci) & VM_CALL_ARGS_SIMPLE) {
4026 opt_pc = vm_callee_setup_block_arg(ec, calling, calling->cd->ci, iseq, argv, arg_setup_method);
4029 opt_pc = setup_parameters_complex(ec, iseq, calling, calling->cd->ci, argv, arg_setup_method);
4034 vm_push_frame(ec, iseq,
4035 VM_FRAME_MAGIC_BLOCK | VM_FRAME_FLAG_BMETHOD | VM_FRAME_FLAG_LAMBDA,
4037 VM_GUARDED_PREV_EP(captured->ep),
4039 ISEQ_BODY(iseq)->iseq_encoded + opt_pc,
4041 ISEQ_BODY(iseq)->local_table_size - arg_size,
4042 ISEQ_BODY(iseq)->stack_max);
4050 RB_DEBUG_COUNTER_INC(ccf_noniseq_bmethod);
4054 CALLER_SETUP_ARG(cfp, calling, calling->cd->ci, ALLOW_HEAP_ARGV);
4055 if (UNLIKELY(calling->heap_argv)) {
4060 argc = calling->argc;
4063 cfp->sp += - argc - 1;
4066 return vm_call_bmethod_body(ec, calling, argv);
4072 RB_DEBUG_COUNTER_INC(ccf_bmethod);
4076 VALUE procv = cme->def->body.bmethod.proc;
4078 GetProcPtr(procv, proc);
4079 const struct rb_block *block = &proc->block;
4081 while (vm_block_type(block) == block_type_proc) {
4082 block = vm_proc_block(block->as.proc);
4084 if (vm_block_type(block) == block_type_iseq) {
4085 CC_SET_FASTPATH(cc, vm_call_iseq_bmethod, TRUE);
4086 return vm_call_iseq_bmethod(ec, cfp, calling);
4089 CC_SET_FASTPATH(cc, vm_call_noniseq_bmethod, TRUE);
4090 return vm_call_noniseq_bmethod(ec, cfp, calling);
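/* Commentary: a "bmethod" is a method defined from a Proc
 * (define_method). The dispatch above peels proc wrappers down to the
 * underlying block: if the block is iseq-backed, vm_call_iseq_bmethod
 * pushes a BLOCK frame flagged BMETHOD|LAMBDA directly, avoiding a
 * separate proc invocation; anything else (e.g. an ifunc block) goes
 * through vm_call_noniseq_bmethod and the generic vm_invoke_bmethod
 * path. Both check that a non-shareable Proc is not called from a
 * different Ractor than the one that defined it. */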
4094 rb_find_defined_class_by_owner(VALUE current_class, VALUE target_owner)
4096 VALUE klass = current_class;
4104 while (RTEST(klass)) {
4106 if (owner == target_owner) {
4112 return current_class;
4121 if (orig_me->defined_class == 0) {
4122 VALUE defined_class = rb_find_defined_class_by_owner(me->defined_class, orig_me->owner);
4123 VM_ASSERT_TYPE(orig_me->owner, T_MODULE);
4124 cme = rb_method_entry_complement_defined_class(orig_me, me->called_id, defined_class);
4126 if (me->def->reference_count == 1) {
4127 RB_OBJ_WRITE(me, &me->def->body.alias.original_me, cme);
4131 rb_method_definition_create(VM_METHOD_TYPE_ALIAS, me->def->original_id);
4139 VM_ASSERT(callable_method_entry_p(cme));
4146 return aliased_callable_method_entry(me);
4152 calling->cc = &VM_CC_ON_STACK(Qundef,
4155 aliased_callable_method_entry(vm_cc_cme(calling->cc)));
4157 return vm_call_method_each_type(ec, cfp, calling);
4160static enum method_missing_reason
4163 enum method_missing_reason stat = MISSING_NOENTRY;
4164 if (vm_ci_flag(ci) & VM_CALL_VCALL) stat |= MISSING_VCALL;
4165 if (vm_ci_flag(ci) & VM_CALL_FCALL) stat |= MISSING_FCALL;
4166 if (vm_ci_flag(ci) & VM_CALL_SUPER) stat |= MISSING_SUPER;
4176 ASSUME(calling->argc >= 0);
4178 enum method_missing_reason missing_reason = MISSING_NOENTRY;
4179 int argc = calling->argc;
4180 VALUE recv = calling->recv;
4183 flags |= VM_CALL_OPT_SEND;
4185 if (UNLIKELY(! mid)) {
4186 mid = idMethodMissing;
4187 missing_reason = ci_missing_reason(ci);
4188 ec->method_missing_reason = missing_reason;
4191 if (UNLIKELY(argv_ary = calling->heap_argv)) {
4192 if (rb_method_basic_definition_p(klass, idMethodMissing)) {
4193 rb_ary_unshift(argv_ary, symbol);
4196 int priv = vm_ci_flag(ci) & (VM_CALL_FCALL | VM_CALL_VCALL);
4197 VALUE exc = rb_make_no_method_exception(
4219 CHECK_VM_STACK_OVERFLOW(reg_cfp, 1);
4222 argc = ++calling->argc;
4224 if (rb_method_basic_definition_p(klass, idMethodMissing)) {
4227 int priv = vm_ci_flag(ci) & (VM_CALL_FCALL | VM_CALL_VCALL);
4228 const VALUE *argv = STACK_ADDR_FROM_TOP(argc);
4229 VALUE exc = rb_make_no_method_exception(
4242 .ci = &VM_CI_ON_STACK(mid, flags, argc, vm_ci_kwarg(ci)),
4248 if (!(vm_ci_flag(ci) & VM_CALL_FORWARDING)) {
4249 calling->cd = &new_fcd.cd;
4253 VM_ASSERT((vm_ci_argc(caller_ci), 1));
4254 new_fcd.caller_ci = caller_ci;
4257 calling->cc = &VM_CC_ON_STACK(klass,
4259 { .method_missing_reason = missing_reason },
4260 rb_callable_method_entry_with_refinements(klass, mid, NULL));
4262 if (flags & VM_CALL_FCALL) {
4263 return vm_call_method(ec, reg_cfp, calling);
4267 VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));
4269 if (vm_cc_cme(cc) != NULL) {
4270 switch (METHOD_ENTRY_VISI(vm_cc_cme(cc))) {
4271 case METHOD_VISI_PUBLIC:
4272 return vm_call_method_each_type(ec, reg_cfp, calling);
4273 case METHOD_VISI_PRIVATE:
4274 vm_cc_method_missing_reason_set(cc, MISSING_PRIVATE);
4276 case METHOD_VISI_PROTECTED:
4277 vm_cc_method_missing_reason_set(cc, MISSING_PROTECTED);
4280 VM_UNREACHABLE(vm_call_method);
4282 return vm_call_method_missing(ec, reg_cfp, calling);
4285 return vm_call_method_nome(ec, reg_cfp, calling);
4295 i = calling->argc - 1;
4297 if (calling->argc == 0) {
4298 rb_raise(rb_eArgError, "no method name given");
4322 return vm_call_symbol(ec, reg_cfp, calling, ci, sym, flags);
4328 RB_DEBUG_COUNTER_INC(ccf_opt_send_complex);
4330 int flags = VM_CALL_FCALL;
4334 CALLER_SETUP_ARG(reg_cfp, calling, ci, ALLOW_HEAP_ARGV);
4335 if (UNLIKELY(argv_ary = calling->heap_argv)) {
4337 flags |= VM_CALL_ARGS_SPLAT;
4338 if (calling->kw_splat) {
4339 VALUE last_hash = rb_ary_last(0, NULL, argv_ary);
4340 ((struct RHash *)last_hash)->basic.flags |= RHASH_PASS_AS_KEYWORDS;
4341 calling->kw_splat = 0;
4343 return vm_call_symbol(ec, reg_cfp, calling, ci, sym, flags);
4346 if (calling->kw_splat) flags |= VM_CALL_KW_SPLAT;
4347 return vm_call_opt_send0(ec, reg_cfp, calling, flags);
4353 RB_DEBUG_COUNTER_INC(ccf_opt_send_simple);
4354 return vm_call_opt_send0(ec, reg_cfp, calling, vm_ci_flag(calling->cd->ci) | VM_CALL_FCALL);
4360 RB_DEBUG_COUNTER_INC(ccf_opt_send);
4363 int flags = vm_ci_flag(ci);
4365 if (UNLIKELY((flags & VM_CALL_FORWARDING) || (!(flags & VM_CALL_ARGS_SIMPLE) &&
4366 ((calling->argc == 1 && (flags & (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT))) ||
4367 (calling->argc == 2 && (flags & VM_CALL_ARGS_SPLAT) && (flags & VM_CALL_KW_SPLAT)) ||
4368 ((flags & VM_CALL_KWARG) && (vm_ci_kwarg(ci)->keyword_len == calling->argc)))))) {
4369 CC_SET_FASTPATH(calling->cc, vm_call_opt_send_complex, TRUE);
4370 return vm_call_opt_send_complex(ec, reg_cfp, calling);
4373 CC_SET_FASTPATH(calling->cc, vm_call_opt_send_simple, TRUE);
4374 return vm_call_opt_send_simple(ec, reg_cfp, calling);
4379 const struct rb_callinfo *orig_ci, enum method_missing_reason reason)
4381 RB_DEBUG_COUNTER_INC(ccf_method_missing);
4383 VALUE *argv = STACK_ADDR_FROM_TOP(calling->argc);
4384 unsigned int argc, flag;
4386 flag = VM_CALL_FCALL | VM_CALL_OPT_SEND | vm_ci_flag(orig_ci);
4387 argc = ++calling->argc;
4390 CHECK_VM_STACK_OVERFLOW(reg_cfp, 1);
4391 vm_check_canary(ec, reg_cfp->sp);
4395 argv[0] = ID2SYM(vm_ci_mid(orig_ci));
4398 ec->method_missing_reason = reason;
4402 .ci = &VM_CI_ON_STACK(idMethodMissing, flag, argc, vm_ci_kwarg(orig_ci)),
4408 if (!(flag & VM_CALL_FORWARDING)) {
4409 calling->cd = &new_fcd.cd;
4413 VM_ASSERT((vm_ci_argc(caller_ci), 1));
4414 new_fcd.caller_ci = caller_ci;
4418 calling->cc = &VM_CC_ON_STACK(Qundef, vm_call_general, {{ 0 }},
4419 rb_callable_method_entry_without_refinements(CLASS_OF(calling->recv), idMethodMissing, NULL));
4420 return vm_call_method(ec, reg_cfp, calling);
4426 return vm_call_method_missing_body(ec, reg_cfp, calling, calling->cd->ci, vm_cc_cmethod_missing_reason(calling->cc));
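/* Commentary: when method_missing must be invoked, the VM rewrites the
 * call in place, as done above: the original method id is prepended to
 * the argument list as a Symbol (argv[0] = ID2SYM(mid)), argc grows by
 * one, a fresh call info for idMethodMissing is built on the stack, and
 * dispatch restarts via vm_call_method. Conceptually this turns
 *   recv.foo(a, b)   into   recv.method_missing(:foo, a, b). */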
4437 return vm_call_method_nome(ec, cfp, calling);
4439 if (cme->def->type == VM_METHOD_TYPE_REFINED &&
4440 cme->def->body.refined.orig_me) {
4441 cme = refined_method_callable_without_refinement(cme);
4444 calling->cc = &VM_CC_ON_STACK(Qundef, vm_call_general, {{ 0 }}, cme);
4446 return vm_call_method_each_type(ec, cfp, calling);
4450 find_refinement(VALUE refinements, VALUE klass)
4452 if (NIL_P(refinements)) {
4455 return rb_hash_lookup(refinements, klass);
4464 if (cfp->iseq && ISEQ_BODY(cfp->iseq)->type == ISEQ_TYPE_BLOCK) {
4465 const rb_iseq_t *local_iseq = ISEQ_BODY(cfp->iseq)->local_iseq;
4468 cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
4469 if (RUBY_VM_CONTROL_FRAME_STACK_OVERFLOW_P(ec, cfp)) {
4473 } while (cfp->iseq != local_iseq);
4484 if (orig_me->defined_class == 0) {
4492 VM_ASSERT(callable_method_entry_p(cme));
4494 if (UNDEFINED_METHOD_ENTRY_P(cme)) {
4504 ID mid = vm_ci_mid(calling->cd->ci);
4505 const rb_cref_t *cref = vm_get_cref(cfp->ep);
4509 for (; cref; cref = CREF_NEXT(cref)) {
4510 const VALUE refinement = find_refinement(CREF_REFINEMENTS(cref), vm_cc_cme(cc)->owner);
4511 if (NIL_P(refinement)) continue;
4514 rb_callable_method_entry(refinement, mid);
4517 if (vm_cc_call(cc) == vm_call_super_method) {
4520 if (top_me && rb_method_definition_eq(ref_me->def, top_me->def)) {
4525 if (cme->def->type != VM_METHOD_TYPE_REFINED ||
4526 cme->def != ref_me->def) {
4529 if (ref_me->def->type != VM_METHOD_TYPE_REFINED) {
4538 if (vm_cc_cme(cc)->def->body.refined.orig_me) {
4539 return refined_method_callable_without_refinement(vm_cc_cme(cc));
4554 if (calling->cd->cc) {
4555 const struct rb_callcache *cc = calling->cc = vm_cc_new(vm_cc_cme(calling->cc)->defined_class, ref_cme, vm_call_general, cc_type_refinement);
4557 return vm_call_method(ec, cfp, calling);
4560 struct rb_callcache *ref_cc = &VM_CC_ON_STACK(Qundef, vm_call_general, {{ 0 }}, ref_cme);
4561 calling->cc = ref_cc;
4562 return vm_call_method(ec, cfp, calling);
4566 return vm_call_method_nome(ec, cfp, calling);
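/* Commentary: refinement dispatch walks the CREF chain of the calling
 * frame, looks up the refinement module registered for the method's
 * owner (find_refinement), and retries the method lookup inside that
 * module. If no applicable refined implementation is found, the call
 * falls back to the refined method's original entry (orig_me) or, when
 * none exists, to the no-method-entry path vm_call_method_nome. */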
4572 NOINLINE(static VALUE
4580 int argc = calling->argc;
4583 if (argc > 0) MEMMOVE(&TOPN(argc), &TOPN(argc-1), VALUE, argc);
4586 return vm_invoke_block(ec, reg_cfp, calling, ci, false, block_handler);
4592 RB_DEBUG_COUNTER_INC(ccf_opt_call);
4595 VALUE procval = calling->recv;
4596 return vm_invoke_block_opt_call(ec, reg_cfp, calling, ci, VM_BH_FROM_PROC(procval));
4602 RB_DEBUG_COUNTER_INC(ccf_opt_block_call);
4604 VALUE block_handler = VM_ENV_BLOCK_HANDLER(VM_CF_LEP(reg_cfp));
4607 if (BASIC_OP_UNREDEFINED_P(BOP_CALL, PROC_REDEFINED_OP_FLAG)) {
4608 return vm_invoke_block_opt_call(ec, reg_cfp, calling, ci, block_handler);
4611 calling->recv = rb_vm_bh_to_procval(ec, block_handler);
4612 calling->cc = rb_vm_search_method_slowpath(ci, CLASS_OF(calling->recv));
4613 return vm_call_general(ec, reg_cfp, calling);
4620 VALUE recv = calling->recv;
4623 VM_ASSERT(vm_cc_cme(calling->cc)->def->type == VM_METHOD_TYPE_OPTIMIZED);
4624 VM_ASSERT(vm_cc_cme(calling->cc)->def->body.optimized.type == OPTIMIZED_METHOD_TYPE_STRUCT_AREF);
4626 const unsigned int off = vm_cc_cme(calling->cc)->def->body.optimized.index;
4627 return internal_RSTRUCT_GET(recv, off);
4633 RB_DEBUG_COUNTER_INC(ccf_opt_struct_aref);
4635 VALUE ret = vm_call_opt_struct_aref0(ec, calling);
4643 VALUE recv = calling->recv;
4646 VM_ASSERT(vm_cc_cme(calling->cc)->def->type == VM_METHOD_TYPE_OPTIMIZED);
4647 VM_ASSERT(vm_cc_cme(calling->cc)->def->body.optimized.type == OPTIMIZED_METHOD_TYPE_STRUCT_ASET);
4649 rb_check_frozen(recv);
4651 const unsigned int off = vm_cc_cme(calling->cc)->def->body.optimized.index;
4652 internal_RSTRUCT_SET(recv, off, val);
4660 RB_DEBUG_COUNTER_INC(ccf_opt_struct_aset);
4662 VALUE ret = vm_call_opt_struct_aset0(ec, calling, *(reg_cfp->sp - 1));
4670#define VM_CALL_METHOD_ATTR(var, func, nohook) \
4671 if (UNLIKELY(ruby_vm_event_flags & (RUBY_EVENT_C_CALL | RUBY_EVENT_C_RETURN))) { \
4672 EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_CALL, calling->recv, vm_cc_cme(cc)->def->original_id, \
4673 vm_ci_mid(ci), vm_cc_cme(cc)->owner, Qundef); \
4675 EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_RETURN, calling->recv, vm_cc_cme(cc)->def->original_id, \
4676 vm_ci_mid(ci), vm_cc_cme(cc)->owner, (var)); \
4687 switch (vm_cc_cme(cc)->def->body.optimized.type) {
4688 case OPTIMIZED_METHOD_TYPE_SEND:
4689 CC_SET_FASTPATH(cc, vm_call_opt_send, TRUE);
4690 return vm_call_opt_send(ec, cfp, calling);
4691 case OPTIMIZED_METHOD_TYPE_CALL:
4692 CC_SET_FASTPATH(cc, vm_call_opt_call, TRUE);
4693 return vm_call_opt_call(ec, cfp, calling);
4694 case OPTIMIZED_METHOD_TYPE_BLOCK_CALL:
4695 CC_SET_FASTPATH(cc, vm_call_opt_block_call, TRUE);
4696 return vm_call_opt_block_call(ec, cfp, calling);
4697 case OPTIMIZED_METHOD_TYPE_STRUCT_AREF: {
4698 CALLER_SETUP_ARG(cfp, calling, ci, 0);
4702 VM_CALL_METHOD_ATTR(v,
4703 vm_call_opt_struct_aref(ec, cfp, calling),
4704 set_vm_cc_ivar(cc); \
4705 CC_SET_FASTPATH(cc, vm_call_opt_struct_aref, (vm_ci_flag(ci) & VM_CALL_ARGS_SIMPLE)))
4708 case OPTIMIZED_METHOD_TYPE_STRUCT_ASET: {
4709 CALLER_SETUP_ARG(cfp, calling, ci, 1);
4713 VM_CALL_METHOD_ATTR(v,
4714 vm_call_opt_struct_aset(ec, cfp, calling),
4715 set_vm_cc_ivar(cc); \
4716 CC_SET_FASTPATH(cc, vm_call_opt_struct_aset, (vm_ci_flag(ci) & VM_CALL_ARGS_SIMPLE)))
4720 rb_bug("vm_call_method: unsupported optimized method type (%d)", vm_cc_cme(cc)->def->body.optimized.type);
4732 VM_ASSERT(! METHOD_ENTRY_INVALIDATED(cme));
4734 switch (cme->def->type) {
4735 case VM_METHOD_TYPE_ISEQ:
4736 if (ISEQ_BODY(def_iseq_ptr(cme->def))->param.flags.forwardable) {
4737 CC_SET_FASTPATH(cc, vm_call_iseq_fwd_setup, TRUE);
4738 return vm_call_iseq_fwd_setup(ec, cfp, calling);
4741 CC_SET_FASTPATH(cc, vm_call_iseq_setup, TRUE);
4742 return vm_call_iseq_setup(ec, cfp, calling);
4745 case VM_METHOD_TYPE_NOTIMPLEMENTED:
4746 case VM_METHOD_TYPE_CFUNC:
4747 CC_SET_FASTPATH(cc, vm_call_cfunc, TRUE);
4748 return vm_call_cfunc(ec, cfp, calling);
4750 case VM_METHOD_TYPE_ATTRSET:
4751 CALLER_SETUP_ARG(cfp, calling, ci, 1);
4755 const unsigned int aset_mask = (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT | VM_CALL_KWARG | VM_CALL_FORWARDING);
4757 if (vm_cc_markable(cc)) {
4758 vm_cc_attr_index_initialize(cc, INVALID_SHAPE_ID);
4759 VM_CALL_METHOD_ATTR(v,
4760 vm_call_attrset_direct(ec, cfp, cc, calling->recv),
4761 CC_SET_FASTPATH(cc, vm_call_attrset, !(vm_ci_flag(ci) & aset_mask)));
4767 VM_CALLCACHE_UNMARKABLE |
4768 VM_CALLCACHE_ON_STACK,
4774 .value = vm_pack_shape_and_index(INVALID_SHAPE_ID, ATTR_INDEX_NOT_SET),
4779 VM_CALL_METHOD_ATTR(v,
4780 vm_call_attrset_direct(ec, cfp, cc, calling->recv),
4781 CC_SET_FASTPATH(cc, vm_call_attrset, !(vm_ci_flag(ci) & aset_mask)));
4785 case VM_METHOD_TYPE_IVAR:
4786 CALLER_SETUP_ARG(cfp, calling, ci, 0);
4788 vm_cc_attr_index_initialize(cc, INVALID_SHAPE_ID);
4789 const unsigned int ivar_mask = (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT | VM_CALL_FORWARDING);
4790 VM_CALL_METHOD_ATTR(v,
4791 vm_call_ivar(ec, cfp, calling),
4792 CC_SET_FASTPATH(cc, vm_call_ivar, !(vm_ci_flag(ci) & ivar_mask)));
4795 case VM_METHOD_TYPE_MISSING:
4796 vm_cc_method_missing_reason_set(cc, 0);
4797 CC_SET_FASTPATH(cc, vm_call_method_missing, TRUE);
4798 return vm_call_method_missing(ec, cfp, calling);
4800 case VM_METHOD_TYPE_BMETHOD:
4801 CC_SET_FASTPATH(cc, vm_call_bmethod, TRUE);
4802 return vm_call_bmethod(ec, cfp, calling);
4804 case VM_METHOD_TYPE_ALIAS:
4805 CC_SET_FASTPATH(cc, vm_call_alias, TRUE);
4806 return vm_call_alias(ec, cfp, calling);
4808 case VM_METHOD_TYPE_OPTIMIZED:
4809 return vm_call_optimized(ec, cfp, calling, ci, cc);
4811 case VM_METHOD_TYPE_UNDEF:
4814 case VM_METHOD_TYPE_ZSUPER:
4815 return vm_call_zsuper(ec, cfp, calling, RCLASS_ORIGIN(vm_cc_cme(cc)->defined_class));
4817 case VM_METHOD_TYPE_REFINED:
4820 return vm_call_refined(ec, cfp, calling);
4823 rb_bug("vm_call_method: unsupported method type (%d)", vm_cc_cme(cc)->def->type);
4833 const int stat = ci_missing_reason(ci);
4835 if (vm_ci_mid(ci) == idMethodMissing) {
4836 if (UNLIKELY(calling->heap_argv)) {
4841 VALUE *argv = STACK_ADDR_FROM_TOP(calling->argc);
4842 vm_raise_method_missing(ec, calling->argc, argv, calling->recv, stat);
4846 return vm_call_method_missing_body(ec, cfp, calling, ci, stat);
4858 VALUE defined_class = me->defined_class;
4859 VALUE refined_class = RCLASS_REFINED_CLASS(defined_class);
4860 return NIL_P(refined_class) ? defined_class : refined_class;
4869 VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));
4871 if (vm_cc_cme(cc) != NULL) {
4872 switch (METHOD_ENTRY_VISI(vm_cc_cme(cc))) {
4873 case METHOD_VISI_PUBLIC:
4874 return vm_call_method_each_type(ec, cfp, calling);
4876 case METHOD_VISI_PRIVATE:
4877 if (!(vm_ci_flag(ci) & VM_CALL_FCALL)) {
4878 enum method_missing_reason stat = MISSING_PRIVATE;
4879 if (vm_ci_flag(ci) & VM_CALL_VCALL) stat |= MISSING_VCALL;
4881 vm_cc_method_missing_reason_set(cc, stat);
4882 CC_SET_FASTPATH(cc, vm_call_method_missing, TRUE);
4883 return vm_call_method_missing(ec, cfp, calling);
4885 return vm_call_method_each_type(ec, cfp, calling);
4887 case METHOD_VISI_PROTECTED:
4888 if (!(vm_ci_flag(ci) & (VM_CALL_OPT_SEND | VM_CALL_FCALL))) {
4889 VALUE defined_class = vm_defined_class_for_protected_call(vm_cc_cme(cc));
4891 vm_cc_method_missing_reason_set(cc, MISSING_PROTECTED);
4892 return vm_call_method_missing(ec, cfp, calling);
4896 VM_ASSERT(vm_cc_cme(cc) != NULL);
4899 calling->cc = &cc_on_stack;
4900 return vm_call_method_each_type(ec, cfp, calling);
4903 return vm_call_method_each_type(ec, cfp, calling);
4906 rb_bug("unreachable");
4910 return vm_call_method_nome(ec, cfp, calling);
4917 RB_DEBUG_COUNTER_INC(ccf_general);
4918 return vm_call_method(ec, reg_cfp, calling);
4924 VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
4925 VM_ASSERT(cc != vm_cc_empty());
4927 *(vm_call_handler *)&cc->call_ = vm_call_general;
4933 RB_DEBUG_COUNTER_INC(ccf_super_method);
4938 if (ec == NULL) rb_bug("unreachable");
4941 VM_ASSERT(vm_cc_call(calling->cc) == vm_call_super_method);
4942 return vm_call_method(ec, reg_cfp, calling);
4948 vm_search_normal_superclass(VALUE klass)
4953 klass = RBASIC(klass)->klass;
4955 klass = RCLASS_ORIGIN(klass);
4959 NORETURN(static void vm_super_outside(void));
4962 vm_super_outside(void)
4968 empty_cc_for_super(void)
4970 return &vm_empty_cc_for_super;
4976 VALUE current_defined_class;
4983 current_defined_class = vm_defined_class_for_protected_call(me);
4986 reg_cfp->iseq != method_entry_iseqptr(me) &&
4989 RCLASS_INCLUDER(current_defined_class) : current_defined_class;
4993 "self has wrong type to call super in this context: "
4994 "%"PRIsVALUE
" (expected %"PRIsVALUE
")",
4999 if (me->def->type == VM_METHOD_TYPE_BMETHOD && (vm_ci_flag(cd->ci) & VM_CALL_ZSUPER)) {
5001 "implicit argument passing of super from method defined"
5002 " by define_method() is not supported."
5003 " Specify all arguments explicitly.");
5006 ID mid = me->def->original_id;
5008 if (!vm_ci_markable(cd->ci)) {
5009 VM_FORCE_WRITE((const VALUE *)&cd->ci->mid, (VALUE)mid);
5013 cd->ci = vm_ci_new_runtime(mid,
5016 vm_ci_kwarg(cd->ci));
5023 VALUE klass = vm_search_normal_superclass(me->defined_class);
5027 cc = vm_cc_new(klass, NULL, vm_call_method_missing, cc_type_super);
5031 cc = vm_search_method_fastpath((VALUE)reg_cfp->iseq, cd, klass);
5035 if (cached_cme == NULL) {
5037 cd->cc = empty_cc_for_super();
5039 else if (cached_cme->called_id != mid) {
5042 cc = vm_cc_new(klass, cme, vm_call_super_method, cc_type_super);
5046 cd->cc = cc = empty_cc_for_super();
5050 switch (cached_cme->def->type) {
5052 case VM_METHOD_TYPE_REFINED:
5054 case VM_METHOD_TYPE_ATTRSET:
5055 case VM_METHOD_TYPE_IVAR:
5056 vm_cc_call_set(cc, vm_call_super_method);
5064 VM_ASSERT((vm_cc_cme(cc), true));
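/* Commentary: super dispatch, as resolved above, validates the receiver
 * against the method's defined class, rewrites the call info to the
 * method's original id when needed, and searches from
 * vm_search_normal_superclass (skipping singleton/origin wrappers).
 * Negative results are cached too: a call cache whose handler is
 * vm_call_method_missing (or the shared empty_cc_for_super) stands in
 * when no super method exists or the cached entry no longer matches. */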
5072 block_proc_is_lambda(const VALUE procval)
5077 GetProcPtr(procval, proc);
5078 return proc->is_lambda;
5086 block_proc_namespace(const VALUE procval)
5091 GetProcPtr(procval, proc);
5102 VALUE self, int argc, const VALUE *argv, int kw_splat, VALUE block_handler,
5105 int is_lambda = FALSE;
5106 VALUE val, arg, blockarg;
5108 const struct vm_ifunc *ifunc = captured->code.ifunc;
5113 else if (argc == 0) {
5120 blockarg = rb_vm_bh_to_procval(ec, block_handler);
5122 frame_flag = VM_FRAME_MAGIC_IFUNC | VM_FRAME_FLAG_CFRAME | (me ? VM_FRAME_FLAG_BMETHOD : 0);
5124 frame_flag |= VM_FRAME_FLAG_CFRAME_KW;
5127 vm_push_frame(ec, (const rb_iseq_t *)captured->code.ifunc,
5130 VM_GUARDED_PREV_EP(captured->ep),
5132 0, ec->cfp->sp, 0, 0);
5133 val = (*ifunc->func)(arg, (VALUE)ifunc->data, argc, argv, blockarg);
5134 rb_vm_pop_frame(ec);
5142 return vm_yield_with_cfunc(ec, captured, captured->self, argc, argv, 0, VM_BLOCK_HANDLER_NONE, NULL);
5148 return rb_sym_proc_call(SYM2ID(symbol), argc, argv, kw_splat, rb_vm_bh_to_procval(ec, block_handler));
5157 CHECK_VM_STACK_OVERFLOW(cfp, ISEQ_BODY(iseq)->param.lead_num);
5159 for (i=0; i<len && i<ISEQ_BODY(iseq)->param.lead_num; i++) {
5167 vm_callee_setup_block_arg_arg0_check(VALUE *argv)
5169 VALUE ary, arg0 = argv[0];
5174 VM_ASSERT(argv[0] == arg0);
5182 if (rb_simple_iseq_p(iseq)) {
5186 CALLER_SETUP_ARG(cfp, calling, ci, ISEQ_BODY(iseq)->param.lead_num);
5188 if (arg_setup_type == arg_setup_block &&
5189 calling->argc == 1 &&
5190 ISEQ_BODY(iseq)->param.flags.has_lead &&
5191 !ISEQ_BODY(iseq)->param.flags.ambiguous_param0 &&
5192 !NIL_P(arg0 = vm_callee_setup_block_arg_arg0_check(argv))) {
5193 calling->argc = vm_callee_setup_block_arg_arg0_splat(cfp, iseq, argv, arg0);
5196 if (calling->argc != ISEQ_BODY(iseq)->param.lead_num) {
5197 if (arg_setup_type == arg_setup_block) {
5198 if (calling->argc < ISEQ_BODY(iseq)->param.lead_num) {
5200 CHECK_VM_STACK_OVERFLOW(cfp, ISEQ_BODY(iseq)->param.lead_num);
5201 for (i=calling->argc; i<ISEQ_BODY(iseq)->param.lead_num; i++) argv[i] = Qnil;
5202 calling->argc = ISEQ_BODY(iseq)->param.lead_num;
5204 else if (calling->argc > ISEQ_BODY(iseq)->param.lead_num) {
5205 calling->argc = ISEQ_BODY(iseq)->param.lead_num;
5209 argument_arity_error(ec, iseq, calling->argc, ISEQ_BODY(iseq)->param.lead_num, ISEQ_BODY(iseq)->param.lead_num);
5216 return setup_parameters_complex(ec, iseq, calling, ci, argv, arg_setup_type);
5225 calling = &calling_entry;
5226 calling->argc = argc;
5227 calling->block_handler = block_handler;
5228 calling->kw_splat = (flags & VM_CALL_KW_SPLAT) ? 1 : 0;
5230 calling->heap_argv = 0;
5231 struct rb_callinfo dummy_ci = VM_CI_ON_STACK(0, flags, 0, 0);
5233 return vm_callee_setup_block_arg(ec, calling, &dummy_ci, iseq, argv, arg_setup_type);
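/* Commentary: block argument setup, implemented above. For a non-lambda
 * block whose iseq takes more than one lead argument, a single array
 * argument is auto-splatted (vm_callee_setup_block_arg_arg0_check /
 * _splat); missing arguments are padded with Qnil and extras are
 * discarded. Lambdas use arg_setup_method instead, with strict
 * method-style arity checking, which is why in Ruby
 *   ->(a, b) {}.call([1, 2])        raises ArgumentError, while
 *   proc { |a, b| }.call([1, 2])    binds a=1, b=2. */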
5241 bool is_lambda, VALUE block_handler)
5244 const rb_iseq_t *iseq = rb_iseq_check(captured->code.iseq);
5245 const int arg_size = ISEQ_BODY(iseq)->param.size;
5246 VALUE *const rsp = GET_SP() - calling->argc;
5247 VALUE *const argv = rsp;
5248 int opt_pc = vm_callee_setup_block_arg(ec, calling, ci, iseq, argv, is_lambda ? arg_setup_method : arg_setup_block);
5249 int frame_flag = VM_FRAME_MAGIC_BLOCK | (is_lambda ? VM_FRAME_FLAG_LAMBDA : 0);
5253 if (calling->proc_ns) {
5254 frame_flag |= VM_FRAME_FLAG_NS_SWITCH;
5257 vm_push_frame(ec, iseq,
5260 VM_GUARDED_PREV_EP(captured->ep), 0,
5261 ISEQ_BODY(iseq)->iseq_encoded + opt_pc,
5263 ISEQ_BODY(iseq)->local_table_size - arg_size, ISEQ_BODY(iseq)->stack_max);
5271 MAYBE_UNUSED(bool is_lambda), VALUE block_handler)
5273 VALUE symbol = VM_BH_TO_SYMBOL(block_handler);
5274 int flags = vm_ci_flag(ci);
5276 if (UNLIKELY(!(flags & VM_CALL_ARGS_SIMPLE) &&
5277 ((calling->argc == 0) ||
5278 (calling->argc == 1 && (flags & (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT))) ||
5279 (calling->argc == 2 && (flags & VM_CALL_ARGS_SPLAT) && (flags & VM_CALL_KW_SPLAT)) ||
5280 ((flags & VM_CALL_KWARG) && (vm_ci_kwarg(ci)->keyword_len == calling->argc))))) {
5281 CALLER_SETUP_ARG(reg_cfp, calling, ci, ALLOW_HEAP_ARGV);
5283 if (UNLIKELY(calling->heap_argv)) {
5284#if VM_ARGC_STACK_MAX < 0
5286 rb_raise(rb_eArgError, "no receiver given");
5292 reg_cfp->sp[-1] = reg_cfp->sp[-2];
5293 reg_cfp->sp[-2] = calling->recv;
5294 flags |= VM_CALL_ARGS_SPLAT;
5297 if (calling->argc < 1) {
5298 rb_raise(rb_eArgError, "no receiver given");
5300 calling->recv = TOPN(--calling->argc);
5302 if (calling->kw_splat) {
5303 flags |= VM_CALL_KW_SPLAT;
5307 if (calling->argc < 1) {
5308 rb_raise(rb_eArgError, "no receiver given");
5310 calling->recv = TOPN(--calling->argc);
5313 return vm_call_symbol(ec, reg_cfp, calling, ci, symbol, flags);
5319 MAYBE_UNUSED(bool is_lambda), VALUE block_handler)
5324 CALLER_SETUP_ARG(ec->cfp, calling, ci, ALLOW_HEAP_ARGV_KEEP_KWSPLAT);
5325 argc = calling->argc;
5326 val = vm_yield_with_cfunc(ec, captured, captured->self, CALLING_ARGC(calling), calling->heap_argv ? RARRAY_CONST_PTR(calling->heap_argv) : STACK_ADDR_FROM_TOP(argc), calling->kw_splat, calling->block_handler, NULL);
5332 vm_proc_to_block_handler(VALUE procval)
5334 const struct rb_block *block = vm_proc_block(procval);
5336 switch (vm_block_type(block)) {
5337 case block_type_iseq:
5338 return VM_BH_FROM_ISEQ_BLOCK(&block->as.captured);
5339 case block_type_ifunc:
5340 return VM_BH_FROM_IFUNC_BLOCK(&block->as.captured);
5341 case block_type_symbol:
5342 return VM_BH_FROM_SYMBOL(block->as.symbol);
5343 case block_type_proc:
5344 return VM_BH_FROM_PROC(block->as.proc);
5346 VM_UNREACHABLE(vm_yield_with_proc);
5353 bool is_lambda, VALUE block_handler)
5355 while (vm_block_handler_type(block_handler) == block_handler_type_proc) {
5356 VALUE proc = VM_BH_TO_PROC(block_handler);
5357 if (!calling->proc_ns) {
5358 calling->proc_ns = block_proc_namespace(proc);
5360 is_lambda = block_proc_is_lambda(proc);
5361 block_handler = vm_proc_to_block_handler(proc);
5364 return vm_invoke_block(ec, reg_cfp, calling, ci, is_lambda, block_handler);
5370 bool is_lambda, VALUE block_handler)
5374 bool is_lambda, VALUE block_handler);
5376 switch (vm_block_handler_type(block_handler)) {
5377 case block_handler_type_iseq: func = vm_invoke_iseq_block; break;
5378 case block_handler_type_ifunc: func = vm_invoke_ifunc_block; break;
5379 case block_handler_type_proc: func = vm_invoke_proc_block; break;
5380 case block_handler_type_symbol: func = vm_invoke_symbol_block; break;
5381 default: rb_bug("vm_invoke_block: unreachable");
5384 return func(ec, reg_cfp, calling, ci, is_lambda, block_handler);
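/* Commentary: a block handler encodes what kind of block was passed:
 * an iseq block (an ordinary do...end body), an ifunc (C function
 * block), a Proc object, or a Symbol (&:name). vm_invoke_block picks
 * the matching invoker; Proc handlers are unwrapped first by
 * vm_invoke_proc_block, which re-enters with the proc's underlying
 * handler and its lambda-ness. */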
5388 vm_make_proc_with_iseq(const rb_iseq_t *blockiseq)
5395 rb_bug("vm_make_proc_with_iseq: unreachable");
5398 captured = VM_CFP_TO_CAPTURED_BLOCK(cfp);
5399 captured->code.iseq = blockiseq;
5401 return rb_vm_make_proc(ec, captured, rb_cProc);
5405 vm_once_exec(VALUE iseq)
5412 vm_once_clear(VALUE data)
5415 is->once.running_thread = NULL;
5427 args[0] = obj; args[1] = Qfalse;
5429 if (!UNDEF_P(r) && RTEST(r)) {
5441 enum defined_type type = (enum defined_type)op_type;
5448 return rb_gvar_defined(SYM2ID(obj));
5450 case DEFINED_CVAR: {
5451 const rb_cref_t *cref = vm_get_cref(GET_EP());
5452 klass = vm_get_cvar_base(cref, GET_CFP(), 0);
5457 case DEFINED_CONST_FROM: {
5458 bool allow_nil = type == DEFINED_CONST;
5460 return vm_get_ev_const(ec, klass, SYM2ID(obj), allow_nil, true);
5465 return rb_ec_obj_respond_to(ec, v, SYM2ID(obj), TRUE);
5467 case DEFINED_METHOD:{
5472 switch (METHOD_ENTRY_VISI(me)) {
5473 case METHOD_VISI_PRIVATE:
5475 case METHOD_VISI_PROTECTED:
5479 case METHOD_VISI_PUBLIC:
5483 rb_bug("vm_defined: unreachable: %u", (unsigned int)METHOD_ENTRY_VISI(me));
5487 return check_respond_to_missing(obj, v);
5492 if (GET_BLOCK_HANDLER() != VM_BLOCK_HANDLER_NONE) {
5496 case DEFINED_ZSUPER:
5501 VALUE klass = vm_search_normal_superclass(me->defined_class);
5502 if (!klass) return false;
5504 ID id = me->def->original_id;
5511 return RTEST(vm_backref_defined(ec, GET_LEP(), FIX2INT(obj)));
5513 rb_bug("unimplemented defined? type (VM)");
5523 return vm_defined(ec, reg_cfp, op_type, obj, v);
5527 vm_get_ep(const VALUE *const reg_ep, rb_num_t lv)
5530 const VALUE *ep = reg_ep;
5531 for (i = 0; i < lv; i++) {
5532 ep = GET_PREV_EP(ep);
5538 vm_get_special_object(const VALUE *const reg_ep, enum vm_special_object_type type)
5542 case VM_SPECIAL_OBJECT_VMCORE:
5543 return rb_mRubyVMFrozenCore;
5544 case VM_SPECIAL_OBJECT_CBASE:
5545 return vm_get_cbase(reg_ep);
5546 case VM_SPECIAL_OBJECT_CONST_BASE:
5547 return vm_get_const_base(reg_ep);
5549 rb_bug("putspecialobject insn: unknown value_type %d", type);
5556 const VALUE ary2 = ary2st;
5557 VALUE tmp1 = rb_check_to_array(ary1);
5558 VALUE tmp2 = rb_check_to_array(ary2);
5579 const VALUE ary2 = ary2st;
5581 if (NIL_P(ary2)) return ary1;
5583 VALUE tmp2 = rb_check_to_array(ary2);
5598 return vm_concat_array(ary1, ary2st);
5602 rb_vm_concat_to_array(VALUE ary1, VALUE ary2st)
5604 return vm_concat_to_array(ary1, ary2st);
5613 VALUE tmp = rb_check_to_array(ary);
5617 else if (RTEST(flag)) {
5630 return vm_splat_array(flag, ary);
5636 enum vm_check_match_type type = ((int)flag) & VM_CHECKMATCH_TYPE_MASK;
5638 if (flag & VM_CHECKMATCH_ARRAY) {
5642 for (i = 0; i < n; i++) {
5644 VALUE c = check_match(ec, v, target, type);
5653 return check_match(ec, pattern, target, type);
5660 return vm_check_match(ec, target, pattern, flag);
5664 vm_check_keyword(lindex_t bits, lindex_t idx, const VALUE *ep)
5666 const VALUE kw_bits = *(ep - bits);
5669 unsigned int b = (unsigned int)FIX2ULONG(kw_bits);
5670 if ((idx < KW_SPECIFIED_BITS_MAX) && (b & (0x01 << idx)))
5683 if (RUBY_DTRACE_METHOD_ENTRY_ENABLED() ||
5684 RUBY_DTRACE_METHOD_RETURN_ENABLED() ||
5685 RUBY_DTRACE_CMETHOD_ENTRY_ENABLED() ||
5686 RUBY_DTRACE_CMETHOD_RETURN_ENABLED()) {
5690 RUBY_DTRACE_METHOD_ENTRY_HOOK(ec, 0, 0);
5693 RUBY_DTRACE_CMETHOD_ENTRY_HOOK(ec, 0, 0);
5696 RUBY_DTRACE_METHOD_RETURN_HOOK(ec, 0, 0);
5699 RUBY_DTRACE_CMETHOD_RETURN_HOOK(ec, 0, 0);
5706 vm_const_get_under(ID id, rb_num_t flags, VALUE cbase)
5711 else if (VM_DEFINECLASS_SCOPED_P(flags)) {
5712 return rb_public_const_get_at(cbase, id);
5720 vm_check_if_class(ID id, rb_num_t flags, VALUE super, VALUE klass)
5725 else if (VM_DEFINECLASS_HAS_SUPERCLASS_P(flags)) {
5730 "superclass mismatch for class %"PRIsVALUE
"",
5743 vm_check_if_module(ID id, VALUE mod)
5762 vm_declare_class(ID id, rb_num_t flags, VALUE cbase, VALUE super)
5765 VALUE s = VM_DEFINECLASS_HAS_SUPERCLASS_P(flags) ? super : rb_cObject;
5773 vm_declare_module(ID id, VALUE cbase)
5779 NORETURN(static void unmatched_redefinition(const char *type, VALUE cbase, ID id, VALUE old));
5783 VALUE name = rb_id2str(id);
5784 VALUE message = rb_sprintf("%"PRIsVALUE" is not a %s",
5786 VALUE location = rb_const_source_location_at(cbase, id);
5787 if (!NIL_P(location)) {
5788 rb_str_catf(message, "\n%"PRIsVALUE":%"PRIsVALUE":"
5789 " previous definition of %"PRIsVALUE" was here",
5796 vm_define_class(ID id, rb_num_t flags, VALUE cbase, VALUE super)
5800 if (VM_DEFINECLASS_HAS_SUPERCLASS_P(flags) && !RB_TYPE_P(super, T_CLASS)) {
5802 "superclass must be an instance of Class (given an instance of %"PRIsVALUE")",
5806 vm_check_if_namespace(cbase);
5811 if ((klass = vm_const_get_under(id, flags, cbase)) != 0) {
5812 if (!vm_check_if_class(id, flags, super, klass))
5813 unmatched_redefinition("class", cbase, id, klass);
5817 return vm_declare_class(id, flags, cbase, super);
5822 vm_define_module(ID id, rb_num_t flags, VALUE cbase)
5826 vm_check_if_namespace(cbase);
5827 if ((mod = vm_const_get_under(id, flags, cbase)) != 0) {
5828 if (!vm_check_if_module(id, mod))
5829 unmatched_redefinition("module", cbase, id, mod);
5833 return vm_declare_module(id, cbase);
5838 vm_find_or_create_class_by_id(ID id,
5843 rb_vm_defineclass_type_t type = VM_DEFINECLASS_TYPE(flags);
5846 case VM_DEFINECLASS_TYPE_CLASS:
5848 return vm_define_class(id, flags, cbase, super);
5850 case VM_DEFINECLASS_TYPE_SINGLETON_CLASS:
5854 case VM_DEFINECLASS_TYPE_MODULE:
5856 return vm_define_module(id, flags, cbase);
5859 rb_bug("unknown defineclass type: %d", (int)type);
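/* Commentary: defineclass dispatch. The instruction's flags select
 * class, module, or singleton-class semantics. For class/module, an
 * existing constant under cbase is reused after a compatibility check
 * (vm_check_if_class / vm_check_if_module, raising on superclass
 * mismatch or redefinition as a different kind via
 * unmatched_redefinition); otherwise a fresh class or module is
 * declared under cbase. */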
5863static rb_method_visibility_t
5868 if (!vm_env_cref_by_cref(cfp->ep)) {
5869 return METHOD_VISI_PUBLIC;
5872 return CREF_SCOPE_VISI(vm_ec_cref(ec))->method_visi;
5881 if (!vm_env_cref_by_cref(cfp->ep)) {
5885 return CREF_SCOPE_VISI(vm_ec_cref(ec))->module_func;
5893 rb_method_visibility_t visi;
5898 visi = METHOD_VISI_PUBLIC;
5901 klass = CREF_CLASS_FOR_DEFINITION(cref);
5902 visi = vm_scope_visibility_get(ec);
5909 rb_add_method_iseq(klass, id, (const rb_iseq_t *)iseqval, cref, visi);
5912 RCLASS_SET_MAX_IV_COUNT(klass, rb_estimate_iv_count(klass, (const rb_iseq_t *)iseqval));
5915 if (!is_singleton && vm_scope_module_func_check(ec)) {
5917 rb_add_method_iseq(klass, id, (const rb_iseq_t *)iseqval, cref, METHOD_VISI_PUBLIC);
5927 VALUE block_handler = VM_CF_BLOCK_HANDLER(GET_CFP());
5929 if (block_handler == VM_BLOCK_HANDLER_NONE) {
5930 rb_vm_localjump_error("no block given (yield)", Qnil, 0);
5933 return vm_invoke_block(ec, GET_CFP(), calling, ci, false, block_handler);
5937enum method_explorer_type {
5939 mexp_search_invokeblock,
5948 VALUE block_handler,
5949 enum method_explorer_type method_explorer
5954 int argc = vm_ci_argc(ci);
5955 VALUE recv = TOPN(argc);
5957 .block_handler = block_handler,
5958 .kw_splat = IS_ARGS_KW_SPLAT(ci) > 0,
5964 switch (method_explorer) {
5965 case mexp_search_method:
5966 calling.cc = cc = vm_search_method_fastpath((VALUE)reg_cfp->iseq, cd, CLASS_OF(recv));
5967 val = vm_cc_call(cc)(ec, GET_CFP(), &calling);
5969 case mexp_search_super:
5970 calling.cc = cc = vm_search_super_method(reg_cfp, cd, recv);
5971 val = vm_cc_call(cc)(ec, GET_CFP(), &calling);
5973 case mexp_search_invokeblock:
5974 val = vm_invokeblock_i(ec, GET_CFP(), &calling);
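/* Commentary: vm_sendish is the shared tail of the send,
 * opt_send_without_block, invokesuper, and invokeblock instructions.
 * It reads argc and the receiver from the VM stack, fills a
 * rb_calling_info, resolves the callee per method_explorer
 * (mexp_search_method: normal lookup, mexp_search_super: super lookup,
 * mexp_search_invokeblock: block invocation), and calls through the
 * call-cache handler. */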
5991 if (vm_ci_flag(cd->ci) & VM_CALL_FORWARDING) {
5992 bh = vm_caller_setup_fwd_args(GET_EC(), GET_CFP(), cd, blockiseq, false, &adjusted_cd, &adjusted_ci);
5994 val = vm_sendish(ec, GET_CFP(), &adjusted_cd.cd, bh, mexp_search_method);
5996 if (cd->cc != adjusted_cd.cd.cc && vm_cc_markable(adjusted_cd.cd.cc)) {
6001 bh = vm_caller_setup_arg_block(ec, GET_CFP(), cd->ci, blockiseq, false);
6002 val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_method);
6013 VALUE bh = VM_BLOCK_HANDLER_NONE;
6014 VALUE val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_method);
6029 if (vm_ci_flag(cd->ci) & VM_CALL_FORWARDING) {
6030 bh = vm_caller_setup_fwd_args(GET_EC(), GET_CFP(), cd, blockiseq, true, &adjusted_cd, &adjusted_ci);
6032 val = vm_sendish(ec, GET_CFP(), &adjusted_cd.cd, bh, mexp_search_super);
6034 if (cd->cc != adjusted_cd.cd.cc && vm_cc_markable(adjusted_cd.cd.cc)) {
6039 bh = vm_caller_setup_arg_block(ec, GET_CFP(), cd->ci, blockiseq, true);
6040 val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_super);
6051 VALUE bh = VM_BLOCK_HANDLER_NONE;
6052 VALUE val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_invokeblock);
6080 if (check_method_basic_definition(vm_cc_cme(cc))) {
6089 if (check_cfunc(vm_cc_cme(cc), rb_mod_to_s)) {
6095 val = rb_mod_to_s(recv);
6101 if (check_cfunc(vm_cc_cme(cc), rb_nil_to_s)) {
6102 return rb_nil_to_s(recv);
6106 if (check_cfunc(vm_cc_cme(cc), rb_true_to_s)) {
6107 return rb_true_to_s(recv);
6111 if (check_cfunc(vm_cc_cme(cc), rb_false_to_s)) {
6112 return rb_false_to_s(recv);
6116 if (check_cfunc(vm_cc_cme(cc), rb_int_to_s)) {
6117 return rb_fix_to_s(recv);
6125 vm_opt_ary_freeze(VALUE ary, int bop, ID id)
6127 if (BASIC_OP_UNREDEFINED_P(bop, ARRAY_REDEFINED_OP_FLAG)) {
6136 vm_opt_hash_freeze(VALUE hash, int bop, ID id)
6138 if (BASIC_OP_UNREDEFINED_P(bop, HASH_REDEFINED_OP_FLAG)) {
6147 vm_opt_str_freeze(VALUE str, int bop, ID id)
6149 if (BASIC_OP_UNREDEFINED_P(bop, STRING_REDEFINED_OP_FLAG)) {
6163 if (BASIC_OP_UNREDEFINED_P(BOP_INCLUDE_P, ARRAY_REDEFINED_OP_FLAG)) {
6167 VALUE args[1] = {target};
6170 RUBY_DTRACE_CREATE_HOOK(ARRAY, RARRAY_LEN(ary));
6173 return rb_vm_call_with_refinements(ec, dupary, idIncludeP, 1, args, RB_NO_KEYWORDS);
6180 return vm_opt_duparray_include_p(ec, ary, target);
6186 if (BASIC_OP_UNREDEFINED_P(BOP_MAX, ARRAY_REDEFINED_OP_FLAG)) {
6191 VALUE result = *ptr;
6192 rb_snum_t i = num - 1;
6194 const VALUE v = *++ptr;
6195 if (OPTIMIZED_CMP(v, result) > 0) {
6210 return vm_opt_newarray_max(ec, num, ptr);
6216 if (BASIC_OP_UNREDEFINED_P(BOP_MIN, ARRAY_REDEFINED_OP_FLAG)) {
6221 VALUE result = *ptr;
6222 rb_snum_t i = num - 1;
6224 const VALUE v = *++ptr;
6225 if (OPTIMIZED_CMP(v, result) < 0) {
6240 return vm_opt_newarray_min(ec, num, ptr);
6247 if (BASIC_OP_UNREDEFINED_P(BOP_HASH, ARRAY_REDEFINED_OP_FLAG)) {
6248 return rb_ary_hash_values(num, ptr);
6258 return vm_opt_newarray_hash(ec, num, ptr);
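/* Commentary: the vm_opt_newarray_* helpers back the VM's
 * newarray-send optimization: when a literal array is immediately sent
 * max/min/hash/include?/pack and the corresponding Array method is
 * unredefined, the result is computed directly from the stack slots
 * (or a stack-allocated fake array) and no real Array is ever
 * materialized. If the relevant BOP has been redefined, the helpers
 * fall back to building the array and making a normal call. */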
6267 if (BASIC_OP_UNREDEFINED_P(BOP_INCLUDE_P, ARRAY_REDEFINED_OP_FLAG)) {
6269 VALUE ary = rb_setup_fake_ary(&fake_ary, ptr, num);
6273 VALUE args[1] = {target};
6281 return vm_opt_newarray_include_p(ec, num, ptr, target);
6287 if (BASIC_OP_UNREDEFINED_P(BOP_PACK, ARRAY_REDEFINED_OP_FLAG)) {
6289 VALUE ary = rb_setup_fake_ary(&fake_ary, ptr, num);
6290 return rb_ec_pack_ary(ec, ary, fmt, (UNDEF_P(buffer) ? Qnil : buffer));
6300 if (!UNDEF_P(buffer)) {
6301 args[1] = rb_hash_new_with_size(1);
6302 rb_hash_aset(args[1], ID2SYM(idBuffer), buffer);
6307 return rb_vm_call_with_refinements(ec, rb_ary_new4(num, ptr), idPack, argc, args, kw_splat);
6314 return vm_opt_newarray_pack_buffer(ec, num, ptr, fmt, buffer);
6320 return vm_opt_newarray_pack_buffer(ec, num, ptr, fmt, Qundef);
6326 vm_track_constant_cache(ID id, void *ic)
6329 struct rb_id_table *const_cache = vm->constant_cache;
6330 VALUE lookup_result;
6333 if (rb_id_table_lookup(const_cache, id, &lookup_result)) {
6337 ics = set_init_numtable();
6338 rb_id_table_insert(const_cache, id, (VALUE)ics);
6353 vm->inserting_constant_cache_id = id;
6355 set_insert(ics, (st_data_t)ic);
6357 vm->inserting_constant_cache_id = (ID)0;
6364 for (int i = 0; segments[i]; i++) {
6365 ID id = segments[i];
6366 if (id == idNULL) continue;
6367 vm_track_constant_cache(id, ic);
6376 if ((flags & IMEMO_CONST_CACHE_SHAREABLE) || rb_ractor_main_p()) {
6377 VM_ASSERT(ractor_incidental_shareable_p(flags & IMEMO_CONST_CACHE_SHAREABLE, value));
6379 return (ic_cref == NULL ||
6380 ic_cref == vm_get_cref(reg_ep));
6388 VM_ASSERT(IMEMO_TYPE_P(ice, imemo_constcache));
6389 return vm_inlined_ic_hit_p(ice->flags, ice->value, ice->ic_cref, reg_ep);
6394 rb_vm_ic_hit_p(IC ic, const VALUE *reg_ep)
6396 return ic->entry && vm_ic_hit_p(ic->entry, reg_ep);
6402 if (ruby_vm_const_missing_count > 0) {
6403 ruby_vm_const_missing_count = 0;
6410 ice->ic_cref = vm_get_const_key_cref(reg_ep);
6415 unsigned pos = (unsigned)(pc - ISEQ_BODY(iseq)->iseq_encoded);
6416 rb_yjit_constant_ic_update(iseq, ic, pos);
6425 if (ice && vm_ic_hit_p(ice, GET_EP())) {
6428 VM_ASSERT(val == vm_get_ev_const_chain(ec, segments));
6431 ruby_vm_constant_cache_misses++;
6432 val = vm_get_ev_const_chain(ec, segments);
6433 vm_ic_track_const_chain(GET_CFP(), ic, segments);
6436 vm_ic_update(GET_ISEQ(), ic, val, GET_EP(), GET_PC() - 2);
6448 if (is->once.running_thread == RUNNING_THREAD_ONCE_DONE) {
6449 return is->once.value;
6451 else if (is->once.running_thread == NULL) {
6453 is->once.running_thread = th;
6457 is->once.running_thread = RUNNING_THREAD_ONCE_DONE;
6460 else if (is->once.running_thread == th) {
6462 return vm_once_exec((VALUE)iseq);
6466 RUBY_VM_CHECK_INTS(ec);
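/* Commentary: `once` execution states, as tested above:
 * running_thread == NULL means not yet run (claim it and execute);
 * RUNNING_THREAD_ONCE_DONE means finished (return the cached value);
 * running_thread == th means the same thread re-entered recursively
 * (execute again); any other thread must wait, checking interrupts
 * (RUBY_VM_CHECK_INTS) while it retries. */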
6473 vm_case_dispatch(CDHASH hash, OFFSET else_offset, VALUE key)
6475 switch (OBJ_BUILTIN_TYPE(key)) {
6481 if (BASIC_OP_UNREDEFINED_P(BOP_EQQ,
6482 SYMBOL_REDEFINED_OP_FLAG |
6483 INTEGER_REDEFINED_OP_FLAG |
6484 FLOAT_REDEFINED_OP_FLAG |
6485 NIL_REDEFINED_OP_FLAG |
6486 TRUE_REDEFINED_OP_FLAG |
6487 FALSE_REDEFINED_OP_FLAG |
6488 STRING_REDEFINED_OP_FLAG)) {
6492 if (!isinf(kval) && modf(kval, &kval) == 0.0) {
6496 if (rb_hash_stlike_lookup(hash, key, &val)) {
6516 const ptrdiff_t nsp = VM_SP_CNT(ec, cfp->sp);
6517 const ptrdiff_t nbp = VM_SP_CNT(ec, bp);
6518 static const char stack_consistency_error[] =
6519 "Stack consistency error (sp: %"PRIdPTRDIFF", bp: %"PRIdPTRDIFF")";
6520#if defined RUBY_DEVEL
6521 VALUE mesg = rb_sprintf(stack_consistency_error, nsp, nbp);
6526 rb_bug(stack_consistency_error, nsp, nbp);
6533 if (FIXNUM_2_P(recv, obj) &&
6534 BASIC_OP_UNREDEFINED_P(BOP_PLUS, INTEGER_REDEFINED_OP_FLAG)) {
6535 return rb_fix_plus_fix(recv, obj);
6537 else if (FLONUM_2_P(recv, obj) &&
6538 BASIC_OP_UNREDEFINED_P(BOP_PLUS, FLOAT_REDEFINED_OP_FLAG)) {
6546 BASIC_OP_UNREDEFINED_P(BOP_PLUS, FLOAT_REDEFINED_OP_FLAG)) {
6551 BASIC_OP_UNREDEFINED_P(BOP_PLUS, STRING_REDEFINED_OP_FLAG)) {
6552 return rb_str_opt_plus(recv, obj);
6556 BASIC_OP_UNREDEFINED_P(BOP_PLUS, ARRAY_REDEFINED_OP_FLAG)) {
6567 if (FIXNUM_2_P(recv, obj) &&
6568 BASIC_OP_UNREDEFINED_P(BOP_MINUS, INTEGER_REDEFINED_OP_FLAG)) {
6569 return rb_fix_minus_fix(recv, obj);
6571 else if (FLONUM_2_P(recv, obj) &&
6572 BASIC_OP_UNREDEFINED_P(BOP_MINUS, FLOAT_REDEFINED_OP_FLAG)) {
6580 BASIC_OP_UNREDEFINED_P(BOP_MINUS, FLOAT_REDEFINED_OP_FLAG)) {
6591 if (FIXNUM_2_P(recv, obj) &&
6592 BASIC_OP_UNREDEFINED_P(BOP_MULT, INTEGER_REDEFINED_OP_FLAG)) {
6593 return rb_fix_mul_fix(recv, obj);
6595 else if (FLONUM_2_P(recv, obj) &&
6596 BASIC_OP_UNREDEFINED_P(BOP_MULT, FLOAT_REDEFINED_OP_FLAG)) {
6604 BASIC_OP_UNREDEFINED_P(BOP_MULT, FLOAT_REDEFINED_OP_FLAG)) {
6615 if (FIXNUM_2_P(recv, obj) &&
6616 BASIC_OP_UNREDEFINED_P(BOP_DIV, INTEGER_REDEFINED_OP_FLAG)) {
6617 return (FIX2LONG(obj) == 0) ? Qundef : rb_fix_div_fix(recv, obj);
6619 else if (FLONUM_2_P(recv, obj) &&
6620 BASIC_OP_UNREDEFINED_P(BOP_DIV, FLOAT_REDEFINED_OP_FLAG)) {
6621 return rb_flo_div_flo(recv, obj);
6628 BASIC_OP_UNREDEFINED_P(BOP_DIV, FLOAT_REDEFINED_OP_FLAG)) {
6629 return rb_flo_div_flo(recv, obj);
6639 if (FIXNUM_2_P(recv, obj) &&
6640 BASIC_OP_UNREDEFINED_P(BOP_MOD, INTEGER_REDEFINED_OP_FLAG)) {
6641 return (FIX2LONG(obj) == 0) ? Qundef : rb_fix_mod_fix(recv, obj);
6643 else if (FLONUM_2_P(recv, obj) &&
6644 BASIC_OP_UNREDEFINED_P(BOP_MOD, FLOAT_REDEFINED_OP_FLAG)) {
6652 BASIC_OP_UNREDEFINED_P(BOP_MOD, FLOAT_REDEFINED_OP_FLAG)) {
6663 if (vm_method_cfunc_is(iseq, cd, recv, rb_obj_not_equal)) {
6664 VALUE val = opt_equality(iseq, recv, obj, cd_eq);
6666 if (!UNDEF_P(val)) {
6667 return RBOOL(!RTEST(val));
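/* Commentary: all of the vm_opt_* arithmetic/comparison helpers in this
 * region follow one guard pattern: verify the operand tags
 * (FIXNUM_2_P / FLONUM_2_P / class checks) and verify via
 * BASIC_OP_UNREDEFINED_P that the basic operation has not been
 * redefined; on success compute the result inline, otherwise return
 * Qundef so the interpreter falls back to a normal method call.
 * Division and modulo additionally return Qundef for a zero divisor so
 * the slow path can raise ZeroDivisionError. */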
6677 if (FIXNUM_2_P(recv, obj) &&
6678 BASIC_OP_UNREDEFINED_P(BOP_LT, INTEGER_REDEFINED_OP_FLAG)) {
6681 else if (FLONUM_2_P(recv, obj) &&
6682 BASIC_OP_UNREDEFINED_P(BOP_LT, FLOAT_REDEFINED_OP_FLAG)) {
6690 BASIC_OP_UNREDEFINED_P(BOP_LT, FLOAT_REDEFINED_OP_FLAG)) {
6702 if (FIXNUM_2_P(recv, obj) &&
6703 BASIC_OP_UNREDEFINED_P(BOP_LE, INTEGER_REDEFINED_OP_FLAG)) {
6706 else if (FLONUM_2_P(recv, obj) &&
6707 BASIC_OP_UNREDEFINED_P(BOP_LE, FLOAT_REDEFINED_OP_FLAG)) {
6715 BASIC_OP_UNREDEFINED_P(BOP_LE, FLOAT_REDEFINED_OP_FLAG)) {
6727 if (FIXNUM_2_P(recv, obj) &&
6728 BASIC_OP_UNREDEFINED_P(BOP_GT, INTEGER_REDEFINED_OP_FLAG)) {
6731 else if (FLONUM_2_P(recv, obj) &&
6732 BASIC_OP_UNREDEFINED_P(BOP_GT, FLOAT_REDEFINED_OP_FLAG)) {
6740 BASIC_OP_UNREDEFINED_P(BOP_GT, FLOAT_REDEFINED_OP_FLAG)) {
6752 if (FIXNUM_2_P(recv, obj) &&
6753 BASIC_OP_UNREDEFINED_P(BOP_GE, INTEGER_REDEFINED_OP_FLAG)) {
6756 else if (FLONUM_2_P(recv, obj) &&
6757 BASIC_OP_UNREDEFINED_P(BOP_GE, FLOAT_REDEFINED_OP_FLAG)) {
6765 BASIC_OP_UNREDEFINED_P(BOP_GE, FLOAT_REDEFINED_OP_FLAG)) {
6782 BASIC_OP_UNREDEFINED_P(BOP_LTLT, STRING_REDEFINED_OP_FLAG)) {
6791 BASIC_OP_UNREDEFINED_P(BOP_LTLT, ARRAY_REDEFINED_OP_FLAG)) {
6809 BASIC_OP_UNREDEFINED_P(BOP_AND, INTEGER_REDEFINED_OP_FLAG)) {
6820 if (FIXNUM_2_P(recv, obj) &&
6821 BASIC_OP_UNREDEFINED_P(BOP_OR, INTEGER_REDEFINED_OP_FLAG)) {
6833 if (FIXNUM_2_P(recv, obj) &&
6834 BASIC_OP_UNREDEFINED_P(BOP_AREF, INTEGER_REDEFINED_OP_FLAG)) {
6835 return rb_fix_aref(recv, obj);
6840 BASIC_OP_UNREDEFINED_P(BOP_AREF, ARRAY_REDEFINED_OP_FLAG)) {
6842 return rb_ary_entry_internal(recv, FIX2LONG(obj));
6845 return rb_ary_aref1(recv, obj);
6849 BASIC_OP_UNREDEFINED_P(BOP_AREF, HASH_REDEFINED_OP_FLAG)) {
6850 return rb_hash_aref(recv, obj);
6864 BASIC_OP_UNREDEFINED_P(BOP_ASET, ARRAY_REDEFINED_OP_FLAG) &&
6870 BASIC_OP_UNREDEFINED_P(BOP_ASET, HASH_REDEFINED_OP_FLAG)) {
6871 rb_hash_aset(recv, obj, set);
6883 BASIC_OP_UNREDEFINED_P(BOP_AREF, HASH_REDEFINED_OP_FLAG) &&
6884 rb_hash_compare_by_id_p(recv) == Qfalse &&
6885 !FL_TEST(recv, RHASH_PROC_DEFAULT)) {
6886 return rb_hash_aref(recv, key);
6896 return vm_opt_aref_with(recv, key);
6903 BASIC_OP_UNREDEFINED_P(BOP_ASET, HASH_REDEFINED_OP_FLAG) &&
6904 rb_hash_compare_by_id_p(recv) == Qfalse) {
6905 return rb_hash_aset(recv, key, val);
6915 return vm_opt_aset_with(recv, key, value);
6919 vm_opt_length(VALUE recv, int bop)
6925 BASIC_OP_UNREDEFINED_P(bop, STRING_REDEFINED_OP_FLAG)) {
6926 if (bop == BOP_EMPTY_P) {
6927 return LONG2NUM(RSTRING_LEN(recv));
6934 BASIC_OP_UNREDEFINED_P(bop, ARRAY_REDEFINED_OP_FLAG)) {
6938 BASIC_OP_UNREDEFINED_P(bop, HASH_REDEFINED_OP_FLAG)) {
6947 vm_opt_empty_p(VALUE recv)
6949 switch (vm_opt_length(recv, BOP_EMPTY_P)) {
6962 BASIC_OP_UNREDEFINED_P(BOP_NIL_P, NIL_REDEFINED_OP_FLAG)) {
6965 else if (vm_method_cfunc_is(iseq, cd, recv, rb_false)) {
6981 case RSHIFT(~0UL, 1):
6984 return rb_uint2big(1UL << (SIZEOF_LONG * CHAR_BIT - 2));
7002 vm_opt_succ(VALUE recv)
7005 BASIC_OP_UNREDEFINED_P(BOP_SUCC, INTEGER_REDEFINED_OP_FLAG)) {
7006 return fix_succ(recv);
7012 BASIC_OP_UNREDEFINED_P(BOP_SUCC, STRING_REDEFINED_OP_FLAG)) {
7023 if (vm_method_cfunc_is(iseq, cd, recv, rb_obj_not)) {
7024 return RBOOL(!RTEST(recv));
7039 BASIC_OP_UNREDEFINED_P(BOP_MATCH, STRING_REDEFINED_OP_FLAG)) {
7043 BASIC_OP_UNREDEFINED_P(BOP_MATCH, REGEXP_REDEFINED_OP_FLAG)) {
7061 VALUE self = GET_SELF();
7063 VM_ASSERT(rb_popcount64((uint64_t)event) == 1);
7065 if (event & global_hooks->events) {
7068 vm_dtrace(event, ec);
7069 rb_exec_event_hook_orig(ec, global_hooks, event, self, 0, 0, 0 , val, 0);
7075 if (local_hooks != NULL) {
7076 if (event & local_hooks->events) {
7079 rb_exec_event_hook_orig(ec, local_hooks, event, self, 0, 0, 0 , val, 0);
7085#define VM_TRACE_HOOK(target_event, val) do { \
7086 if ((pc_events & (target_event)) & enabled_flags) { \
7087 vm_trace_hook(ec, reg_cfp, pc, pc_events, (target_event), global_hooks, local_hooks_ptr, (val)); \
7094 VM_ASSERT(VM_FRAME_RUBYFRAME_P(cfp));
7095 VM_ASSERT(ISEQ_BODY(cfp->iseq)->type == ISEQ_TYPE_RESCUE);
7096 return cfp->ep[VM_ENV_INDEX_LAST_LVAR];
7102 const VALUE *pc = reg_cfp->pc;
7103 rb_event_flag_t enabled_flags = ruby_vm_event_flags & ISEQ_TRACE_EVENTS;
7106 if (enabled_flags == 0 && ruby_vm_event_local_num == 0) {
7112 size_t pos = pc - ISEQ_BODY(iseq)->iseq_encoded;
7115 rb_hook_list_t *const *local_hooks_ptr = &iseq->aux.exec.local_hooks;
7116 rb_event_flag_t iseq_local_events = local_hooks != NULL ? local_hooks->events : 0;
7120 const bool bmethod_frame = VM_FRAME_BMETHOD_P(reg_cfp);
7121 enabled_flags |= iseq_local_events;
7123 VM_ASSERT((iseq_local_events & ~ISEQ_TRACE_EVENTS) == 0);
7125 if (bmethod_frame) {
7127 VM_ASSERT(me->def->type == VM_METHOD_TYPE_BMETHOD);
7128 bmethod_local_hooks = me->def->body.bmethod.hooks;
7129 bmethod_local_hooks_ptr = &me->def->body.bmethod.hooks;
7130 if (bmethod_local_hooks) {
7131 bmethod_local_events = bmethod_local_hooks->events;
7136 if ((pc_events & enabled_flags) == 0 && !bmethod_frame) {
7140 rb_iseq_trace_set(iseq, vm_event_flags & ISEQ_TRACE_EVENTS);
7148 else if (ec->trace_arg != NULL) {
7156 rb_event_flag_t bmethod_events = global_events | bmethod_local_events;
7159 ruby_debug_printf("vm_trace>>%4d (%4x) - %s:%d %s\n",
7162 RSTRING_PTR(rb_iseq_path(iseq)),
7163 (int)rb_iseq_line_no(iseq, pos),
7164 RSTRING_PTR(rb_iseq_label(iseq)));
7166 VM_ASSERT(reg_cfp->pc == pc);
7167 VM_ASSERT(pc_events != 0);
7177 VM_TRACE_HOOK(RUBY_EVENT_COVERAGE_LINE,
Qundef);
7178 VM_TRACE_HOOK(RUBY_EVENT_COVERAGE_BRANCH,
Qundef);
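/* The remaining event classes (RUBY_EVENT_LINE, RUBY_EVENT_CLASS/END,
 * RUBY_EVENT_CALL/RETURN, RUBY_EVENT_RESCUE, ...) are dispatched the same
 * way; RUBY_EVENT_RESCUE passes rescue_errinfo(ec, reg_cfp) as the hook
 * value instead of Qundef. */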
#if VM_CHECK_MODE > 0
NORETURN( NOINLINE( COLDFUNC
void rb_vm_canary_is_found_dead(enum ruby_vminsn_type i, VALUE c)));

void
Init_vm_stack_canary(void)
{
    /* Must run after the PRNG has been initialized. */
    int n = ruby_fill_random_bytes(&vm_stack_canary, sizeof vm_stack_canary, false);
    vm_stack_canary |= 0x01;    /* keep it a valid VALUE (Fixnum) */
    vm_stack_canary_was_born = true;
    VM_ASSERT(n == 0);
}

void
rb_vm_canary_is_found_dead(enum ruby_vminsn_type i, VALUE c)
{
    const char *insn = rb_insns_name(i);
    VALUE inspection = rb_inspect(c);
    const char *str = StringValueCStr(inspection);

    rb_bug("dead canary found at %s: %s", insn, str);
}

#else
void Init_vm_stack_canary(void) { /* nothing to do */ }
#endif
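/* How the canary works (summary): SETUP_CANARY() plants vm_stack_canary in
 * an unused VM stack slot before a leaf instruction runs, and
 * CHECK_CANARY() calls rb_vm_canary_is_found_dead() if that slot no longer
 * holds it, i.e. if the supposedly leaf instruction wrote to the VM stack
 * after all.  Forcing the low bit on makes the canary a Fixnum-tagged
 * VALUE, so a scan of that slot never mistakes it for a heap pointer. */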
/* builtin_invoker0 .. builtin_invoker8 (excerpts): one trampoline per
 * arity, each casting the type-erased funcptr back to its concrete
 * signature before the call.  Each return below lives in its own
 * builtin_invokerN function. */
    return (*(rb_invoke_funcptr0_t)funcptr)(ec, self);
    return (*(rb_invoke_funcptr1_t)funcptr)(ec, self, argv[0]);
    return (*(rb_invoke_funcptr2_t)funcptr)(ec, self, argv[0], argv[1]);
    return (*(rb_invoke_funcptr3_t)funcptr)(ec, self, argv[0], argv[1], argv[2]);
    return (*(rb_invoke_funcptr4_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3]);
    return (*(rb_invoke_funcptr5_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4]);
    return (*(rb_invoke_funcptr6_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
    return (*(rb_invoke_funcptr7_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
    return (*(rb_invoke_funcptr8_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
    typedef VALUE (*rb_invoke_funcptr9_t)(rb_execution_context_t *ec, VALUE self,
                                          VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5,
                                          VALUE v6, VALUE v7, VALUE v8, VALUE v9);
    return (*(rb_invoke_funcptr9_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);

    typedef VALUE (*rb_invoke_funcptr10_t)(rb_execution_context_t *ec, VALUE self,
                                           VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5,
                                           VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10);
    return (*(rb_invoke_funcptr10_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);

    typedef VALUE (*rb_invoke_funcptr11_t)(rb_execution_context_t *ec, VALUE self,
                                           VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5,
                                           VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11);
    return (*(rb_invoke_funcptr11_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);

    typedef VALUE (*rb_invoke_funcptr12_t)(rb_execution_context_t *ec, VALUE self,
                                           VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6,
                                           VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11, VALUE v12);
    return (*(rb_invoke_funcptr12_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);

    typedef VALUE (*rb_invoke_funcptr13_t)(rb_execution_context_t *ec, VALUE self,
                                           VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6,
                                           VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11, VALUE v12, VALUE v13);
    return (*(rb_invoke_funcptr13_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);

    typedef VALUE (*rb_invoke_funcptr14_t)(rb_execution_context_t *ec, VALUE self,
                                           VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7,
                                           VALUE v8, VALUE v9, VALUE v10, VALUE v11, VALUE v12, VALUE v13, VALUE v14);
    return (*(rb_invoke_funcptr14_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);

    typedef VALUE (*rb_invoke_funcptr15_t)(rb_execution_context_t *ec, VALUE self,
                                           VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7,
                                           VALUE v8, VALUE v9, VALUE v10, VALUE v11, VALUE v12, VALUE v13, VALUE v14, VALUE v15);
    return (*(rb_invoke_funcptr15_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
static builtin_invoker
lookup_builtin_invoker(int argc)
{
    static const builtin_invoker invokers[] = {
        builtin_invoker0, builtin_invoker1, builtin_invoker2,  builtin_invoker3,  builtin_invoker4,  builtin_invoker5,  builtin_invoker6,  builtin_invoker7,
        builtin_invoker8, builtin_invoker9, builtin_invoker10, builtin_invoker11, builtin_invoker12, builtin_invoker13, builtin_invoker14, builtin_invoker15,
    };
    return invokers[argc];
}
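/* A standalone sketch (illustration only; `anyfunc`, `invoker`, lookup()
 * and the demo functions are made-up names, not CRuby API) of the
 * arity-indexed trampoline pattern above: the callee's signature is erased
 * for storage, and the table restores it based on argc alone. */
#include <stdio.h>

typedef void (*anyfunc)(void);                       /* type-erased pointer */
typedef int (*invoker)(anyfunc f, const int *argv);  /* one stub per arity */

static int invoke0(anyfunc f, const int *argv) { (void)argv; return ((int (*)(void))f)(); }
static int invoke1(anyfunc f, const int *argv) { return ((int (*)(int))f)(argv[0]); }
static int invoke2(anyfunc f, const int *argv) { return ((int (*)(int, int))f)(argv[0], argv[1]); }

static invoker lookup(int argc)
{
    static const invoker table[] = { invoke0, invoke1, invoke2 };
    return table[argc];
}

static int answer(void)      { return 42; }
static int add(int a, int b) { return a + b; }

int main(void)
{
    const int argv[] = { 40, 2 };
    printf("%d\n", lookup(0)((anyfunc)answer, argv)); /* => 42 */
    printf("%d\n", lookup(2)((anyfunc)add, argv));    /* => 42 */
    return 0;
}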
/* invoke_bf / vm_invoke_builtin (excerpt): looks up the trampoline for
 * bf->argc and calls the builtin, wrapping leaf builtins in a canary
 * check. */
    const bool canary_p = ISEQ_BODY(reg_cfp->iseq)->builtin_attrs & BUILTIN_ATTR_LEAF; /* verify `Primitive.attr! :leaf` */
    SETUP_CANARY(canary_p);
    rb_insn_func_t func_ptr = (rb_insn_func_t)(uintptr_t)bf->func_ptr;
    VALUE ret = (*lookup_builtin_invoker(bf->argc))(ec, reg_cfp->self, argv, func_ptr);
    CHECK_CANARY(canary_p, BIN(invokebuiltin));
    return ret;
    /* ... */
    return invoke_bf(ec, cfp, bf, argv);
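/* A builtin declared leaf promises not to call back into Ruby or otherwise
 * touch the VM stack; the SETUP_CANARY/CHECK_CANARY pair turns a broken
 * promise into an immediate rb_bug() instead of silent stack corruption. */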
/* vm_invoke_builtin_delegate (excerpt): forwards `bf->argc` consecutive
 * locals of the current frame, starting at `start_index`, to the builtin.
 * The debug print below is normally compiled out. */
        fputs("vm_invoke_builtin_delegate: passing -> ", stderr);
        for (int i = 0; i < bf->argc; i++) {
            ruby_debug_printf(":%s ", rb_id2name(ISEQ_BODY(cfp->iseq)->local_table[i+start_index]));
        }
        ruby_debug_printf("\n" "%s %s(%d):%p\n", RUBY_FUNCTION_NAME_STRING, bf->name, bf->argc,
                          (void *)(uintptr_t)bf->func_ptr);

    if (bf->argc == 0) {
        return invoke_bf(ec, cfp, bf, NULL);
    }
    else {
        const VALUE *argv = cfp->ep - ISEQ_BODY(cfp->iseq)->local_table_size - VM_ENV_DATA_SIZE + 1 + start_index;
        return invoke_bf(ec, cfp, bf, argv);
    }
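/* argv math: a frame's locals occupy the slots just below its ep, with
 * local 0 at ep - (local_table_size + VM_ENV_DATA_SIZE - 1).  Adding
 * start_index selects the first forwarded local, and because locals are
 * contiguous the builtin can consume them in place as its argv array. */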
/* Reads a local variable straight off the current frame's ep (exposed for
 * builtins). */
    return cfp->ep[index];