#include "ruby/internal/config.h"

#ifdef HAVE_STDATOMIC_H
  #include <stdatomic.h>
#endif

#include "debug_counter.h"
#include "internal/class.h"
#include "internal/compar.h"
#include "internal/hash.h"
#include "internal/numeric.h"
#include "internal/proc.h"
#include "internal/random.h"
#include "internal/variable.h"
#include "internal/set_table.h"
#include "internal/struct.h"

#include "insns_info.inc"
                                 int argc, const VALUE *argv, int priv);

ruby_vm_special_exception_copy(VALUE exc)

    rb_obj_copy_ivar(e, exc);

    VALUE mesg = rb_ec_vm_ptr(ec)->special_exceptions[ruby_error_sysstack];
    ec->raised_flag = RAISED_STACKOVERFLOW;

    VALUE at = rb_ec_backtrace_object(ec);
    mesg = ruby_vm_special_exception_copy(mesg);

    EC_JUMP_TAG(ec, TAG_RAISE);

NORETURN(static void vm_stackoverflow(void));

    ec_stack_overflow(GET_EC(), TRUE);

    rb_bug("system stack overflow during GC. Faulty native extension?");

    ec->raised_flag = RAISED_STACKOVERFLOW;
    ec->errinfo = rb_ec_vm_ptr(ec)->special_exceptions[ruby_error_stackfatal];
    EC_JUMP_TAG(ec, TAG_RAISE);

    ec_stack_overflow(ec, crit == 0);

callable_class_p(VALUE klass)

#if VM_CHECK_MODE >= 2
    if (!klass) return FALSE;

    VM_ASSERT(IMEMO_TYPE_P((VALUE)cme, imemo_ment),
              "imemo_type:%s", rb_imemo_name(imemo_type((VALUE)cme)));

    if (callable_class_p(cme->defined_class)) {
vm_check_frame_detail(VALUE type, int req_block, int req_me, int req_cref,
                      VALUE specval, VALUE cref_or_me, int is_cframe, const rb_iseq_t *iseq)
{
    unsigned int magic = (unsigned int)(type & VM_FRAME_MAGIC_MASK);
    enum imemo_type cref_or_me_type = imemo_env; /* impossible value */

        cref_or_me_type = imemo_type(cref_or_me);

    if (type & VM_FRAME_FLAG_BMETHOD) {

    if (req_block && (type & VM_ENV_FLAG_LOCAL) == 0) {
        rb_bug("vm_push_frame: specval (%p) should be a block_ptr on %x frame", (void *)specval, magic);
    }
    if (!req_block && (type & VM_ENV_FLAG_LOCAL) != 0) {
        rb_bug("vm_push_frame: specval (%p) should not be a block_ptr on %x frame", (void *)specval, magic);
    }

        if (cref_or_me_type != imemo_ment) {
            rb_bug("vm_push_frame: (%s) should be method entry on %x frame", rb_obj_info(cref_or_me), magic);
        }

        if (req_cref && cref_or_me_type != imemo_cref) {
            rb_bug("vm_push_frame: (%s) should be CREF on %x frame", rb_obj_info(cref_or_me), magic);
        }

            if (cref_or_me != Qfalse && cref_or_me_type != imemo_cref) {
                if (((type & VM_FRAME_FLAG_LAMBDA) || magic == VM_FRAME_MAGIC_IFUNC) && (cref_or_me_type == imemo_ment)) {

                    rb_bug("vm_push_frame: (%s) should be false or cref on %x frame", rb_obj_info(cref_or_me), magic);

    if (cref_or_me_type == imemo_ment) {

        if (!callable_method_entry_p(me)) {
            rb_bug("vm_push_frame: ment (%s) should be callable on %x frame.", rb_obj_info(cref_or_me), magic);
        }

    if ((type & VM_FRAME_MAGIC_MASK) == VM_FRAME_MAGIC_DUMMY) {
        VM_ASSERT(iseq == NULL ||
                  RUBY_VM_NORMAL_ISEQ_P(iseq)
                  );
    }
    else {
        VM_ASSERT(is_cframe == !RUBY_VM_NORMAL_ISEQ_P(iseq));
    }

    VALUE given_magic = type & VM_FRAME_MAGIC_MASK;

#define CHECK(magic, req_block, req_me, req_cref, is_cframe) \
    vm_check_frame_detail(type, req_block, req_me, req_cref, \
                          specval, cref_or_me, is_cframe, iseq); \

    switch (given_magic) {
        CHECK(VM_FRAME_MAGIC_METHOD, TRUE,  TRUE,  FALSE, FALSE);
        CHECK(VM_FRAME_MAGIC_CLASS,  TRUE,  FALSE, TRUE,  FALSE);
        CHECK(VM_FRAME_MAGIC_TOP,    TRUE,  FALSE, TRUE,  FALSE);
        CHECK(VM_FRAME_MAGIC_CFUNC,  TRUE,  TRUE,  FALSE, TRUE);
        CHECK(VM_FRAME_MAGIC_BLOCK,  FALSE, FALSE, FALSE, FALSE);
        CHECK(VM_FRAME_MAGIC_IFUNC,  FALSE, FALSE, FALSE, TRUE);
        CHECK(VM_FRAME_MAGIC_EVAL,   FALSE, FALSE, FALSE, FALSE);
        CHECK(VM_FRAME_MAGIC_RESCUE, FALSE, FALSE, FALSE, FALSE);
        CHECK(VM_FRAME_MAGIC_DUMMY,  TRUE,  FALSE, FALSE, FALSE);

        rb_bug("vm_push_frame: unknown type (%x)", (unsigned int)given_magic);
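/*
 * Note (added comment): vm_check_frame()/vm_check_frame_detail() above are
 * debug-mode consistency checks. For each frame magic they assert which of
 * a block handler, a method entry, or a CREF may legally occupy the ep
 * slots. They compile away entirely unless VM_CHECK_MODE is enabled.
 */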
static VALUE vm_stack_canary;
static bool vm_stack_canary_was_born = false;

    unsigned int pos = 0;
    while (pos < ISEQ_BODY(iseq)->iseq_size) {
        int opcode = rb_vm_insn_addr2opcode((void *)ISEQ_BODY(iseq)->iseq_encoded[pos]);
        unsigned int next_pos = pos + insn_len(opcode);
        if (ISEQ_BODY(iseq)->iseq_encoded + next_pos == pc) {

    rb_bug("failed to find the previous insn");

    if (! LIKELY(vm_stack_canary_was_born)) {

    else if ((VALUE *)reg_cfp == ec->vm_stack + ec->vm_stack_size) {

    else if (! (iseq = GET_ISEQ())) {

    else if (LIKELY(sp[0] != vm_stack_canary)) {

    const VALUE *orig = rb_iseq_original_iseq(iseq);
    const VALUE iseqw = rb_iseqw_new(iseq);
    const char *stri = rb_str_to_cstr(inspection);
    const VALUE disasm = rb_iseq_disasm(iseq);
    const char *strd = rb_str_to_cstr(disasm);
    const ptrdiff_t pos = previous_insn_index(iseq, GET_PC());
    const enum ruby_vminsn_type insn = (enum ruby_vminsn_type)orig[pos];
    const char *name = insn_name(insn);

            "We are killing the stack canary set by %s, "
            "at %s@pc=%"PRIdPTR"\n"
            "watch out the C stack trace.\n"
            name, stri, pos, strd);
    rb_bug("see above.");

#define vm_check_canary(ec, sp) rb_vm_check_canary(ec, sp)
#else
#define vm_check_canary(ec, sp)
#define vm_check_frame(a, b, c, d)
#endif /* VM_CHECK_MODE > 0 */
vm_push_frame_debug_counter_inc(

    RB_DEBUG_COUNTER_INC(frame_push);

    if (RUBY_VM_END_CONTROL_FRAME(ec) != prev_cfp) {
        const bool curr = VM_FRAME_RUBYFRAME_P(reg_cfp);
        const bool prev = VM_FRAME_RUBYFRAME_P(prev_cfp);

        RB_DEBUG_COUNTER_INC(frame_R2R);

        RB_DEBUG_COUNTER_INC(frame_R2C);

        RB_DEBUG_COUNTER_INC(frame_C2R);

        RB_DEBUG_COUNTER_INC(frame_C2C);

    switch (type & VM_FRAME_MAGIC_MASK) {
      case VM_FRAME_MAGIC_METHOD: RB_DEBUG_COUNTER_INC(frame_push_method); return;
      case VM_FRAME_MAGIC_BLOCK:  RB_DEBUG_COUNTER_INC(frame_push_block);  return;
      case VM_FRAME_MAGIC_CLASS:  RB_DEBUG_COUNTER_INC(frame_push_class);  return;
      case VM_FRAME_MAGIC_TOP:    RB_DEBUG_COUNTER_INC(frame_push_top);    return;
      case VM_FRAME_MAGIC_CFUNC:  RB_DEBUG_COUNTER_INC(frame_push_cfunc);  return;
      case VM_FRAME_MAGIC_IFUNC:  RB_DEBUG_COUNTER_INC(frame_push_ifunc);  return;
      case VM_FRAME_MAGIC_EVAL:   RB_DEBUG_COUNTER_INC(frame_push_eval);   return;
      case VM_FRAME_MAGIC_RESCUE: RB_DEBUG_COUNTER_INC(frame_push_rescue); return;
      case VM_FRAME_MAGIC_DUMMY:  RB_DEBUG_COUNTER_INC(frame_push_dummy);  return;
    }

    rb_bug("unreachable");

#else
#define vm_push_frame_debug_counter_inc(ec, cfp, t)
#endif
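/*
 * Note (added comment): the counters above are pure statistics gathered
 * under USE_DEBUG_COUNTER. frame_R2R/R2C/C2R/C2C classify each frame push
 * by whether the previous and new frames are Ruby or C frames; they have
 * no effect on execution.
 */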
rb_vm_stack_canary(void)
{
    return vm_stack_canary;
}

STATIC_ASSERT(VM_ENV_DATA_INDEX_ME_CREF, VM_ENV_DATA_INDEX_ME_CREF == -2);
STATIC_ASSERT(VM_ENV_DATA_INDEX_SPECVAL, VM_ENV_DATA_INDEX_SPECVAL == -1);
STATIC_ASSERT(VM_ENV_DATA_INDEX_FLAGS,   VM_ENV_DATA_INDEX_FLAGS   == -0);

    vm_check_frame(type, specval, cref_or_me, iseq);
    VM_ASSERT(local_size >= 0);

    CHECK_VM_STACK_OVERFLOW0(cfp, sp, local_size + stack_max);
    vm_check_canary(ec, sp);

    for (int i=0; i < local_size; i++) {

#if defined HAVE_DECL_ATOMIC_SIGNAL_FENCE && HAVE_DECL_ATOMIC_SIGNAL_FENCE
    atomic_signal_fence(memory_order_seq_cst);
#endif

    vm_push_frame_debug_counter_inc(ec, cfp, type);

    if (VMDEBUG == 2) SDR();
    ec->cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);

    VALUE flags = ep[VM_ENV_DATA_INDEX_FLAGS];

    if (VMDEBUG == 2) SDR();
    RUBY_VM_CHECK_INTS(ec);
    ec->cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);

    return flags & VM_FRAME_FLAG_FINISH;

    vm_pop_frame(ec, ec->cfp, ec->cfp->ep);

    rb_iseq_t *dmy_iseq = rb_iseq_alloc_with_dummy_path(fname);

                  VM_FRAME_MAGIC_DUMMY | VM_ENV_FLAG_LOCAL | VM_FRAME_FLAG_FINISH,
                  VM_BLOCK_HANDLER_NONE,

    return (VALUE)dmy_iseq;
rb_arity_error_new(int argc, int min, int max)
{
    VALUE err_mess = rb_sprintf("wrong number of arguments (given %d, expected %d", argc, min);

        rb_str_catf(err_mess, "..%d", max);

rb_error_arity(int argc, int min, int max)

NOINLINE(static void vm_env_write_slowpath(const VALUE *ep, int index, VALUE v));

static void
vm_env_write_slowpath(const VALUE *ep, int index, VALUE v)
{
    rb_gc_writebarrier_remember(VM_ENV_ENVVAL(ep));
    VM_FORCE_WRITE(&ep[index], v);
    VM_ENV_FLAGS_UNSET(ep, VM_ENV_FLAG_WB_REQUIRED);
    RB_DEBUG_COUNTER_INC(lvar_set_slowpath);
}
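/*
 * Note (added comment): vm_env_write() below is the fast path for local
 * variable writes. While the env has not escaped to the heap
 * (VM_ENV_FLAG_WB_REQUIRED unset) it stores directly onto the VM stack;
 * otherwise it falls back to vm_env_write_slowpath() above, which notifies
 * the GC write barrier first.
 */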
static inline void
vm_env_write(const VALUE *ep, int index, VALUE v)
{
    VALUE flags = ep[VM_ENV_DATA_INDEX_FLAGS];
    if (LIKELY((flags & VM_ENV_FLAG_WB_REQUIRED) == 0)) {
        VM_STACK_ENV_WRITE(ep, index, v);
    }
    else {
        vm_env_write_slowpath(ep, index, v);
    }
}

rb_vm_env_write(const VALUE *ep, int index, VALUE v)
{
    vm_env_write(ep, index, v);
}

    if (block_handler == VM_BLOCK_HANDLER_NONE) {

    switch (vm_block_handler_type(block_handler)) {
      case block_handler_type_iseq:
      case block_handler_type_ifunc:
        return rb_vm_make_proc(ec, VM_BH_TO_CAPT_BLOCK(block_handler), rb_cProc);
      case block_handler_type_symbol:
        return rb_sym_to_proc(VM_BH_TO_SYMBOL(block_handler));
      case block_handler_type_proc:
        return VM_BH_TO_PROC(block_handler);

    VM_UNREACHABLE(rb_vm_bh_to_procval);
vm_svar_valid_p(VALUE svar)

    switch (imemo_type(svar)) {

    rb_bug("vm_svar_valid_p: unknown type: %s", rb_obj_info(svar));

    if (lep && (ec == NULL || ec->root_lep != lep)) {
        svar = lep[VM_ENV_DATA_INDEX_ME_CREF];
    }
    else {
        svar = ec->root_svar;
    }

    VM_ASSERT(svar == Qfalse || vm_svar_valid_p(svar));

    VM_ASSERT(vm_svar_valid_p((VALUE)svar));

    if (lep && (ec == NULL || ec->root_lep != lep)) {
        vm_env_write(lep, VM_ENV_DATA_INDEX_ME_CREF, (VALUE)svar);
    }
    else {
        RB_OBJ_WRITE(rb_ec_thread_ptr(ec)->self, &ec->root_svar, svar);
    }

    const struct vm_svar *svar = lep_svar(ec, lep);

      case VM_SVAR_LASTLINE:
        return svar->lastline;
      case VM_SVAR_BACKREF:
        return svar->backref;

        const VALUE ary = svar->others;

    struct vm_svar *svar = IMEMO_NEW(struct vm_svar, imemo_svar, obj);

    struct vm_svar *svar = lep_svar(ec, lep);

        lep_svar_write(ec, lep, svar = svar_new((VALUE)svar));

      case VM_SVAR_LASTLINE:

      case VM_SVAR_BACKREF:

        VALUE ary = svar->others;

        val = lep_svar_get(ec, lep, key);

        VALUE backref = lep_svar_get(ec, lep, VM_SVAR_BACKREF);

            rb_bug("unexpected back-ref");

    VALUE backref = lep_svar_get(ec, lep, VM_SVAR_BACKREF);

        return rb_reg_last_defined(backref);

        rb_bug("unexpected back-ref");

        nth = (int)(type >> 1);
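/*
 * Note (added comment): the svar helpers above back Ruby's special
 * variables: $_ (lastline) and $~ (backref) live in a per-frame
 * `struct vm_svar` imemo, found either through the local ep's ME/CREF slot
 * or, for the root frame, in ec->root_svar.
 */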
check_method_entry(VALUE obj, int can_be_svar)
{
    if (obj == Qfalse) return NULL;

    if (!RB_TYPE_P(obj, T_IMEMO)) rb_bug("check_method_entry: unknown type: %s", rb_obj_info(obj));

    switch (imemo_type(obj)) {

        rb_bug("check_method_entry: svar should not be there:");

    const VALUE *ep = cfp->ep;

    while (!VM_ENV_LOCAL_P(ep)) {
        if ((me = check_method_entry(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) != NULL) return me;
        ep = VM_ENV_PREV_EP(ep);
    }

    return check_method_entry(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);

    switch (me->def->type) {
      case VM_METHOD_TYPE_ISEQ:
        return me->def->body.iseq.iseqptr;

    switch (me->def->type) {
      case VM_METHOD_TYPE_ISEQ:
        return me->def->body.iseq.cref;

#if VM_CHECK_MODE == 0

check_cref(VALUE obj, int can_be_svar)
{
    if (obj == Qfalse) return NULL;

    if (!RB_TYPE_P(obj, T_IMEMO)) rb_bug("check_cref: unknown type: %s", rb_obj_info(obj));

    switch (imemo_type(obj)) {

        rb_bug("check_method_entry: svar should not be there:");

vm_env_cref(const VALUE *ep)

    while (!VM_ENV_LOCAL_P(ep)) {
        if ((cref = check_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) != NULL) return cref;
        ep = VM_ENV_PREV_EP(ep);
    }

    return check_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);

is_cref(const VALUE v, int can_be_svar)

    switch (imemo_type(v)) {

vm_env_cref_by_cref(const VALUE *ep)
{
    while (!VM_ENV_LOCAL_P(ep)) {
        if (is_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) return TRUE;
        ep = VM_ENV_PREV_EP(ep);
    }
    return is_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
}

cref_replace_with_duplicated_cref_each_frame(const VALUE *vptr, int can_be_svar, VALUE parent)
{
    const VALUE v = *vptr;

    switch (imemo_type(v)) {

        new_cref = vm_cref_dup(cref);

        VM_FORCE_WRITE(vptr, (VALUE)new_cref);

        return cref_replace_with_duplicated_cref_each_frame(&((struct vm_svar *)v)->cref_or_me, FALSE, v);

        rb_bug("cref_replace_with_duplicated_cref_each_frame: unreachable");

vm_cref_replace_with_duplicated_cref(const VALUE *ep)
{
    if (vm_env_cref_by_cref(ep)) {

        while (!VM_ENV_LOCAL_P(ep)) {
            envval = VM_ENV_ESCAPED_P(ep) ? VM_ENV_ENVVAL(ep) : Qfalse;
            if ((cref = cref_replace_with_duplicated_cref_each_frame(&ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE, envval)) != NULL) {

            ep = VM_ENV_PREV_EP(ep);
        }
        envval = VM_ENV_ESCAPED_P(ep) ? VM_ENV_ENVVAL(ep) : Qfalse;
        return cref_replace_with_duplicated_cref_each_frame(&ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE, envval);

        rb_bug("vm_cref_dup: unreachable");

vm_get_cref(const VALUE *ep)

        rb_bug("vm_get_cref: unreachable");

rb_vm_get_cref(const VALUE *ep)
{
    return vm_get_cref(ep);
}

    return vm_get_cref(cfp->ep);

vm_get_const_key_cref(const VALUE *ep)

        if (RCLASS_SINGLETON_P(CREF_CLASS(cref)) ||
            RCLASS_CLONED_P(CREF_CLASS(cref)) ) {

        cref = CREF_NEXT(cref);

        if (CREF_CLASS(cref) == old_klass) {
            new_cref = vm_cref_new_use_prev(new_klass, METHOD_VISI_UNDEF, FALSE, cref, FALSE);
            *new_cref_ptr = new_cref;

        new_cref = vm_cref_new_use_prev(CREF_CLASS(cref), METHOD_VISI_UNDEF, FALSE, cref, FALSE);
        cref = CREF_NEXT(cref);
        *new_cref_ptr = new_cref;
        new_cref_ptr = &new_cref->next;

    *new_cref_ptr = NULL;

    prev_cref = vm_env_cref(ep);

        prev_cref = vm_env_cref(cfp->ep);

    return vm_cref_new(klass, METHOD_VISI_PUBLIC, FALSE, prev_cref, pushed_by_eval, singleton);

vm_get_cbase(const VALUE *ep)
{
    const rb_cref_t *cref = vm_get_cref(ep);

    return CREF_CLASS_FOR_DEFINITION(cref);
}

vm_get_const_base(const VALUE *ep)
{
    const rb_cref_t *cref = vm_get_cref(ep);

        if (!CREF_PUSHED_BY_EVAL(cref)) {
            return CREF_CLASS_FOR_DEFINITION(cref);
        }
        cref = CREF_NEXT(cref);
vm_check_if_namespace(VALUE klass)

        rb_raise(rb_eTypeError, "%+"PRIsVALUE" is not a class/module", klass);

vm_ensure_not_refinement_module(VALUE self)

        rb_warn("not defined at the refinement, but at the outer class/module");

    if (NIL_P(orig_klass) && allow_nil) {

        const rb_cref_t *root_cref = vm_get_cref(ec->cfp->ep);

        while (root_cref && CREF_PUSHED_BY_EVAL(root_cref)) {
            root_cref = CREF_NEXT(root_cref);
        }

        while (cref && CREF_NEXT(cref)) {
            if (CREF_PUSHED_BY_EVAL(cref)) {

                klass = CREF_CLASS(cref);

            cref = CREF_NEXT(cref);

            if (!NIL_P(klass)) {

                if ((ce = rb_const_lookup(klass, id))) {
                    rb_const_warn_if_deprecated(ce, klass, id);

                        if (am == klass) break;

                        if (is_defined) return 1;
                        if (rb_autoloading_value(klass, id, &av, NULL)) return av;

                        goto search_continue;

                            if (UNLIKELY(!rb_ractor_main_p())) {

                                rb_raise(rb_eRactorIsolationError,
                                         "can not access non-shareable objects in constant %"PRIsVALUE"::%s by non-main ractor.",
                                         rb_class_path(klass), rb_id2name(id));

        if (root_cref && !NIL_P(CREF_CLASS(root_cref))) {
            klass = vm_get_iclass(ec->cfp, CREF_CLASS(root_cref));

        vm_check_if_namespace(orig_klass);

            return rb_public_const_defined_from(orig_klass, id);

            return rb_public_const_get_from(orig_klass, id);

    return vm_get_ev_const(ec, orig_klass, id, allow_nil == Qtrue, 0);
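/*
 * Note (added comment): vm_get_ev_const() above implements the VM's
 * constant lookup. With a nil origin class it walks the lexical (cref)
 * chain first, honoring autoload and Ractor shareability checks, then
 * falls back to the ancestry of the innermost non-eval cref; with an
 * explicit scope it delegates to rb_public_const_get_from().
 */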
    int allow_nil = TRUE;

    if (segments[0] == idNULL) {

    while (segments[idx]) {
        ID id = segments[idx++];
        val = vm_get_ev_const(ec, val, id, allow_nil, 0);

        rb_bug("vm_get_cvar_base: no cref");

    while (CREF_NEXT(cref) &&
           (NIL_P(CREF_CLASS(cref)) || RCLASS_SINGLETON_P(CREF_CLASS(cref)) ||
            CREF_PUSHED_BY_EVAL(cref) || CREF_SINGLETON(cref))) {
        cref = CREF_NEXT(cref);
    }

    if (top_level_raise && !CREF_NEXT(cref)) {

    klass = vm_get_iclass(cfp, CREF_CLASS(cref));
ALWAYS_INLINE(static void fill_ivar_cache(const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, int is_attr, attr_index_t index, shape_id_t shape_id));
static void
fill_ivar_cache(const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, int is_attr, attr_index_t index, shape_id_t shape_id)
{
    if (is_attr) {
        vm_cc_attr_index_set(cc, index, shape_id);
    }
    else {
        vm_ic_attr_index_set(iseq, ic, index, shape_id);
    }
}

#define ractor_incidental_shareable_p(cond, val) \
    (!(cond) || rb_ractor_shareable_p(val))
#define ractor_object_incidental_shareable_p(obj, val) \
    ractor_incidental_shareable_p(rb_ractor_shareable_p(obj), val)

#define ATTR_INDEX_NOT_SET (attr_index_t)-1
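/*
 * Note (added comment): vm_getivar() below is the shape-based instance
 * variable read path. An inline cache (IVC) or call cache records the
 * shape_id seen last time together with the attribute index; if the
 * receiver's current shape_id matches, the read is a single indexed load
 * from the object's field array. Otherwise the slow path re-resolves the
 * index and refills the cache.
 */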
    shape_id_t shape_id;

        return default_value;

#if SHAPE_IN_BASIC_FLAGS
    shape_id = RBASIC_SHAPE_ID(obj);
#endif

#if !SHAPE_IN_BASIC_FLAGS
        shape_id = ROBJECT_SHAPE_ID(obj);
#endif

        if (UNLIKELY(!rb_ractor_main_p())) {

            if (default_value == Qundef) {

        ivar_list = RCLASS_PRIME_FIELDS(obj);

#if !SHAPE_IN_BASIC_FLAGS
        shape_id = RCLASS_SHAPE_ID(obj);
#endif

        rb_gen_fields_tbl_get(obj, id, &fields_tbl);
#if !SHAPE_IN_BASIC_FLAGS
        shape_id = fields_tbl->shape_id;
#endif
        ivar_list = fields_tbl->as.shape.fields;

        return default_value;

    shape_id_t cached_id;

    if (is_attr) {
        vm_cc_atomic_shape_and_index(cc, &cached_id, &index);
    }
    else {
        vm_ic_atomic_shape_and_index(ic, &cached_id, &index);
    }

    if (LIKELY(cached_id == shape_id)) {
        RUBY_ASSERT(!rb_shape_id_too_complex_p(cached_id));

        if (index == ATTR_INDEX_NOT_SET) {
            return default_value;
        }

        val = ivar_list[index];
#if USE_DEBUG_COUNTER
        RB_DEBUG_COUNTER_INC(ivar_get_ic_hit);

        RB_DEBUG_COUNTER_INC(ivar_get_obj_hit);
#endif
    }
    else {
#if USE_DEBUG_COUNTER
        if (is_attr) {
            if (cached_id != INVALID_SHAPE_ID) {
                RB_DEBUG_COUNTER_INC(ivar_get_cc_miss_set);
            }
            else {
                RB_DEBUG_COUNTER_INC(ivar_get_cc_miss_unset);
            }
        }
        else {
            if (cached_id != INVALID_SHAPE_ID) {
                RB_DEBUG_COUNTER_INC(ivar_get_ic_miss_set);
            }
            else {
                RB_DEBUG_COUNTER_INC(ivar_get_ic_miss_unset);
            }
        }
        RB_DEBUG_COUNTER_INC(ivar_get_ic_miss);

        RB_DEBUG_COUNTER_INC(ivar_get_obj_miss);
#endif

        if (rb_shape_id_too_complex_p(shape_id)) {

            table = (st_table *)RCLASS_FIELDS_HASH(obj);

            table = ROBJECT_FIELDS_HASH(obj);

            if (rb_gen_fields_tbl_get(obj, 0, &fields_tbl)) {
                table = fields_tbl->as.complex.table;
            }

            if (!table || !st_lookup(table, id, &val)) {
                val = default_value;
            }

            shape_id_t previous_cached_id = cached_id;
            if (rb_shape_get_iv_index_with_hint(shape_id, id, &index, &cached_id)) {

                if (cached_id != previous_cached_id) {
                    fill_ivar_cache(iseq, ic, cc, is_attr, index, cached_id);
                }

                if (index == ATTR_INDEX_NOT_SET) {
                    val = default_value;
                }
                else {
                    val = ivar_list[index];
                }

                if (is_attr) {
                    vm_cc_attr_index_initialize(cc, shape_id);
                }
                else {
                    vm_ic_attr_index_initialize(ic, shape_id);
                }
                val = default_value;

    if (!UNDEF_P(default_value)) {

    RB_DEBUG_COUNTER_INC(ivar_get_ic_miss);

    return rb_attr_get(obj, id);
populate_cache(attr_index_t index, shape_id_t next_shape_id, ID id, const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, bool is_attr)
{
    RUBY_ASSERT(!rb_shape_id_too_complex_p(next_shape_id));

    if (is_attr) {
        vm_cc_attr_index_set(cc, index, next_shape_id);
    }
    else {
        vm_ic_attr_index_set(iseq, ic, index, next_shape_id);
    }
}

    RB_DEBUG_COUNTER_INC(ivar_set_ic_miss);

        rb_check_frozen(obj);

        attr_index_t index = rb_obj_ivar_set(obj, id, val);

        shape_id_t next_shape_id = ROBJECT_SHAPE_ID(obj);

        if (!rb_shape_id_too_complex_p(next_shape_id)) {
            populate_cache(index, next_shape_id, id, iseq, ic, cc, is_attr);
        }

        RB_DEBUG_COUNTER_INC(ivar_set_obj_miss);

    return vm_setivar_slowpath(obj, id, val, iseq, ic, NULL, false);

    return vm_setivar_slowpath(obj, id, val, NULL, NULL, cc, true);

NOINLINE(static VALUE vm_setivar_default(VALUE obj, ID id, VALUE val, shape_id_t dest_shape_id, attr_index_t index));
static VALUE
vm_setivar_default(VALUE obj, ID id, VALUE val, shape_id_t dest_shape_id, attr_index_t index)
{
#if SHAPE_IN_BASIC_FLAGS
    shape_id_t shape_id = RBASIC_SHAPE_ID(obj);
#else
    shape_id_t shape_id = rb_generic_shape_id(obj);
#endif

    if (shape_id == dest_shape_id) {
        RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
    }
    else if (dest_shape_id != INVALID_SHAPE_ID) {
        rb_shape_t *dest_shape = RSHAPE(dest_shape_id);

        if (shape_id == dest_shape->parent_id && dest_shape->edge_name == id && shape->capacity == dest_shape->capacity) {

    rb_gen_fields_tbl_get(obj, 0, &fields_tbl);

    if (shape_id != dest_shape_id) {
#if SHAPE_IN_BASIC_FLAGS
        RBASIC_SET_SHAPE_ID(obj, dest_shape_id);
#else
        fields_tbl->shape_id = dest_shape_id;
#endif
    }

    RB_OBJ_WRITE(obj, &fields_tbl->as.shape.fields[index], val);

    RB_DEBUG_COUNTER_INC(ivar_set_ic_hit);

vm_setivar(VALUE obj, ID id, VALUE val, shape_id_t dest_shape_id, attr_index_t index)

        shape_id_t shape_id = ROBJECT_SHAPE_ID(obj);
        RUBY_ASSERT(dest_shape_id == INVALID_SHAPE_ID || !rb_shape_id_too_complex_p(dest_shape_id));

        if (LIKELY(shape_id == dest_shape_id)) {
            RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
        }
        else if (dest_shape_id != INVALID_SHAPE_ID) {
            rb_shape_t *dest_shape = RSHAPE(dest_shape_id);
            shape_id_t source_shape_id = dest_shape->parent_id;

            if (shape_id == source_shape_id && dest_shape->edge_name == id && shape->capacity == dest_shape->capacity) {
                RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);

                ROBJECT_SET_SHAPE_ID(obj, dest_shape_id);

                RUBY_ASSERT(rb_shape_get_next_iv_shape(source_shape_id, id) == dest_shape_id);

        RB_DEBUG_COUNTER_INC(ivar_set_ic_hit);

        RB_DEBUG_COUNTER_INC(ivar_set_obj_hit);

    RB_DEBUG_COUNTER_INC(ivar_set_ic_miss_noobject);
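/*
 * Note (added comment): the class-variable paths below cache lookups
 * against the global cvar state. A cache entry is valid only while
 * GET_GLOBAL_CVAR_STATE() and the current cref both still match, and only
 * on the main Ractor; any mismatch falls back to
 * update_classvariable_cache().
 */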
    VALUE defined_class = 0;

        defined_class = RBASIC(defined_class)->klass;

    struct rb_id_table *rb_cvc_tbl = RCLASS_CVC_TBL(defined_class);

        rb_bug("the cvc table should be set");

    if (!rb_id_table_lookup(rb_cvc_tbl, id, &ent_data)) {
        rb_bug("should have cvar cache entry");
    }

    ent->global_cvar_state = GET_GLOBAL_CVAR_STATE();

    cref = vm_get_cref(GET_EP());

    if (ic->entry && ic->entry->global_cvar_state == GET_GLOBAL_CVAR_STATE() && ic->entry->cref == cref && LIKELY(rb_ractor_main_p())) {
        RB_DEBUG_COUNTER_INC(cvar_read_inline_hit);

        VALUE v = rb_ivar_lookup(ic->entry->class_value, id, Qundef);

    VALUE klass = vm_get_cvar_base(cref, reg_cfp, 1);

    return update_classvariable_cache(iseq, klass, id, cref, ic);

    return vm_getclassvariable(iseq, cfp, id, ic);

    cref = vm_get_cref(GET_EP());

    if (ic->entry && ic->entry->global_cvar_state == GET_GLOBAL_CVAR_STATE() && ic->entry->cref == cref && LIKELY(rb_ractor_main_p())) {
        RB_DEBUG_COUNTER_INC(cvar_write_inline_hit);

        rb_class_ivar_set(ic->entry->class_value, id, val);

    VALUE klass = vm_get_cvar_base(cref, reg_cfp, 1);

    update_classvariable_cache(iseq, klass, id, cref, ic);

    vm_setclassvariable(iseq, cfp, id, val, ic);

    return vm_getivar(obj, id, iseq, ic, NULL, FALSE, Qnil);

    shape_id_t dest_shape_id;

    vm_ic_atomic_shape_and_index(ic, &dest_shape_id, &index);

    if (UNLIKELY(UNDEF_P(vm_setivar(obj, id, val, dest_shape_id, index)))) {

        if (!UNDEF_P(vm_setivar_default(obj, id, val, dest_shape_id, index))) {

        vm_setivar_slowpath_ivar(obj, id, val, iseq, ic);

    vm_setinstancevariable(iseq, obj, id, val, ic);
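/*
 * Note (added comment): vm_throw_start()/vm_throw() below implement
 * non-local control flow (break/retry/return). They locate the escape
 * frame by walking eps and control frames, then package the result as a
 * THROW_DATA imemo whose state tag drives the handler search during stack
 * unwinding.
 */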
        ec->tag->state = RUBY_TAG_FATAL;

        ec->tag->state = TAG_THROW;

    else if (THROW_DATA_P(err)) {
        ec->tag->state = THROW_DATA_STATE((struct vm_throw_data *)err);

        ec->tag->state = TAG_RAISE;

               const int flag, const VALUE throwobj)

    else if (state == TAG_BREAK) {

        const VALUE *ep = GET_EP();
        const rb_iseq_t *base_iseq = GET_ISEQ();
        escape_cfp = reg_cfp;

        while (ISEQ_BODY(base_iseq)->type != ISEQ_TYPE_BLOCK) {
            if (ISEQ_BODY(escape_cfp->iseq)->type == ISEQ_TYPE_CLASS) {
                escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
                ep = escape_cfp->ep;
                base_iseq = escape_cfp->iseq;
            }
            else {
                ep = VM_ENV_PREV_EP(ep);
                base_iseq = ISEQ_BODY(base_iseq)->parent_iseq;
                escape_cfp = rb_vm_search_cf_from_ep(ec, escape_cfp, ep);
                VM_ASSERT(escape_cfp->iseq == base_iseq);
            }
        }

        if (VM_FRAME_LAMBDA_P(escape_cfp)) {

            ep = VM_ENV_PREV_EP(ep);

            while (escape_cfp < eocfp) {
                if (escape_cfp->ep == ep) {
                    const rb_iseq_t *const iseq = escape_cfp->iseq;
                    const VALUE epc = escape_cfp->pc - ISEQ_BODY(iseq)->iseq_encoded;

                    for (i=0; i < ct->size; i++) {
                        UNALIGNED_MEMBER_PTR(ct, entries[i]);

                        if (entry->type == CATCH_TYPE_BREAK &&
                            entry->iseq == base_iseq &&
                            entry->start < epc && entry->end >= epc) {
                            if (entry->cont == epc) {

                escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);

        rb_vm_localjump_error("break from proc-closure", throwobj, TAG_BREAK);

    else if (state == TAG_RETRY) {
        const VALUE *ep = VM_ENV_PREV_EP(GET_EP());

        escape_cfp = rb_vm_search_cf_from_ep(ec, reg_cfp, ep);
    }
    else if (state == TAG_RETURN) {
        const VALUE *current_ep = GET_EP();
        const VALUE *target_ep = NULL, *target_lep, *ep = current_ep;
        int in_class_frame = 0;

        escape_cfp = reg_cfp;

        while (!VM_ENV_LOCAL_P(ep)) {
            if (VM_ENV_FLAGS(ep, VM_FRAME_FLAG_LAMBDA) && target_ep == NULL) {

            ep = VM_ENV_PREV_EP(ep);
        }

        while (escape_cfp < eocfp) {
            const VALUE *lep = VM_CF_LEP(escape_cfp);

            if (lep == target_lep &&
                VM_FRAME_RUBYFRAME_P(escape_cfp) &&
                ISEQ_BODY(escape_cfp->iseq)->type == ISEQ_TYPE_CLASS) {

            if (lep == target_lep) {
                if (VM_FRAME_LAMBDA_P(escape_cfp)) {

                    if (in_class_frame) {

                        const VALUE *tep = current_ep;

                        while (target_lep != tep) {
                            if (escape_cfp->ep == tep) {

                                if (tep == target_ep) {

                                    goto unexpected_return;

                            tep = VM_ENV_PREV_EP(tep);

            else if (VM_FRAME_RUBYFRAME_P(escape_cfp)) {
                switch (ISEQ_BODY(escape_cfp->iseq)->type) {

                  case ISEQ_TYPE_MAIN:

                    if (in_class_frame) goto unexpected_return;
                    if (target_ep == NULL) {

                        goto unexpected_return;

                  case ISEQ_TYPE_EVAL: {

                    enum rb_iseq_type t = ISEQ_BODY(is)->type;
                    while (t == ISEQ_TYPE_RESCUE || t == ISEQ_TYPE_ENSURE || t == ISEQ_TYPE_EVAL) {
                        if (!(is = ISEQ_BODY(is)->parent_iseq)) break;
                        t = ISEQ_BODY(is)->type;
                    }
                    toplevel = t == ISEQ_TYPE_TOP || t == ISEQ_TYPE_MAIN;

                  case ISEQ_TYPE_CLASS:

            if (escape_cfp->ep == target_lep && ISEQ_BODY(escape_cfp->iseq)->type == ISEQ_TYPE_METHOD) {
                if (target_ep == NULL) {

                    goto unexpected_return;

            escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
        }

        rb_vm_localjump_error("unexpected return", throwobj, TAG_RETURN);

        rb_bug("isns(throw): unsupported throw type");

    ec->tag->state = state;
    return (VALUE)THROW_DATA_NEW(throwobj, escape_cfp, state);

         rb_num_t throw_state, VALUE throwobj)
{
    const int state = (int)(throw_state & VM_THROW_STATE_MASK);
    const int flag = (int)(throw_state & VM_THROW_NO_ESCAPE_FLAG);

        return vm_throw_start(ec, reg_cfp, state, flag, throwobj);

        return vm_throw_continue(ec, throwobj);

    return vm_throw(ec, reg_cfp, throw_state, throwobj);
    int is_splat = flag & 0x01;

    const VALUE obj = ary;

    if (num + is_splat == 0) {

    else if (flag & 0x02) {

        for (i = 0; i < num - len; i++) {

        for (j = 0; i < num; i++, j++) {

        for (; i < num - len; i++) {

        for (rb_num_t j = 0; i < num; i++, j++) {
            *cfp->sp++ = ptr[len - j - 1];
        }

        for (rb_num_t j = 0; j < num; j++) {
            *cfp->sp++ = ptr[num - j - 1];
        }
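/*
 * Note (added comment): the loops above belong to vm_expandarray, which
 * backs multiple assignment. It pushes `num` elements in reverse (the VM
 * stack is LIFO with respect to assignment order), `flag & 0x01` requests
 * a rest-array splat and `flag & 0x02` selects post-argument mode.
 */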
#if VM_CHECK_MODE > 0
    ccs->debug_sig = ~(VALUE)ccs;
#endif

    ccs->entries = NULL;

    rb_id_table_insert(cc_tbl, mid, (VALUE)ccs);

    if (! vm_cc_markable(cc)) {

    if (UNLIKELY(ccs->len == ccs->capa)) {
        if (ccs->capa == 0) {

            ccs->entries = ALLOC_N(struct rb_class_cc_entries_entry, ccs->capa);

            REALLOC_N(ccs->entries, struct rb_class_cc_entries_entry, ccs->capa);

    VM_ASSERT(ccs->len < ccs->capa);

    const int pos = ccs->len++;
    ccs->entries[pos].argc = vm_ci_argc(ci);
    ccs->entries[pos].flag = vm_ci_flag(ci);

    if (RB_DEBUG_COUNTER_SETMAX(ccs_maxlen, ccs->len)) {

#if VM_CHECK_MODE > 0

    ruby_debug_printf("ccs:%p (%d,%d)\n", (void *)ccs, ccs->len, ccs->capa);
    for (int i=0; i<ccs->len; i++) {
        ruby_debug_printf("CCS CI ID:flag:%x argc:%u\n",
                          ccs->entries[i].flag,
                          ccs->entries[i].argc);
        rp(ccs->entries[i].cc);
    }

    VM_ASSERT(vm_ccs_p(ccs));
    VM_ASSERT(ccs->len <= ccs->capa);

    for (int i=0; i<ccs->len; i++) {

        VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
        VM_ASSERT(vm_cc_class_check(cc, klass));
        VM_ASSERT(vm_cc_check_cme(cc, ccs->cme));
        VM_ASSERT(!vm_cc_super_p(cc));
        VM_ASSERT(!vm_cc_refinement_p(cc));
    }

    const ID mid = vm_ci_mid(ci);
    struct rb_id_table *cc_tbl = RCLASS_WRITABLE_CC_TBL(klass);

    if (rb_id_table_lookup(cc_tbl, mid, &ccs_data)) {

        const int ccs_len = ccs->len;

        if (UNLIKELY(METHOD_ENTRY_INVALIDATED(ccs->cme))) {
            rb_vm_ccs_free(ccs);
            rb_id_table_delete(cc_tbl, mid);

            VM_ASSERT(vm_ccs_verify(ccs, mid, klass));

            unsigned int argc = vm_ci_argc(ci);
            unsigned int flag = vm_ci_flag(ci);

            for (int i=0; i<ccs_len; i++) {
                unsigned int ccs_ci_argc = ccs->entries[i].argc;
                unsigned int ccs_ci_flag = ccs->entries[i].flag;
                const struct rb_callcache *ccs_cc = ccs->entries[i].cc;

                VM_ASSERT(IMEMO_TYPE_P(ccs_cc, imemo_callcache));

                if (ccs_ci_argc == argc && ccs_ci_flag == flag) {
                    RB_DEBUG_COUNTER_INC(cc_found_in_ccs);

                    VM_ASSERT(vm_cc_cme(ccs_cc)->called_id == mid);
                    VM_ASSERT(ccs_cc->klass == klass);
                    VM_ASSERT(!METHOD_ENTRY_INVALIDATED(vm_cc_cme(ccs_cc)));

        cc_tbl = rb_id_table_create(2);
        RCLASS_WRITE_CC_TBL(klass, cc_tbl);

    RB_DEBUG_COUNTER_INC(cc_not_found_in_ccs);

        cme = UNDEFINED_METHOD_ENTRY_P(cme) ? NULL : cme;

        VM_ASSERT(cme == rb_callable_method_entry(klass, mid));

        cme = rb_callable_method_entry(klass, mid);

    VM_ASSERT(cme == NULL || IMEMO_TYPE_P(cme, imemo_ment));

        VM_ASSERT(vm_cc_cme(&vm_empty_cc) == NULL);
        return &vm_empty_cc;

    VM_ASSERT(cme == rb_callable_method_entry(klass, mid));

    VM_ASSERT(cc_tbl != NULL);

    if (LIKELY(rb_id_table_lookup(cc_tbl, mid, &ccs_data))) {

        ccs = vm_ccs_create(klass, cc_tbl, mid, cme);

    cme = rb_check_overloaded_cme(cme, ci);

    const struct rb_callcache *cc = vm_cc_new(klass, cme, vm_call_general, cc_type_normal);
    vm_ccs_push(klass, ccs, ci, cc);

    VM_ASSERT(vm_cc_cme(cc) != NULL);
    VM_ASSERT(cme->called_id == mid);
    VM_ASSERT(vm_cc_cme(cc)->called_id == mid);
#if USE_DEBUG_COUNTER

    const struct rb_callcache *cc = rb_vm_search_method_slowpath(cd->ci, klass);

#if OPT_INLINE_METHOD_CACHE
    if (cd_owner && cc != empty_cc) {

#if USE_DEBUG_COUNTER
    if (!old_cc || old_cc == empty_cc) {

        RB_DEBUG_COUNTER_INC(mc_inline_miss_empty);
    }
    else if (old_cc == cc) {
        RB_DEBUG_COUNTER_INC(mc_inline_miss_same_cc);
    }
    else if (vm_cc_cme(old_cc) == vm_cc_cme(cc)) {
        RB_DEBUG_COUNTER_INC(mc_inline_miss_same_cme);
    }
    else if (vm_cc_cme(old_cc) && vm_cc_cme(cc) &&
             vm_cc_cme(old_cc)->def == vm_cc_cme(cc)->def) {
        RB_DEBUG_COUNTER_INC(mc_inline_miss_same_def);
    }
    else {
        RB_DEBUG_COUNTER_INC(mc_inline_miss_diff);
    }
#endif

    VM_ASSERT(vm_cc_cme(cc) == NULL ||
              vm_cc_cme(cc)->called_id == vm_ci_mid(cd->ci));

#if OPT_INLINE_METHOD_CACHE
    if (LIKELY(vm_cc_class_check(cc, klass))) {
        if (LIKELY(!METHOD_ENTRY_INVALIDATED(vm_cc_cme(cc)))) {
            VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));
            RB_DEBUG_COUNTER_INC(mc_inline_hit);
            VM_ASSERT(vm_cc_cme(cc) == NULL ||
                      (vm_ci_flag(cd->ci) & VM_CALL_SUPER) ||
                      vm_cc_cme(cc)->called_id == vm_ci_mid(cd->ci));

        RB_DEBUG_COUNTER_INC(mc_inline_miss_invalidated);

        RB_DEBUG_COUNTER_INC(mc_inline_miss_klass);
#endif

    return vm_search_method_slowpath0(cd_owner, cd, klass);

    VM_ASSERT(klass != Qfalse);

    return vm_search_method_fastpath(cd_owner, cd, klass);
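/*
 * Note (added comment): the cfunc_type union below exists so that C method
 * pointers of every supported arity can be stored and compared through one
 * type. With GCC/Clang's transparent_union attribute, callers may pass any
 * of the listed signatures without an explicit cast.
 */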
#if __has_attribute(transparent_union)

    VALUE (*f10)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
    VALUE (*f11)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
    VALUE (*f12)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
    VALUE (*f13)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
    VALUE (*f14)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
    VALUE (*f15)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);

# define make_cfunc_type(f) (cfunc_type){.anyargs = (VALUE (*)(ANYARGS))(f)}
#else
# define make_cfunc_type(f) (cfunc_type)(f)
#endif
    VM_ASSERT(IMEMO_TYPE_P(me, imemo_ment));
    VM_ASSERT(callable_method_entry_p(me));

    if (me->def->type != VM_METHOD_TYPE_CFUNC) {

#if __has_attribute(transparent_union)
        return me->def->body.cfunc.func == func.anyargs;
#else
        return me->def->body.cfunc.func == func;
#endif

    return me && METHOD_ENTRY_BASIC(me);

    VM_ASSERT(iseq != NULL);

    return check_cfunc(vm_cc_cme(cc), func);

#define check_cfunc(me, func) check_cfunc(me, make_cfunc_type(func))
#define vm_method_cfunc_is(iseq, cd, recv, func) vm_method_cfunc_is(iseq, cd, recv, make_cfunc_type(func))

#define EQ_UNREDEFINED_P(t) BASIC_OP_UNREDEFINED_P(BOP_EQ, t##_REDEFINED_OP_FLAG)
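/*
 * Note (added comment): opt_equality_specialized() below short-circuits
 * `==` for fixnums, flonums, floats and strings, but only while BOP_EQ has
 * not been redefined for those classes (EQ_UNREDEFINED_P). When
 * redefinition cannot be ruled out it yields to a normal method dispatch.
 */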
opt_equality_specialized(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) && EQ_UNREDEFINED_P(INTEGER)) {
        goto compare_by_identity;
    }
    else if (FLONUM_2_P(recv, obj) && EQ_UNREDEFINED_P(FLOAT)) {
        goto compare_by_identity;
    }

        goto compare_by_identity;

#if MSC_VERSION_BEFORE(1300)

        else if (isnan(b)) {

        return RBOOL(a == b);

        return rb_str_eql_internal(obj, recv);

  compare_by_identity:
    return RBOOL(recv == obj);
}

    VM_ASSERT(cd_owner != NULL);

    VALUE val = opt_equality_specialized(recv, obj);
    if (!UNDEF_P(val)) return val;

    if (!vm_method_cfunc_is(cd_owner, cd, recv, rb_obj_equal)) {

    return RBOOL(recv == obj);

#undef EQ_UNREDEFINED_P

NOINLINE(static VALUE opt_equality_by_mid_slowpath(VALUE recv, VALUE obj, ID mid));
static VALUE
opt_equality_by_mid_slowpath(VALUE recv, VALUE obj, ID mid)
{
    const struct rb_callcache *cc = gccct_method_search(GET_EC(), recv, mid, &VM_CI_ON_STACK(mid, 0, 1, NULL));

    if (cc && check_cfunc(vm_cc_cme(cc), rb_obj_equal)) {
        return RBOOL(recv == obj);
    }

    VALUE val = opt_equality_specialized(recv, obj);
    if (!UNDEF_P(val)) {

    return opt_equality_by_mid_slowpath(recv, obj, mid);

    return opt_equality_by_mid(obj1, obj2, idEq);

    return opt_equality_by_mid(obj1, obj2, idEqlP);

      case VM_CHECKMATCH_TYPE_WHEN:

      case VM_CHECKMATCH_TYPE_RESCUE:

            rb_raise(rb_eTypeError, "class or module required for rescue clause");

      case VM_CHECKMATCH_TYPE_CASE: {
        return rb_vm_call_with_refinements(ec, pattern, idEqq, 1, &target, RB_NO_KEYWORDS);
      }

        rb_bug("check_match: unreachable");

#if MSC_VERSION_BEFORE(1300)
#define CHECK_CMP_NAN(a, b) if (isnan(a) || isnan(b)) return Qfalse;
#else
#define CHECK_CMP_NAN(a, b)
#endif

double_cmp_lt(double a, double b)
{
    CHECK_CMP_NAN(a, b);
    return RBOOL(a < b);
}

double_cmp_le(double a, double b)
{
    CHECK_CMP_NAN(a, b);
    return RBOOL(a <= b);
}

double_cmp_gt(double a, double b)
{
    CHECK_CMP_NAN(a, b);
    return RBOOL(a > b);
}

double_cmp_ge(double a, double b)
{
    CHECK_CMP_NAN(a, b);
    return RBOOL(a >= b);
}

static inline VALUE *

    if (cfp->iseq && VM_FRAME_RUBYFRAME_P(cfp)) {
        VALUE *bp = prev_cfp->sp + ISEQ_BODY(cfp->iseq)->local_table_size + VM_ENV_DATA_SIZE;

        if (ISEQ_BODY(cfp->iseq)->param.flags.forwardable && VM_ENV_LOCAL_P(cfp->ep)) {
            int lts = ISEQ_BODY(cfp->iseq)->local_table_size;
            int params = ISEQ_BODY(cfp->iseq)->param.size;

            bp += vm_ci_argc(ci);

        if (ISEQ_BODY(cfp->iseq)->type == ISEQ_TYPE_METHOD || VM_FRAME_BMETHOD_P(cfp)) {

#if VM_DEBUG_BP_CHECK
        if (bp != cfp->bp_check) {
            ruby_debug_printf("bp_check: %ld, bp: %ld\n",
                              (long)(cfp->bp_check - GET_EC()->vm_stack),
                              (long)(bp - GET_EC()->vm_stack));
            rb_bug("vm_base_ptr: unreachable");
        }
#endif

    return vm_base_ptr(cfp);
static vm_call_handler vm_call_iseq_setup_func(const struct rb_callinfo *ci, const int param_size, const int local_size);

    RB_DEBUG_COUNTER_INC(ccf_iseq_setup_tailcall_0start);

    return vm_call_iseq_setup_tailcall(ec, cfp, calling, 0);

    RB_DEBUG_COUNTER_INC(ccf_iseq_setup_0start);

    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    int param = ISEQ_BODY(iseq)->param.size;
    int local = ISEQ_BODY(iseq)->local_table_size;
    return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);

    return ISEQ_BODY(iseq)->param.flags.has_opt == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_kw == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
           ISEQ_BODY(iseq)->param.flags.accepts_no_kwarg == FALSE &&
           ISEQ_BODY(iseq)->param.flags.forwardable == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_block == FALSE;

rb_iseq_only_optparam_p(const rb_iseq_t *iseq)
{
    return ISEQ_BODY(iseq)->param.flags.has_opt == TRUE &&
           ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_kw == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
           ISEQ_BODY(iseq)->param.flags.accepts_no_kwarg == FALSE &&
           ISEQ_BODY(iseq)->param.flags.forwardable == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_block == FALSE;
}

rb_iseq_only_kwparam_p(const rb_iseq_t *iseq)
{
    return ISEQ_BODY(iseq)->param.flags.has_opt == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_kw == TRUE &&
           ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
           ISEQ_BODY(iseq)->param.flags.forwardable == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_block == FALSE;
}
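/*
 * Note (added comment): the predicates above classify parameter shapes. A
 * "simple" iseq takes only required positional arguments, which lets the
 * call paths below skip setup_parameters_complex() entirely and install a
 * cached fast-path handler per call site.
 */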
#define ALLOW_HEAP_ARGV (-2)
#define ALLOW_HEAP_ARGV_KEEP_KWSPLAT (-3)

    vm_check_canary(GET_EC(), cfp->sp);

    int argc = calling->argc;

    if (UNLIKELY(max_args <= ALLOW_HEAP_ARGV && len + argc > VM_ARGC_STACK_MAX)) {

        VALUE *argv = cfp->sp - argc;

        cfp->sp -= argc - 1;
        cfp->sp[-1] = argv_ary;

        calling->heap_argv = argv_ary;

    if (max_args >= 0 && len + argc > max_args) {

        calling->argc += len - (max_args - argc + 1);
        len = max_args - argc + 1;

    calling->heap_argv = 0;

    CHECK_VM_STACK_OVERFLOW(cfp, len);

    for (i = 0; i < len; i++) {
        *cfp->sp++ = ptr[i];
    }

    const VALUE *const passed_keywords = vm_ci_kwarg(ci)->keywords;
    const int kw_len = vm_ci_kwarg(ci)->keyword_len;
    const VALUE h = rb_hash_new_with_size(kw_len);
    VALUE *sp = cfp->sp;

    for (i=0; i<kw_len; i++) {
        rb_hash_aset(h, passed_keywords[i], (sp - kw_len)[i]);
    }

    cfp->sp -= kw_len - 1;
    calling->argc -= kw_len - 1;
    calling->kw_splat = 1;

vm_caller_setup_keyword_hash(const struct rb_callinfo *ci, VALUE keyword_hash)
{
        if (keyword_hash != Qnil) {

            keyword_hash = rb_hash_dup(rb_to_hash_type(keyword_hash));
        }

    else if (!IS_ARGS_KW_SPLAT_MUT(ci) && !RHASH_EMPTY_P(keyword_hash)) {

        keyword_hash = rb_hash_dup(keyword_hash);
    }
    return keyword_hash;
}
                 const struct rb_callinfo *restrict ci, int max_args)
{
    if (UNLIKELY(IS_ARGS_SPLAT(ci))) {
        if (IS_ARGS_KW_SPLAT(ci)) {

            VM_ASSERT(calling->kw_splat == 1);

            VALUE ary = cfp->sp[0];
            VALUE kwh = vm_caller_setup_keyword_hash(ci, cfp->sp[1]);

            if (vm_caller_setup_arg_splat(cfp, calling, ary, max_args)) return;

            if (UNLIKELY(calling->heap_argv)) {

                ((struct RHash *)kwh)->basic.flags |= RHASH_PASS_AS_KEYWORDS;
                if (max_args != ALLOW_HEAP_ARGV_KEEP_KWSPLAT) {

                    calling->kw_splat = 0;

            VM_ASSERT(calling->kw_splat == 1);

                calling->kw_splat = 0;

            VM_ASSERT(calling->kw_splat == 0);

            VALUE ary = cfp->sp[0];

            if (vm_caller_setup_arg_splat(cfp, calling, ary, max_args)) {

                VALUE last_hash, argv_ary;
                if (UNLIKELY(argv_ary = calling->heap_argv)) {
                    if (!IS_ARGS_KEYWORD(ci) &&

                        (((struct RHash *)last_hash)->basic.flags & RHASH_PASS_AS_KEYWORDS)) {

                        calling->kw_splat = 1;

                    if (!IS_ARGS_KEYWORD(ci) &&
                        calling->argc > 0 &&

                        (((struct RHash *)last_hash)->basic.flags & RHASH_PASS_AS_KEYWORDS)) {

                        cfp->sp[-1] = rb_hash_dup(last_hash);
                        calling->kw_splat = 1;

    else if (UNLIKELY(IS_ARGS_KW_SPLAT(ci))) {

        VM_ASSERT(calling->kw_splat == 1);
        VALUE kwh = vm_caller_setup_keyword_hash(ci, cfp->sp[-1]);

            calling->kw_splat = 0;

    else if (UNLIKELY(IS_ARGS_KEYWORD(ci))) {

        VM_ASSERT(calling->kw_splat == 0);

        vm_caller_setup_arg_kw(cfp, calling, ci);
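/*
 * Note (added comment): CALLER_SETUP_ARG() above flattens splat and
 * keyword-splat arguments on the caller's stack before callee setup. Very
 * long splats may be kept in a heap array (calling->heap_argv) when the
 * callee allows it (max_args <= ALLOW_HEAP_ARGV), to avoid overflowing
 * VM_ARGC_STACK_MAX slots of the VM stack.
 */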
#define USE_OPT_HIST 0

#define OPT_HIST_MAX 64
static int opt_hist[OPT_HIST_MAX+1];

opt_hist_show_results_at_exit(void)
{
    for (int i=0; i<OPT_HIST_MAX; i++) {
        ruby_debug_printf("opt_hist\t%d\t%d\n", i, opt_hist[i]);
    }
}

    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
    const int opt = calling->argc - lead_num;
    const int opt_num = ISEQ_BODY(iseq)->param.opt_num;
    const int opt_pc = (int)ISEQ_BODY(iseq)->param.opt_table[opt];
    const int param = ISEQ_BODY(iseq)->param.size;
    const int local = ISEQ_BODY(iseq)->local_table_size;
    const int delta = opt_num - opt;

    RB_DEBUG_COUNTER_INC(ccf_iseq_opt);

    if (opt_pc < OPT_HIST_MAX) {

        opt_hist[OPT_HIST_MAX]++;

    return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), opt_pc, param - delta, local);

    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
    const int opt = calling->argc - lead_num;
    const int opt_pc = (int)ISEQ_BODY(iseq)->param.opt_table[opt];

    RB_DEBUG_COUNTER_INC(ccf_iseq_opt);

    if (opt_pc < OPT_HIST_MAX) {

        opt_hist[OPT_HIST_MAX]++;

    return vm_call_iseq_setup_tailcall(ec, cfp, calling, opt_pc);

                                     VALUE *const passed_values, const int passed_keyword_len, const VALUE *const passed_keywords,
                                     VALUE *const locals);

    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    int param_size = ISEQ_BODY(iseq)->param.size;
    int local_size = ISEQ_BODY(iseq)->local_table_size;

    VM_ASSERT(ISEQ_BODY(iseq)->param.flags.forwardable);

    local_size = local_size + vm_ci_argc(calling->cd->ci);
    param_size = param_size + vm_ci_argc(calling->cd->ci);

    cfp->sp[0] = (VALUE)calling->cd->ci;

    return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param_size, local_size);

    VM_ASSERT(vm_ci_flag(ci) & VM_CALL_KWARG);
    RB_DEBUG_COUNTER_INC(ccf_iseq_kw1);

    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;

    const int ci_kw_len = kw_arg->keyword_len;
    const VALUE *const ci_keywords = kw_arg->keywords;
    VALUE *argv = cfp->sp - calling->argc;
    VALUE *const klocals = argv + kw_param->bits_start - kw_param->num;
    const int lead_num = ISEQ_BODY(iseq)->param.lead_num;

    MEMCPY(ci_kws, argv + lead_num, VALUE, ci_kw_len);
    args_setup_kw_parameters(ec, iseq, ci_kws, ci_kw_len, ci_keywords, klocals);

    int param = ISEQ_BODY(iseq)->param.size;
    int local = ISEQ_BODY(iseq)->local_table_size;
    return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);

    const struct rb_callinfo *MAYBE_UNUSED(ci) = calling->cd->ci;

    VM_ASSERT((vm_ci_flag(ci) & VM_CALL_KWARG) == 0);
    RB_DEBUG_COUNTER_INC(ccf_iseq_kw2);

    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;
    VALUE *const argv = cfp->sp - calling->argc;
    VALUE *const klocals = argv + kw_param->bits_start - kw_param->num;

    for (i=0; i<kw_param->num; i++) {
        klocals[i] = kw_param->default_values[i];
    }

    int param = ISEQ_BODY(iseq)->param.size;
    int local = ISEQ_BODY(iseq)->local_table_size;
    return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);

    cfp->sp -= (calling->argc + 1);
    rb_insn_func_t func_ptr = (rb_insn_func_t)(uintptr_t)bf->func_ptr;
    return builtin_invoker0(ec, calling->recv, NULL, func_ptr);
    set_table *dup_check_table = vm->unused_block_warning_table;

        .v = (VALUE)cme->def,

    if (!strict_unused_block) {
        key = (st_data_t)cme->def->original_id;

        if (set_lookup(dup_check_table, key)) {

            key |= (st_data_t)(k1.b[i] ^ k2.b[SIZEOF_VALUE-1-i]) << (8 * i);

        fprintf(stderr, "pc:%p def:%p\n", pc, (void *)cme->def);
        fprintf(stderr, "key:%p\n", (void *)key);

    if (set_insert(dup_check_table, key)) {

    VALUE name = rb_gen_method_name(cme->defined_class, ISEQ_BODY(iseq)->location.base_label);

    if (!NIL_P(m_loc)) {
        rb_warn("the block passed to '%"PRIsVALUE"' defined at %"PRIsVALUE":%"PRIsVALUE" may be ignored",

        rb_warn("the block may be ignored because '%"PRIsVALUE"' does not use a block", name);
                     const rb_iseq_t *iseq, VALUE *argv, int param_size, int local_size)
{
    VM_ASSERT((vm_ci_argc(ci), 1));
    VM_ASSERT(vm_cc_cme(cc) != NULL);

    if (UNLIKELY(!ISEQ_BODY(iseq)->param.flags.use_block &&
                 calling->block_handler != VM_BLOCK_HANDLER_NONE &&
                 !(vm_ci_flag(calling->cd->ci) & (VM_CALL_OPT_SEND | VM_CALL_SUPER)))) {
        warn_unused_block(vm_cc_cme(cc), iseq, (void *)ec->cfp->pc);
    }

    if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_KW_SPLAT))) {
        if (LIKELY(rb_simple_iseq_p(iseq))) {

            int lead_num = ISEQ_BODY(iseq)->param.lead_num;
            CALLER_SETUP_ARG(cfp, calling, ci, lead_num);

            if (calling->argc != lead_num) {
                argument_arity_error(ec, iseq, calling->argc, lead_num, lead_num);
            }

            VM_ASSERT(cc == calling->cc);

            if (vm_call_iseq_optimizable_p(ci, cc)) {
                if ((iseq->body->builtin_attrs & BUILTIN_ATTR_SINGLE_NOARG_LEAF) &&

                    VM_ASSERT(iseq->body->builtin_attrs & BUILTIN_ATTR_LEAF);
                    vm_cc_bf_set(cc, (void *)iseq->body->iseq_encoded[1]);
                    CC_SET_FASTPATH(cc, vm_call_single_noarg_leaf_builtin, true);

                    CC_SET_FASTPATH(cc, vm_call_iseq_setup_func(ci, param_size, local_size), true);

        else if (rb_iseq_only_optparam_p(iseq)) {

            const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
            const int opt_num = ISEQ_BODY(iseq)->param.opt_num;

            CALLER_SETUP_ARG(cfp, calling, ci, lead_num + opt_num);
            const int argc = calling->argc;
            const int opt = argc - lead_num;

            if (opt < 0 || opt > opt_num) {
                argument_arity_error(ec, iseq, argc, lead_num, lead_num + opt_num);
            }

            if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_TAILCALL))) {
                CC_SET_FASTPATH(cc, vm_call_iseq_setup_normal_opt_start,
                                !IS_ARGS_SPLAT(ci) && !IS_ARGS_KEYWORD(ci) &&
                                vm_call_cacheable(ci, cc));
            }
            else {
                CC_SET_FASTPATH(cc, vm_call_iseq_setup_tailcall_opt_start,
                                !IS_ARGS_SPLAT(ci) && !IS_ARGS_KEYWORD(ci) &&
                                vm_call_cacheable(ci, cc));
            }

            VM_ASSERT((int)ISEQ_BODY(iseq)->param.size == lead_num + opt_num);
            for (int i=argc; i<lead_num + opt_num; i++) {

            return (int)ISEQ_BODY(iseq)->param.opt_table[opt];
        }
        else if (rb_iseq_only_kwparam_p(iseq) && !IS_ARGS_SPLAT(ci)) {
            const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
            const int argc = calling->argc;
            const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;

            if (vm_ci_flag(ci) & VM_CALL_KWARG) {

                if (argc - kw_arg->keyword_len == lead_num) {
                    const int ci_kw_len = kw_arg->keyword_len;
                    const VALUE *const ci_keywords = kw_arg->keywords;

                    MEMCPY(ci_kws, argv + lead_num, VALUE, ci_kw_len);

                    VALUE *const klocals = argv + kw_param->bits_start - kw_param->num;
                    args_setup_kw_parameters(ec, iseq, ci_kws, ci_kw_len, ci_keywords, klocals);

                    CC_SET_FASTPATH(cc, vm_call_iseq_setup_kwparm_kwarg,
                                    vm_call_cacheable(ci, cc));

            else if (argc == lead_num) {

                VALUE *const klocals = argv + kw_param->bits_start - kw_param->num;
                args_setup_kw_parameters(ec, iseq, NULL, 0, NULL, klocals);

                if (klocals[kw_param->num] == INT2FIX(0)) {

                    CC_SET_FASTPATH(cc, vm_call_iseq_setup_kwparm_nokwarg,
                                    vm_call_cacheable(ci, cc));

    if (ISEQ_BODY(iseq)->param.flags.forwardable) {
        bool can_fastpath = true;

        if ((vm_ci_flag(ci) & VM_CALL_FORWARDING)) {

            if (vm_ci_argc(ci) != vm_ci_argc(forward_cd->caller_ci)) {
                ci = vm_ci_new_runtime(

                ci = forward_cd->caller_ci;

            can_fastpath = false;

        if (!vm_ci_markable(ci)) {
            ci = vm_ci_new_runtime(

            can_fastpath = false;
        }
        argv[param_size - 1] = (VALUE)ci;
        CC_SET_FASTPATH(cc, vm_call_iseq_forwardable, can_fastpath);

    return setup_parameters_complex(ec, iseq, calling, ci, argv, arg_setup_method);
    const VALUE *lep = VM_CF_LEP(cfp);

    if (VM_ENV_FLAGS(lep, VM_ENV_FLAG_ESCAPED)) {

        iseq = rb_vm_search_cf_from_ep(ec, cfp, lep)->iseq;

    int local_size = ISEQ_BODY(iseq)->local_table_size + argc;

    const VALUE *from = lep - (local_size + VM_ENV_DATA_SIZE - 1);
    VALUE *to = cfp->sp - 1;

    CHECK_VM_STACK_OVERFLOW0(cfp, to, RARRAY_LEN(splat));

    CHECK_VM_STACK_OVERFLOW0(cfp, to, argc);

    cfp->sp = to + argc;

    RB_DEBUG_COUNTER_INC(ccf_iseq_setup);

    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    int param_size = ISEQ_BODY(iseq)->param.size;
    int local_size = ISEQ_BODY(iseq)->local_table_size;

    RUBY_ASSERT(!ISEQ_BODY(iseq)->param.flags.forwardable);

    const int opt_pc = vm_callee_setup_arg(ec, calling, iseq, cfp->sp - calling->argc, param_size, local_size);
    return vm_call_iseq_setup_2(ec, cfp, calling, opt_pc, param_size, local_size);

    RB_DEBUG_COUNTER_INC(ccf_iseq_setup);

    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    int param_size = ISEQ_BODY(iseq)->param.size;
    int local_size = ISEQ_BODY(iseq)->local_table_size;

    RUBY_ASSERT(ISEQ_BODY(iseq)->param.flags.forwardable);

    local_size = local_size + vm_ci_argc(calling->cd->ci);
    param_size = param_size + vm_ci_argc(calling->cd->ci);

    const int opt_pc = vm_callee_setup_arg(ec, calling, iseq, cfp->sp - calling->argc, param_size, local_size);
    return vm_call_iseq_setup_2(ec, cfp, calling, opt_pc, param_size, local_size);

                      int opt_pc, int param_size, int local_size)
{
    if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_TAILCALL))) {
        return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), opt_pc, param_size, local_size);
    }
    else {
        return vm_call_iseq_setup_tailcall(ec, cfp, calling, opt_pc);
    }
}

                           int opt_pc, int param_size, int local_size)
{
    const rb_iseq_t *iseq = def_iseq_ptr(me->def);
    VALUE *argv = cfp->sp - calling->argc;
    VALUE *sp = argv + param_size;
    cfp->sp = argv - 1;

    vm_push_frame(ec, iseq, VM_FRAME_MAGIC_METHOD | VM_ENV_FLAG_LOCAL, calling->recv,
                  calling->block_handler, (VALUE)me,
                  ISEQ_BODY(iseq)->iseq_encoded + opt_pc, sp,
                  local_size - param_size,
                  ISEQ_BODY(iseq)->stack_max);
    VALUE *argv = cfp->sp - calling->argc;

    const rb_iseq_t *iseq = def_iseq_ptr(me->def);
    VALUE *src_argv = argv;
    VALUE *sp_orig, *sp;
    VALUE finish_flag = VM_FRAME_FINISHED_P(cfp) ? VM_FRAME_FLAG_FINISH : 0;

    if (VM_BH_FROM_CFP_P(calling->block_handler, cfp)) {
        struct rb_captured_block *dst_captured = VM_CFP_TO_CAPTURED_BLOCK(RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp));
        const struct rb_captured_block *src_captured = VM_BH_TO_CAPT_BLOCK(calling->block_handler);
        dst_captured->code.val = src_captured->code.val;
        if (VM_BH_ISEQ_BLOCK_P(calling->block_handler)) {
            calling->block_handler = VM_BH_FROM_ISEQ_BLOCK(dst_captured);
        }
        else {
            calling->block_handler = VM_BH_FROM_IFUNC_BLOCK(dst_captured);
        }
    }

    vm_pop_frame(ec, cfp, cfp->ep);

    sp_orig = sp = cfp->sp;

    sp[0] = calling->recv;

    for (i=0; i < ISEQ_BODY(iseq)->param.size; i++) {
        *sp++ = src_argv[i];
    }

    vm_push_frame(ec, iseq, VM_FRAME_MAGIC_METHOD | VM_ENV_FLAG_LOCAL | finish_flag,
                  calling->recv, calling->block_handler, (VALUE)me,
                  ISEQ_BODY(iseq)->iseq_encoded + opt_pc, sp,
                  ISEQ_BODY(iseq)->local_table_size - ISEQ_BODY(iseq)->param.size,
                  ISEQ_BODY(iseq)->stack_max);
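/*
 * Note (added comment): the tailcall setup above reuses the caller's
 * frame: it pops the current control frame, copies the receiver and
 * arguments down to the popped frame's sp, and pushes the callee's frame
 * in its place, carrying over VM_FRAME_FLAG_FINISH so unwinding still
 * terminates correctly.
 */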
ractor_unsafe_check(void)
{
    if (!rb_ractor_main_p()) {
        rb_raise(rb_eRactorUnsafeError, "ractor unsafe method called from not main ractor");
    }
}
    ractor_unsafe_check();

    ractor_unsafe_check();
    return (*f)(argc, argv, recv);

    ractor_unsafe_check();

    ractor_unsafe_check();
    return (*f)(recv, argv[0]);

    ractor_unsafe_check();
    return (*f)(recv, argv[0], argv[1]);

    ractor_unsafe_check();
    return (*f)(recv, argv[0], argv[1], argv[2]);

    ractor_unsafe_check();
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3]);

    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4]);

    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);

    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);

    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);

    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);

    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);

    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);

    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);

    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);

    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);

    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
static VALUE
ractor_safe_call_cfunc_m1(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE (*f)(int, const VALUE *, VALUE) = (VALUE (*)(int, const VALUE *, VALUE))func;
    return (*f)(argc, argv, recv);
}

static VALUE
ractor_safe_call_cfunc_0(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE (*f)(VALUE) = (VALUE (*)(VALUE))func;
    return (*f)(recv);
}

static VALUE
ractor_safe_call_cfunc_1(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE (*f)(VALUE, VALUE) = (VALUE (*)(VALUE, VALUE))func;
    return (*f)(recv, argv[0]);
}

static VALUE
ractor_safe_call_cfunc_2(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE (*f)(VALUE, VALUE, VALUE) = (VALUE (*)(VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1]);
}

static VALUE
ractor_safe_call_cfunc_3(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE (*f)(VALUE, VALUE, VALUE, VALUE) = (VALUE (*)(VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2]);
}

static VALUE
ractor_safe_call_cfunc_4(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE (*f)(VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE (*)(VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3]);
}

static VALUE
ractor_safe_call_cfunc_5(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE (*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE (*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4]);
}

static VALUE
ractor_safe_call_cfunc_6(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE (*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE (*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
}

static VALUE
ractor_safe_call_cfunc_7(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE (*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE (*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
}

static VALUE
ractor_safe_call_cfunc_8(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE (*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE (*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
}

static VALUE
ractor_safe_call_cfunc_9(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE (*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE (*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
}

static VALUE
ractor_safe_call_cfunc_10(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE (*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE (*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
}

static VALUE
ractor_safe_call_cfunc_11(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE (*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE (*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
}

static VALUE
ractor_safe_call_cfunc_12(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE (*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE (*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
}

static VALUE
ractor_safe_call_cfunc_13(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE (*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE (*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
}

static VALUE
ractor_safe_call_cfunc_14(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE (*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE (*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
}

static VALUE
ractor_safe_call_cfunc_15(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE (*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE (*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
}
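/*
 * The ractor_safe_call_cfunc_<N> variants above are identical to the
 * call_cfunc_<N> family except that they skip ractor_unsafe_check().
 * Which family a method uses is decided at registration time: methods
 * marked ractor-safe (e.g. via rb_ext_ractor_safe(true) in an extension's
 * Init function) may be invoked from any ractor, while the default
 * trampolines raise rb_eRactorUnsafeError outside the main ractor.
 */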
static int
vm_cfp_consistent_p(rb_execution_context_t *ec, const rb_control_frame_t *reg_cfp)
{
    const int ov_flags = RAISED_STACKOVERFLOW;
    if (LIKELY(reg_cfp == ec->cfp + 1)) return TRUE;
    if (rb_ec_raised_p(ec, ov_flags)) {
        rb_ec_raised_reset(ec, ov_flags);
        return TRUE;
    }
    return FALSE;
}

#define CHECK_CFP_CONSISTENCY(func) \
    (LIKELY(vm_cfp_consistent_p(ec, reg_cfp)) ? (void)0 : \
     rb_bug(func ": cfp consistency error (%p, %p)", (void *)reg_cfp, (void *)(ec->cfp+1)))
static const rb_method_cfunc_t *
vm_method_cfunc_entry(const rb_callable_method_entry_t *me)
{
#if VM_DEBUG_VERIFY_METHOD_CACHE
    switch (me->def->type) {
      case VM_METHOD_TYPE_CFUNC:
      case VM_METHOD_TYPE_NOTIMPLEMENTED:
        break;
# define METHOD_BUG(t) case VM_METHOD_TYPE_##t: rb_bug("wrong method type: " #t)
        METHOD_BUG(ISEQ);
        METHOD_BUG(ATTRSET);
        METHOD_BUG(IVAR);
        METHOD_BUG(BMETHOD);
        METHOD_BUG(ZSUPER);
        METHOD_BUG(UNDEF);
        METHOD_BUG(OPTIMIZED);
        METHOD_BUG(MISSING);
        METHOD_BUG(REFINED);
        METHOD_BUG(ALIAS);
# undef METHOD_BUG
      default:
        rb_bug("wrong method type: %d", me->def->type);
    }
#endif
    return UNALIGNED_MEMBER_PTR(me->def, body.cfunc);
}

static VALUE
vm_call_cfunc_with_frame_(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling,
                          int argc, VALUE *argv, VALUE *stack_bottom)
{
    RB_DEBUG_COUNTER_INC(ccf_cfunc_with_frame);
    const struct rb_callinfo *ci = calling->cd->ci;
    const struct rb_callcache *cc = calling->cc;
    VALUE val;
    const rb_callable_method_entry_t *me = vm_cc_cme(cc);
    const rb_method_cfunc_t *cfunc = vm_method_cfunc_entry(me);

    VALUE recv = calling->recv;
    VALUE block_handler = calling->block_handler;
    VALUE frame_type = VM_FRAME_MAGIC_CFUNC | VM_FRAME_FLAG_CFRAME | VM_ENV_FLAG_LOCAL;

    if (UNLIKELY(calling->kw_splat)) {
        frame_type |= VM_FRAME_FLAG_CFRAME_KW;
    }

    VM_ASSERT(reg_cfp == ec->cfp);

    RUBY_DTRACE_CMETHOD_ENTRY_HOOK(ec, me->owner, me->def->original_id);
    EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_CALL, recv, me->def->original_id, vm_ci_mid(ci), me->owner, Qundef);

    vm_push_frame(ec, NULL, frame_type, recv,
                  block_handler, (VALUE)me,
                  0, ec->cfp->sp, 0, 0);

    int len = cfunc->argc;
    if (len >= 0) rb_check_arity(argc, len, len);

    reg_cfp->sp = stack_bottom;
    val = (*cfunc->invoker)(recv, argc, argv, cfunc->func);

    CHECK_CFP_CONSISTENCY("vm_call_cfunc");

    rb_vm_pop_frame(ec);

    VM_ASSERT(ec->cfp->sp == stack_bottom);

    EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_RETURN, recv, me->def->original_id, vm_ci_mid(ci), me->owner, val);
    RUBY_DTRACE_CMETHOD_RETURN_HOOK(ec, me->owner, me->def->original_id);

    return val;
}

void
rb_vm_push_cfunc_frame(const rb_callable_method_entry_t *cme, int recv_idx)
{
    VM_ASSERT(cme->def->type == VM_METHOD_TYPE_CFUNC);

    rb_execution_context_t *ec = GET_EC();
    VALUE *sp = ec->cfp->sp;
    VALUE recv = *(sp - recv_idx - 1);
    VALUE frame_type = VM_FRAME_MAGIC_CFUNC | VM_FRAME_FLAG_CFRAME | VM_ENV_FLAG_LOCAL;
    VALUE block_handler = VM_BLOCK_HANDLER_NONE;
#if VM_CHECK_MODE > 0
    /* clear the stack canary before pushing the frame */
    *(GET_EC()->cfp->sp) = Qfalse;
#endif
    vm_push_frame(ec, NULL, frame_type, recv, block_handler, (VALUE)cme, 0, ec->cfp->sp, 0, 0);
}

bool
rb_splat_or_kwargs_p(const struct rb_callinfo *restrict ci)
{
    return IS_ARGS_SPLAT(ci) || IS_ARGS_KW_OR_KW_SPLAT(ci);
}

static VALUE
vm_call_cfunc_with_frame(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    int argc = calling->argc;
    VALUE *stack_bottom = reg_cfp->sp - argc - 1;
    VALUE *argv = &stack_bottom[1];

    return vm_call_cfunc_with_frame_(ec, reg_cfp, calling, argc, argv, stack_bottom);
}
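/*
 * vm_call_cfunc_with_frame_() brackets the actual C call with a frame
 * push/pop so that backtraces and C_CALL/C_RETURN events see the cfunc,
 * and CHECK_CFP_CONSISTENCY verifies the callee left ec->cfp exactly one
 * frame above reg_cfp.  The invariant, in sketch form:
 *
 *     vm_push_frame(ec, ...);                  // ec->cfp moves down one frame
 *     val = (*cfunc->invoker)(...);            // must not unbalance frames
 *     CHECK_CFP_CONSISTENCY("vm_call_cfunc");  // reg_cfp == ec->cfp + 1
 *     rb_vm_pop_frame(ec);
 */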
static VALUE
vm_call_cfunc_other(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    const struct rb_callinfo *ci = calling->cd->ci;
    RB_DEBUG_COUNTER_INC(ccf_cfunc_other);

    CALLER_SETUP_ARG(reg_cfp, calling, ci, ALLOW_HEAP_ARGV_KEEP_KWSPLAT);
    VALUE argv_ary;
    if (UNLIKELY(argv_ary = calling->heap_argv)) {
        VM_ASSERT(!IS_ARGS_KEYWORD(ci));
        int argc = RARRAY_LENINT(argv_ary);
        VALUE *argv = (VALUE *)RARRAY_CONST_PTR(argv_ary);
        VALUE *stack_bottom = reg_cfp->sp - 2;

        VM_ASSERT(calling->argc == 1);

        return vm_call_cfunc_with_frame_(ec, reg_cfp, calling, argc, argv, stack_bottom);
    }
    else {
        CC_SET_FASTPATH(calling->cc, vm_call_cfunc_with_frame, !rb_splat_or_kwargs_p(ci) && !calling->kw_splat && !(vm_ci_flag(ci) & VM_CALL_FORWARDING));

        return vm_call_cfunc_with_frame(ec, reg_cfp, calling);
    }
}

static VALUE
vm_call_cfunc_array_argv(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling,
                         int stack_offset, int argc_offset)
{
    VALUE argv_ary = reg_cfp->sp[-1 - stack_offset];
    int argc = RARRAY_LENINT(argv_ary) - argc_offset;

    if (UNLIKELY(argc > VM_ARGC_STACK_MAX)) {
        return vm_call_cfunc_other(ec, reg_cfp, calling);
    }

    VALUE *argv = (VALUE *)RARRAY_CONST_PTR(argv_ary);
    calling->kw_splat = 0;
    int i;
    VALUE *stack_bottom = reg_cfp->sp - 2 - stack_offset;
    VALUE *sp = stack_bottom;
    CHECK_VM_STACK_OVERFLOW(reg_cfp, argc);
    for (i = 0; i < argc; i++) {
        *++sp = argv[i];
    }
    calling->argc = argc;

    return vm_call_cfunc_with_frame_(ec, reg_cfp, calling, argc, stack_bottom+1, stack_bottom);
}

static VALUE
vm_call_cfunc_only_splat(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_cfunc_only_splat);
    VALUE argv_ary = reg_cfp->sp[-1];
    int argc = RARRAY_LENINT(argv_ary);
    VALUE *argv = (VALUE *)RARRAY_CONST_PTR(argv_ary);
    VALUE last_hash;
    int argc_offset = 0;

    if (UNLIKELY(argc > 0 &&
                 RB_TYPE_P((last_hash = argv[argc-1]), T_HASH) &&
                 (((struct RHash *)last_hash)->basic.flags & RHASH_PASS_AS_KEYWORDS))) {
        if (!RHASH_EMPTY_P(last_hash)) {
            return vm_call_cfunc_other(ec, reg_cfp, calling);
        }
        argc_offset++;
    }
    return vm_call_cfunc_array_argv(ec, reg_cfp, calling, 0, argc_offset);
}

static VALUE
vm_call_cfunc_only_splat_kw(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_cfunc_only_splat_kw);
    VALUE keyword_hash = reg_cfp->sp[-1];

    if (RB_TYPE_P(keyword_hash, T_HASH) && RHASH_EMPTY_P(keyword_hash)) {
        return vm_call_cfunc_array_argv(ec, reg_cfp, calling, 1, 0);
    }

    return vm_call_cfunc_other(ec, reg_cfp, calling);
}

static VALUE
vm_call_cfunc(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    const struct rb_callinfo *ci = calling->cd->ci;
    RB_DEBUG_COUNTER_INC(ccf_cfunc);

    if (IS_ARGS_SPLAT(ci) && !(vm_ci_flag(ci) & VM_CALL_FORWARDING)) {
        if (!IS_ARGS_KW_SPLAT(ci) && vm_ci_argc(ci) == 1) {
            /* f(*a) */
            CC_SET_FASTPATH(calling->cc, vm_call_cfunc_only_splat, TRUE);
            return vm_call_cfunc_only_splat(ec, reg_cfp, calling);
        }
        if (IS_ARGS_KW_SPLAT(ci) && vm_ci_argc(ci) == 2) {
            /* f(*a, **kw) */
            CC_SET_FASTPATH(calling->cc, vm_call_cfunc_only_splat_kw, TRUE);
            return vm_call_cfunc_only_splat_kw(ec, reg_cfp, calling);
        }
    }

    CC_SET_FASTPATH(calling->cc, vm_call_cfunc_other, TRUE);
    return vm_call_cfunc_other(ec, reg_cfp, calling);
}
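/*
 * vm_call_cfunc() is the entry point the first time a cfunc call site runs.
 * It inspects the call-site shape once and installs a specialized handler
 * via CC_SET_FASTPATH, so e.g. a call written as f(*a) dispatches through
 * vm_call_cfunc_only_splat on subsequent executions without re-checking
 * the call-info flags.
 */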
static VALUE
vm_call_ivar(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    const struct rb_callcache *cc = calling->cc;
    RB_DEBUG_COUNTER_INC(ccf_ivar);
    cfp->sp -= 1;
    VALUE ivar = vm_getivar(calling->recv, vm_cc_cme(cc)->def->body.attr.id, NULL, NULL, cc, TRUE, Qnil);
    return ivar;
}

static VALUE
vm_call_attrset_direct(rb_execution_context_t *ec, rb_control_frame_t *cfp, const struct rb_callcache *cc, VALUE obj)
{
    RB_DEBUG_COUNTER_INC(ccf_attrset);
    VALUE val = *(cfp->sp - 1);
    cfp->sp -= 2;
    attr_index_t index = vm_cc_attr_index(cc);
    shape_id_t dest_shape_id = vm_cc_attr_index_dest_shape_id(cc);
    ID id = vm_cc_cme(cc)->def->body.attr.id;
    rb_check_frozen(obj);
    VALUE res = vm_setivar(obj, id, val, dest_shape_id, index);
    if (UNDEF_P(res)) {
        switch (BUILTIN_TYPE(obj)) {
          case T_OBJECT:
          case T_CLASS:
          case T_MODULE:
            break;
          default:
            res = vm_setivar_default(obj, id, val, dest_shape_id, index);
            if (!UNDEF_P(res)) {
                return res;
            }
        }
        res = vm_setivar_slowpath_attr(obj, id, val, cc);
    }
    return res;
}

static VALUE
vm_call_attrset(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    return vm_call_attrset_direct(ec, cfp, calling->cc, calling->recv);
}

static inline VALUE
vm_call_bmethod_body(rb_execution_context_t *ec, struct rb_calling_info *calling, const VALUE *argv)
{
    rb_proc_t *proc;
    VALUE val;
    const struct rb_callcache *cc = calling->cc;
    const rb_callable_method_entry_t *cme = vm_cc_cme(cc);
    VALUE procv = cme->def->body.bmethod.proc;

    if (!RB_OBJ_SHAREABLE_P(procv) &&
        cme->def->body.bmethod.defined_ractor != rb_ractor_self(rb_ec_ractor_ptr(ec))) {
        rb_raise(rb_eRuntimeError, "defined with an un-shareable Proc in a different Ractor");
    }

    /* control block frame */
    GetProcPtr(procv, proc);
    val = vm_invoke_bmethod(ec, proc, calling->recv, CALLING_ARGC(calling), argv, calling->kw_splat, calling->block_handler, vm_cc_cme(cc));

    return val;
}

static VALUE
vm_call_iseq_bmethod(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_iseq_bmethod);

    const struct rb_callcache *cc = calling->cc;
    const rb_callable_method_entry_t *cme = vm_cc_cme(cc);
    VALUE procv = cme->def->body.bmethod.proc;

    if (!RB_OBJ_SHAREABLE_P(procv) &&
        cme->def->body.bmethod.defined_ractor != rb_ractor_self(rb_ec_ractor_ptr(ec))) {
        rb_raise(rb_eRuntimeError, "defined with an un-shareable Proc in a different Ractor");
    }

    rb_proc_t *proc;
    GetProcPtr(procv, proc);
    const struct rb_block *block = &proc->block;

    while (vm_block_type(block) == block_type_proc) {
        block = vm_proc_block(block->as.proc);
    }
    VM_ASSERT(vm_block_type(block) == block_type_iseq);

    const struct rb_captured_block *captured = &block->as.captured;
    const rb_iseq_t *iseq = rb_iseq_check(captured->code.iseq);
    VALUE * const argv = cfp->sp - calling->argc;
    const int arg_size = ISEQ_BODY(iseq)->param.size;

    int opt_pc;
    if (vm_ci_flag(calling->cd->ci) & VM_CALL_ARGS_SIMPLE) {
        opt_pc = vm_callee_setup_block_arg(ec, calling, calling->cd->ci, iseq, argv, arg_setup_method);
    }
    else {
        opt_pc = setup_parameters_complex(ec, iseq, calling, calling->cd->ci, argv, arg_setup_method);
    }

    cfp->sp = argv - 1; /* -1 for the receiver */

    vm_push_frame(ec, iseq,
                  VM_FRAME_MAGIC_BLOCK | VM_FRAME_FLAG_BMETHOD | VM_FRAME_FLAG_LAMBDA,
                  calling->recv,
                  VM_GUARDED_PREV_EP(captured->ep),
                  (VALUE)cme,
                  ISEQ_BODY(iseq)->iseq_encoded + opt_pc,
                  argv + arg_size,
                  ISEQ_BODY(iseq)->local_table_size - arg_size,
                  ISEQ_BODY(iseq)->stack_max);

    return Qundef;
}

static VALUE
vm_call_noniseq_bmethod(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_noniseq_bmethod);

    VALUE *argv;
    int argc;
    CALLER_SETUP_ARG(cfp, calling, calling->cd->ci, ALLOW_HEAP_ARGV);
    if (UNLIKELY(calling->heap_argv)) {
        argv = RARRAY_PTR(calling->heap_argv);
        cfp->sp -= 2;
    }
    else {
        argc = calling->argc;
        argv = ALLOCA_N(VALUE, argc);
        MEMCPY(argv, cfp->sp - argc, VALUE, argc);
        cfp->sp += - argc - 1;
    }

    return vm_call_bmethod_body(ec, calling, argv);
}

static VALUE
vm_call_bmethod(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_bmethod);

    const struct rb_callcache *cc = calling->cc;
    const rb_callable_method_entry_t *cme = vm_cc_cme(cc);
    VALUE procv = cme->def->body.bmethod.proc;
    rb_proc_t *proc;
    GetProcPtr(procv, proc);
    const struct rb_block *block = &proc->block;

    while (vm_block_type(block) == block_type_proc) {
        block = vm_proc_block(block->as.proc);
    }
    if (vm_block_type(block) == block_type_iseq) {
        CC_SET_FASTPATH(cc, vm_call_iseq_bmethod, TRUE);
        return vm_call_iseq_bmethod(ec, cfp, calling);
    }

    CC_SET_FASTPATH(cc, vm_call_noniseq_bmethod, TRUE);
    return vm_call_noniseq_bmethod(ec, cfp, calling);
}
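/*
 * bmethods are methods created by define_method.  When the underlying Proc
 * is iseq-backed, vm_call_iseq_bmethod pushes the block's iseq directly as
 * a BLOCK|BMETHOD|LAMBDA frame, avoiding a generic Proc#call; only
 * non-iseq Procs (ifunc/symbol) fall back to vm_call_noniseq_bmethod and
 * vm_invoke_bmethod.
 */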
VALUE
rb_find_defined_class_by_owner(VALUE current_class, VALUE target_owner)
{
    VALUE klass = current_class;

    while (RTEST(klass)) {
        VALUE owner = RB_TYPE_P(klass, T_ICLASS) ? RBASIC_CLASS(klass) : klass;
        if (owner == target_owner) {
            return klass;
        }
        klass = RCLASS_SUPER(klass);
    }

    return current_class; /* maybe module function */
}

static const rb_callable_method_entry_t *
aliased_callable_method_entry(const rb_callable_method_entry_t *me)
{
    const rb_method_entry_t *orig_me = me->def->body.alias.original_me;
    const rb_callable_method_entry_t *cme;

    if (orig_me->defined_class == 0) {
        VALUE defined_class = rb_find_defined_class_by_owner(me->defined_class, orig_me->owner);
        VM_ASSERT_TYPE(orig_me->owner, T_MODULE);
        cme = rb_method_entry_complement_defined_class(orig_me, me->called_id, defined_class);

        if (me->def->reference_count == 1) {
            RB_OBJ_WRITE(me, &me->def->body.alias.original_me, cme);
        }
        else {
            rb_method_definition_t *def =
                rb_method_definition_create(VM_METHOD_TYPE_ALIAS, me->def->original_id);
            rb_method_definition_set((rb_method_entry_t *)me, def, (void *)cme);
        }
    }
    else {
        cme = (const rb_callable_method_entry_t *)orig_me;
    }

    VM_ASSERT(callable_method_entry_p(cme));
    return cme;
}

const rb_callable_method_entry_t *
rb_aliased_callable_method_entry(const rb_callable_method_entry_t *me)
{
    return aliased_callable_method_entry(me);
}

static VALUE
vm_call_alias(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    calling->cc = &VM_CC_ON_STACK(Qundef,
                                  vm_call_general,
                                  {{0}},
                                  aliased_callable_method_entry(vm_cc_cme(calling->cc)));

    return vm_call_method_each_type(ec, cfp, calling);
}

static enum method_missing_reason
ci_missing_reason(const struct rb_callinfo *ci)
{
    enum method_missing_reason stat = MISSING_NOENTRY;
    if (vm_ci_flag(ci) & VM_CALL_VCALL) stat |= MISSING_VCALL;
    if (vm_ci_flag(ci) & VM_CALL_FCALL) stat |= MISSING_FCALL;
    if (vm_ci_flag(ci) & VM_CALL_SUPER) stat |= MISSING_SUPER;
    return stat;
}
static VALUE
vm_call_symbol(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
               struct rb_calling_info *calling, const struct rb_callinfo *ci, VALUE symbol, int flags)
{
    ASSUME(calling->argc >= 0);

    enum method_missing_reason missing_reason = MISSING_NOENTRY;
    int argc = calling->argc;
    VALUE recv = calling->recv;
    VALUE klass = CLASS_OF(recv);
    ID mid = rb_check_id(&symbol);
    flags |= VM_CALL_OPT_SEND;

    if (UNLIKELY(! mid)) {
        mid = idMethodMissing;
        missing_reason = ci_missing_reason(ci);
        ec->method_missing_reason = missing_reason;

        VALUE argv_ary;
        if (UNLIKELY(argv_ary = calling->heap_argv)) {
            if (rb_method_basic_definition_p(klass, idMethodMissing)) {
                rb_ary_unshift(argv_ary, symbol);

                /* Inadvertent symbol creation shall be forbidden, see [Feature #5112] */
                int priv = vm_ci_flag(ci) & (VM_CALL_FCALL | VM_CALL_VCALL);
                VALUE exc = rb_make_no_method_exception(rb_eNoMethodError, 0, recv,
                                                        RARRAY_LENINT(argv_ary), RARRAY_CONST_PTR(argv_ary), priv);
                rb_exc_raise(exc);
            }
            rb_ary_unshift(argv_ary, rb_str_intern(symbol));
        }
        else {
            /* shift the arguments up one slot and store the method name at the bottom */
            CHECK_VM_STACK_OVERFLOW(reg_cfp, 1);
            /* ... */
            argc = ++calling->argc;

            if (rb_method_basic_definition_p(klass, idMethodMissing)) {
                /* Inadvertent symbol creation shall be forbidden, see [Feature #5112] */
                int priv = vm_ci_flag(ci) & (VM_CALL_FCALL | VM_CALL_VCALL);
                const VALUE *argv = STACK_ADDR_FROM_TOP(argc);
                VALUE exc = rb_make_no_method_exception(rb_eNoMethodError, 0, recv, argc, argv, priv);
                rb_exc_raise(exc);
            }
            /* ... */
        }
    }

    struct rb_forwarding_call_data new_fcd = {
        .cd = {
            .ci = &VM_CI_ON_STACK(mid, flags, argc, vm_ci_kwarg(ci)),
            .cc = NULL,
        },
        .caller_ci = NULL,
    };

    if (!(vm_ci_flag(ci) & VM_CALL_FORWARDING)) {
        calling->cd = &new_fcd.cd;
    }
    else {
        const struct rb_callinfo *caller_ci = ((struct rb_forwarding_call_data *)calling->cd)->caller_ci;
        VM_ASSERT((vm_ci_argc(caller_ci), 1));
        new_fcd.caller_ci = caller_ci;
        calling->cd = (struct rb_call_data *)&new_fcd;
    }
    calling->cc = &VM_CC_ON_STACK(klass,
                                  vm_call_general,
                                  { .method_missing_reason = missing_reason },
                                  rb_callable_method_entry_with_refinements(klass, mid, NULL));

    if (flags & VM_CALL_FCALL) {
        return vm_call_method(ec, reg_cfp, calling);
    }

    const struct rb_callcache *cc = calling->cc;
    VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));

    if (vm_cc_cme(cc) != NULL) {
        switch (METHOD_ENTRY_VISI(vm_cc_cme(cc))) {
          case METHOD_VISI_PUBLIC: /* likely */
            return vm_call_method_each_type(ec, reg_cfp, calling);
          case METHOD_VISI_PRIVATE:
            vm_cc_method_missing_reason_set(cc, MISSING_PRIVATE);
            break;
          case METHOD_VISI_PROTECTED:
            vm_cc_method_missing_reason_set(cc, MISSING_PROTECTED);
            break;
          default:
            VM_UNREACHABLE(vm_call_method);
        }
        return vm_call_method_missing(ec, reg_cfp, calling);
    }

    return vm_call_method_nome(ec, reg_cfp, calling);
}

static VALUE
vm_call_opt_send0(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling, int flags)
{
    const struct rb_callinfo *ci = calling->cd->ci;
    int i = calling->argc - 1;
    VALUE sym;

    if (calling->argc == 0) {
        rb_raise(rb_eArgError, "no method name given");
    }

    sym = TOPN(i);
    /* ... remove the method-name argument from the stack ... */
    return vm_call_symbol(ec, reg_cfp, calling, ci, sym, flags);
}

static VALUE
vm_call_opt_send_complex(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_opt_send_complex);
    const struct rb_callinfo *ci = calling->cd->ci;
    int flags = VM_CALL_FCALL;
    VALUE sym;

    VALUE argv_ary;
    CALLER_SETUP_ARG(reg_cfp, calling, ci, ALLOW_HEAP_ARGV);
    if (UNLIKELY(argv_ary = calling->heap_argv)) {
        sym = rb_ary_shift(argv_ary);
        flags |= VM_CALL_ARGS_SPLAT;
        if (calling->kw_splat) {
            VALUE last_hash = rb_ary_last(0, NULL, argv_ary);
            ((struct RHash *)last_hash)->basic.flags |= RHASH_PASS_AS_KEYWORDS;
            calling->kw_splat = 0;
        }
        return vm_call_symbol(ec, reg_cfp, calling, ci, sym, flags);
    }

    if (calling->kw_splat) flags |= VM_CALL_KW_SPLAT;
    return vm_call_opt_send0(ec, reg_cfp, calling, flags);
}

static VALUE
vm_call_opt_send_simple(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_opt_send_simple);
    return vm_call_opt_send0(ec, reg_cfp, calling, vm_ci_flag(calling->cd->ci) | VM_CALL_FCALL);
}

static VALUE
vm_call_opt_send(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_opt_send);

    const struct rb_callinfo *ci = calling->cd->ci;
    int flags = vm_ci_flag(ci);

    if (UNLIKELY((flags & VM_CALL_FORWARDING) || (!(flags & VM_CALL_ARGS_SIMPLE) &&
                 ((calling->argc == 1 && (flags & (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT))) ||
                  (calling->argc == 2 && (flags & VM_CALL_ARGS_SPLAT) && (flags & VM_CALL_KW_SPLAT)) ||
                  ((flags & VM_CALL_KWARG) && (vm_ci_kwarg(ci)->keyword_len == calling->argc)))))) {
        CC_SET_FASTPATH(calling->cc, vm_call_opt_send_complex, TRUE);
        return vm_call_opt_send_complex(ec, reg_cfp, calling);
    }

    CC_SET_FASTPATH(calling->cc, vm_call_opt_send_simple, TRUE);
    return vm_call_opt_send_simple(ec, reg_cfp, calling);
}
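/*
 * BasicObject#__send__ and Kernel#send compile to an OPTIMIZED method
 * entry.  vm_call_opt_send0 pops the method-name argument off the stack,
 * resolves it to an ID, and re-enters the normal dispatch path through
 * vm_call_symbol, so `recv.send(:m, a)` ends up on the same
 * fastpath-capable path as `recv.m(a)`.
 */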
static VALUE
vm_call_method_missing_body(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling,
                            const struct rb_callinfo *orig_ci, enum method_missing_reason reason)
{
    RB_DEBUG_COUNTER_INC(ccf_method_missing);

    VALUE *argv = STACK_ADDR_FROM_TOP(calling->argc);
    unsigned int argc, flag;

    flag = VM_CALL_FCALL | VM_CALL_OPT_SEND | vm_ci_flag(orig_ci);
    argc = ++calling->argc;

    /* shift arguments: m(a, b, c) #=> method_missing(:m, a, b, c) */
    CHECK_VM_STACK_OVERFLOW(reg_cfp, 1);
    vm_check_canary(ec, reg_cfp->sp);
    if (argc > 1) {
        MEMMOVE(argv+1, argv, VALUE, argc-1);
    }
    argv[0] = ID2SYM(vm_ci_mid(orig_ci));
    INC_SP(1);

    ec->method_missing_reason = reason;

    struct rb_forwarding_call_data new_fcd = {
        .cd = {
            .ci = &VM_CI_ON_STACK(idMethodMissing, flag, argc, vm_ci_kwarg(orig_ci)),
            .cc = NULL,
        },
        .caller_ci = NULL,
    };

    if (!(flag & VM_CALL_FORWARDING)) {
        calling->cd = &new_fcd.cd;
    }
    else {
        const struct rb_callinfo *caller_ci = ((struct rb_forwarding_call_data *)calling->cd)->caller_ci;
        VM_ASSERT((vm_ci_argc(caller_ci), 1));
        new_fcd.caller_ci = caller_ci;
        calling->cd = (struct rb_call_data *)&new_fcd;
    }

    calling->cc = &VM_CC_ON_STACK(Qundef, vm_call_general, {{ 0 }},
                                  rb_callable_method_entry_without_refinements(CLASS_OF(calling->recv), idMethodMissing, NULL));
    return vm_call_method(ec, reg_cfp, calling);
}

static VALUE
vm_call_method_missing(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    return vm_call_method_missing_body(ec, reg_cfp, calling, calling->cd->ci, vm_cc_cmethod_missing_reason(calling->cc));
}
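/*
 * vm_call_method_missing_body rewrites the call in place:
 * `recv.m(a, b)` becomes `recv.method_missing(:m, a, b)` by shifting the
 * arguments up one slot and storing ID2SYM(:m) at argv[0], then
 * re-dispatching with the refinement-free method entry for
 * idMethodMissing.
 */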
static VALUE
vm_call_zsuper(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, VALUE klass)
{
    klass = RCLASS_SUPER(klass);

    const rb_callable_method_entry_t *cme = klass ? rb_callable_method_entry(klass, vm_ci_mid(calling->cd->ci)) : NULL;
    if (cme == NULL) {
        return vm_call_method_nome(ec, cfp, calling);
    }
    if (cme->def->type == VM_METHOD_TYPE_REFINED &&
        cme->def->body.refined.orig_me) {
        cme = refined_method_callable_without_refinement(cme);
    }

    calling->cc = &VM_CC_ON_STACK(Qundef, vm_call_general, {{ 0 }}, cme);

    return vm_call_method_each_type(ec, cfp, calling);
}
static VALUE
find_refinement(VALUE refinements, VALUE klass)
{
    if (NIL_P(refinements)) {
        return Qnil;
    }
    return rb_hash_lookup(refinements, klass);
}

static const rb_control_frame_t *
current_method_entry(const rb_execution_context_t *ec, rb_control_frame_t *cfp)
{
    rb_control_frame_t *top_cfp = cfp;

    if (cfp->iseq && ISEQ_BODY(cfp->iseq)->type == ISEQ_TYPE_BLOCK) {
        const rb_iseq_t *local_iseq = ISEQ_BODY(cfp->iseq)->local_iseq;

        do {
            cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
            if (RUBY_VM_CONTROL_FRAME_STACK_OVERFLOW_P(ec, cfp)) {
                /* TODO: orphan block */
                return top_cfp;
            }
        } while (cfp->iseq != local_iseq);
    }
    return cfp;
}

static const rb_callable_method_entry_t *
refined_method_callable_without_refinement(const rb_callable_method_entry_t *me)
{
    const rb_method_entry_t *orig_me = me->def->body.refined.orig_me;
    const rb_callable_method_entry_t *cme;

    if (orig_me->defined_class == 0) {
        cme = NULL;
        rb_notimplement();
    }
    else {
        cme = (const rb_callable_method_entry_t *)orig_me;
    }

    VM_ASSERT(callable_method_entry_p(cme));

    if (UNDEFINED_METHOD_ENTRY_P(cme)) {
        cme = NULL;
    }

    return cme;
}

static const rb_callable_method_entry_t *
search_refined_method(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    ID mid = vm_ci_mid(calling->cd->ci);
    const rb_cref_t *cref = vm_get_cref(cfp->ep);
    const struct rb_callcache * const cc = calling->cc;
    const rb_callable_method_entry_t *cme = vm_cc_cme(cc);

    for (; cref; cref = CREF_NEXT(cref)) {
        const VALUE refinement = find_refinement(CREF_REFINEMENTS(cref), vm_cc_cme(cc)->owner);
        if (NIL_P(refinement)) continue;

        const rb_callable_method_entry_t *const ref_me =
            rb_callable_method_entry(refinement, mid);

        if (ref_me) {
            if (vm_cc_call(cc) == vm_call_super_method) {
                const rb_control_frame_t *top_cfp = current_method_entry(ec, cfp);
                const rb_callable_method_entry_t *top_me = rb_vm_frame_method_entry(top_cfp);
                if (top_me && rb_method_definition_eq(ref_me->def, top_me->def)) {
                    continue;
                }
            }

            if (cme->def->type != VM_METHOD_TYPE_REFINED ||
                cme->def != ref_me->def) {
                cme = ref_me;
            }
            if (ref_me->def->type != VM_METHOD_TYPE_REFINED) {
                return cme;
            }
        }
        else {
            return NULL;
        }
    }

    if (vm_cc_cme(cc)->def->body.refined.orig_me) {
        return refined_method_callable_without_refinement(vm_cc_cme(cc));
    }
    else {
        VALUE klass = RCLASS_SUPER(vm_cc_cme(cc)->defined_class);
        return klass ? rb_callable_method_entry(klass, mid) : NULL;
    }
}

static VALUE
vm_call_refined(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    const rb_callable_method_entry_t *ref_cme = search_refined_method(ec, cfp, calling);

    if (ref_cme) {
        if (calling->cd->cc) {
            const struct rb_callcache *cc = calling->cc = vm_cc_new(vm_cc_cme(calling->cc)->defined_class, ref_cme, vm_call_general, cc_type_refinement);
            RB_OBJ_WRITE(cfp->iseq, &calling->cd->cc, cc);
            return vm_call_method(ec, cfp, calling);
        }
        else {
            struct rb_callcache *ref_cc = &VM_CC_ON_STACK(Qundef, vm_call_general, {{ 0 }}, ref_cme);
            calling->cc = ref_cc;
            return vm_call_method(ec, cfp, calling);
        }
    }
    else {
        return vm_call_method_nome(ec, cfp, calling);
    }
}
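/*
 * For a refined method, dispatch walks the cref chain of the caller (the
 * lexical scopes that may contain `using`) and asks each active refinement
 * module for an override via find_refinement(); if none applies, the call
 * falls back to the original (pre-refinement) method entry, and if even
 * that is absent, to method_missing.
 */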
NOINLINE(static VALUE vm_invoke_block_opt_call(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
                                               struct rb_calling_info *calling, const struct rb_callinfo *ci, VALUE block_handler));

static VALUE
vm_invoke_block_opt_call(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
                         struct rb_calling_info *calling, const struct rb_callinfo *ci, VALUE block_handler)
{
    int argc = calling->argc;

    /* remove self */
    if (argc > 0) MEMMOVE(&TOPN(argc), &TOPN(argc-1), VALUE, argc);
    DEC_SP(1);

    return vm_invoke_block(ec, reg_cfp, calling, ci, false, block_handler);
}

static VALUE
vm_call_opt_call(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_opt_call);

    const struct rb_callinfo *ci = calling->cd->ci;
    VALUE procval = calling->recv;
    return vm_invoke_block_opt_call(ec, reg_cfp, calling, ci, VM_BH_FROM_PROC(procval));
}

static VALUE
vm_call_opt_block_call(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_opt_block_call);

    VALUE block_handler = VM_ENV_BLOCK_HANDLER(VM_CF_LEP(reg_cfp));
    const struct rb_callinfo *ci = calling->cd->ci;

    if (BASIC_OP_UNREDEFINED_P(BOP_CALL, PROC_REDEFINED_OP_FLAG)) {
        return vm_invoke_block_opt_call(ec, reg_cfp, calling, ci, block_handler);
    }
    else {
        calling->recv = rb_vm_bh_to_procval(ec, block_handler);
        calling->cc = rb_vm_search_method_slowpath(ci, CLASS_OF(calling->recv));
        return vm_call_general(ec, reg_cfp, calling);
    }
}

static VALUE
vm_call_opt_struct_aref0(rb_execution_context_t *ec, struct rb_calling_info *calling)
{
    VALUE recv = calling->recv;

    VM_ASSERT(RB_TYPE_P(recv, T_STRUCT));
    VM_ASSERT(vm_cc_cme(calling->cc)->def->type == VM_METHOD_TYPE_OPTIMIZED);
    VM_ASSERT(vm_cc_cme(calling->cc)->def->body.optimized.type == OPTIMIZED_METHOD_TYPE_STRUCT_AREF);

    const unsigned int off = vm_cc_cme(calling->cc)->def->body.optimized.index;
    return internal_RSTRUCT_GET(recv, off);
}

static VALUE
vm_call_opt_struct_aref(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_opt_struct_aref);

    VALUE ret = vm_call_opt_struct_aref0(ec, calling);
    reg_cfp->sp -= 1;
    return ret;
}

static VALUE
vm_call_opt_struct_aset0(rb_execution_context_t *ec, struct rb_calling_info *calling, VALUE val)
{
    VALUE recv = calling->recv;

    VM_ASSERT(RB_TYPE_P(recv, T_STRUCT));
    VM_ASSERT(vm_cc_cme(calling->cc)->def->type == VM_METHOD_TYPE_OPTIMIZED);
    VM_ASSERT(vm_cc_cme(calling->cc)->def->body.optimized.type == OPTIMIZED_METHOD_TYPE_STRUCT_ASET);

    rb_check_frozen(recv);

    const unsigned int off = vm_cc_cme(calling->cc)->def->body.optimized.index;
    internal_RSTRUCT_SET(recv, off, val);

    return val;
}

static VALUE
vm_call_opt_struct_aset(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_opt_struct_aset);

    VALUE ret = vm_call_opt_struct_aset0(ec, calling, *(reg_cfp->sp - 1));
    reg_cfp->sp -= 2;
    return ret;
}

NOINLINE(static VALUE vm_call_optimized(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling,
                                        const struct rb_callinfo *ci, const struct rb_callcache *cc));

#define VM_CALL_METHOD_ATTR(var, func, nohook) \
    if (UNLIKELY(ruby_vm_event_flags & (RUBY_EVENT_C_CALL | RUBY_EVENT_C_RETURN))) { \
        EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_CALL, calling->recv, vm_cc_cme(cc)->def->original_id, \
                        vm_ci_mid(ci), vm_cc_cme(cc)->owner, Qundef); \
        var = func; \
        EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_RETURN, calling->recv, vm_cc_cme(cc)->def->original_id, \
                        vm_ci_mid(ci), vm_cc_cme(cc)->owner, (var)); \
    } \
    else { \
        nohook; \
        var = func; \
    }

static VALUE
vm_call_optimized(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling,
                  const struct rb_callinfo *ci, const struct rb_callcache *cc)
{
    switch (vm_cc_cme(cc)->def->body.optimized.type) {
      case OPTIMIZED_METHOD_TYPE_SEND:
        CC_SET_FASTPATH(cc, vm_call_opt_send, TRUE);
        return vm_call_opt_send(ec, cfp, calling);
      case OPTIMIZED_METHOD_TYPE_CALL:
        CC_SET_FASTPATH(cc, vm_call_opt_call, TRUE);
        return vm_call_opt_call(ec, cfp, calling);
      case OPTIMIZED_METHOD_TYPE_BLOCK_CALL:
        CC_SET_FASTPATH(cc, vm_call_opt_block_call, TRUE);
        return vm_call_opt_block_call(ec, cfp, calling);
      case OPTIMIZED_METHOD_TYPE_STRUCT_AREF: {
        CALLER_SETUP_ARG(cfp, calling, ci, 0);
        rb_check_arity(calling->argc, 0, 0);

        VALUE v;
        VM_CALL_METHOD_ATTR(v,
                            vm_call_opt_struct_aref(ec, cfp, calling),
                            set_vm_cc_ivar(cc); \
                            CC_SET_FASTPATH(cc, vm_call_opt_struct_aref, (vm_ci_flag(ci) & VM_CALL_ARGS_SIMPLE)))
        return v;
      }
      case OPTIMIZED_METHOD_TYPE_STRUCT_ASET: {
        CALLER_SETUP_ARG(cfp, calling, ci, 1);
        rb_check_arity(calling->argc, 1, 1);

        VALUE v;
        VM_CALL_METHOD_ATTR(v,
                            vm_call_opt_struct_aset(ec, cfp, calling),
                            set_vm_cc_ivar(cc); \
                            CC_SET_FASTPATH(cc, vm_call_opt_struct_aset, (vm_ci_flag(ci) & VM_CALL_ARGS_SIMPLE)))
        return v;
      }
      default:
        rb_bug("vm_call_method: unsupported optimized method type (%d)", vm_cc_cme(cc)->def->body.optimized.type);
    }
}
static VALUE
vm_call_method_each_type(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    const struct rb_callinfo *ci = calling->cd->ci;
    const struct rb_callcache *cc = calling->cc;
    const rb_callable_method_entry_t *cme = vm_cc_cme(cc);
    VALUE v;

    VM_ASSERT(! METHOD_ENTRY_INVALIDATED(cme));

    switch (cme->def->type) {
      case VM_METHOD_TYPE_ISEQ:
        if (ISEQ_BODY(def_iseq_ptr(cme->def))->param.flags.forwardable) {
            CC_SET_FASTPATH(cc, vm_call_iseq_fwd_setup, TRUE);
            return vm_call_iseq_fwd_setup(ec, cfp, calling);
        }
        else {
            CC_SET_FASTPATH(cc, vm_call_iseq_setup, TRUE);
            return vm_call_iseq_setup(ec, cfp, calling);
        }

      case VM_METHOD_TYPE_NOTIMPLEMENTED:
      case VM_METHOD_TYPE_CFUNC:
        CC_SET_FASTPATH(cc, vm_call_cfunc, TRUE);
        return vm_call_cfunc(ec, cfp, calling);

      case VM_METHOD_TYPE_ATTRSET:
        CALLER_SETUP_ARG(cfp, calling, ci, 1);

        rb_check_arity(calling->argc, 1, 1);

        const unsigned int aset_mask = (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT | VM_CALL_KWARG | VM_CALL_FORWARDING);

        if (vm_cc_markable(cc)) {
            vm_cc_attr_index_initialize(cc, INVALID_SHAPE_ID);
            VM_CALL_METHOD_ATTR(v,
                                vm_call_attrset_direct(ec, cfp, cc, calling->recv),
                                CC_SET_FASTPATH(cc, vm_call_attrset, !(vm_ci_flag(ci) & aset_mask)));
        }
        else {
            cc = &((struct rb_callcache) {
                .flags = T_IMEMO |
                    (imemo_callcache << FL_USHIFT) |
                    VM_CALLCACHE_UNMARKABLE |
                    VM_CALLCACHE_ON_STACK,
                .klass = cc->klass,
                .cme_  = cc->cme_,
                .call_ = cc->call_,
                .aux_  = {
                    .attr = {
                        .value = INVALID_SHAPE_ID << SHAPE_FLAG_SHIFT,
                    }
                },
            });

            VM_CALL_METHOD_ATTR(v,
                                vm_call_attrset_direct(ec, cfp, cc, calling->recv),
                                CC_SET_FASTPATH(cc, vm_call_attrset, !(vm_ci_flag(ci) & aset_mask)));
        }
        return v;

      case VM_METHOD_TYPE_IVAR:
        CALLER_SETUP_ARG(cfp, calling, ci, 0);
        rb_check_arity(calling->argc, 0, 0);
        vm_cc_attr_index_initialize(cc, INVALID_SHAPE_ID);
        const unsigned int ivar_mask = (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT | VM_CALL_FORWARDING);
        VM_CALL_METHOD_ATTR(v,
                            vm_call_ivar(ec, cfp, calling),
                            CC_SET_FASTPATH(cc, vm_call_ivar, !(vm_ci_flag(ci) & ivar_mask)));
        return v;

      case VM_METHOD_TYPE_MISSING:
        vm_cc_method_missing_reason_set(cc, 0);
        CC_SET_FASTPATH(cc, vm_call_method_missing, TRUE);
        return vm_call_method_missing(ec, cfp, calling);

      case VM_METHOD_TYPE_BMETHOD:
        CC_SET_FASTPATH(cc, vm_call_bmethod, TRUE);
        return vm_call_bmethod(ec, cfp, calling);

      case VM_METHOD_TYPE_ALIAS:
        CC_SET_FASTPATH(cc, vm_call_alias, TRUE);
        return vm_call_alias(ec, cfp, calling);

      case VM_METHOD_TYPE_OPTIMIZED:
        return vm_call_optimized(ec, cfp, calling, ci, cc);

      case VM_METHOD_TYPE_UNDEF:
        break;

      case VM_METHOD_TYPE_ZSUPER:
        return vm_call_zsuper(ec, cfp, calling, RCLASS_ORIGIN(vm_cc_cme(cc)->defined_class));

      case VM_METHOD_TYPE_REFINED:
        return vm_call_refined(ec, cfp, calling);
    }

    rb_bug("vm_call_method: unsupported method type (%d)", vm_cc_cme(cc)->def->type);
}

NORETURN(static void vm_raise_method_missing(rb_execution_context_t *ec,
                                             int argc, const VALUE *argv, VALUE obj, int call_status));

static VALUE
vm_call_method_nome(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    /* method missing */
    const struct rb_callinfo *ci = calling->cd->ci;
    const int stat = ci_missing_reason(ci);

    if (vm_ci_mid(ci) == idMethodMissing) {
        if (UNLIKELY(calling->heap_argv)) {
            vm_raise_method_missing(ec, RARRAY_LENINT(calling->heap_argv),
                                    RARRAY_CONST_PTR(calling->heap_argv), calling->recv, stat);
        }
        else {
            rb_control_frame_t *reg_cfp = cfp;
            VALUE *argv = STACK_ADDR_FROM_TOP(calling->argc);
            vm_raise_method_missing(ec, calling->argc, argv, calling->recv, stat);
        }
    }
    else {
        return vm_call_method_missing_body(ec, cfp, calling, ci, stat);
    }
}

static VALUE
vm_defined_class_for_protected_call(const rb_callable_method_entry_t *me)
{
    VALUE defined_class = me->defined_class;
    VALUE refined_class = RCLASS_REFINED_CLASS(defined_class);
    return NIL_P(refined_class) ? defined_class : refined_class;
}

static inline VALUE
vm_call_method(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    const struct rb_callinfo *ci = calling->cd->ci;
    const struct rb_callcache *cc = calling->cc;

    VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));

    if (vm_cc_cme(cc) != NULL) {
        switch (METHOD_ENTRY_VISI(vm_cc_cme(cc))) {
          case METHOD_VISI_PUBLIC: /* likely */
            return vm_call_method_each_type(ec, cfp, calling);

          case METHOD_VISI_PRIVATE:
            if (!(vm_ci_flag(ci) & VM_CALL_FCALL)) {
                enum method_missing_reason stat = MISSING_PRIVATE;
                if (vm_ci_flag(ci) & VM_CALL_VCALL) stat |= MISSING_VCALL;

                vm_cc_method_missing_reason_set(cc, stat);
                CC_SET_FASTPATH(cc, vm_call_method_missing, TRUE);
                return vm_call_method_missing(ec, cfp, calling);
            }
            return vm_call_method_each_type(ec, cfp, calling);

          case METHOD_VISI_PROTECTED:
            if (!(vm_ci_flag(ci) & (VM_CALL_OPT_SEND | VM_CALL_FCALL))) {
                VALUE defined_class = vm_defined_class_for_protected_call(vm_cc_cme(cc));

                if (!rb_obj_is_kind_of(cfp->self, defined_class)) {
                    vm_cc_method_missing_reason_set(cc, MISSING_PROTECTED);
                    return vm_call_method_missing(ec, cfp, calling);
                }
                else {
                    /* caller may call the method; keep a stack-local copy of
                       the cc so the shared cache is not mutated */
                    VM_ASSERT(vm_cc_cme(cc) != NULL);
                    struct rb_callcache cc_on_stack = *cc;
                    calling->cc = &cc_on_stack;
                    return vm_call_method_each_type(ec, cfp, calling);
                }
            }
            return vm_call_method_each_type(ec, cfp, calling);

          default:
            rb_bug("unreachable");
        }
    }
    else {
        return vm_call_method_nome(ec, cfp, calling);
    }
}

static VALUE
vm_call_general(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_general);
    return vm_call_method(ec, reg_cfp, calling);
}

void
rb_vm_cc_general(const struct rb_callcache *cc)
{
    VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
    VM_ASSERT(cc != vm_cc_empty());

    *(vm_call_handler *)&cc->call_ = vm_call_general;
}

static VALUE
vm_call_super_method(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_super_method);

    if (ec == NULL) rb_bug("unreachable");

    /* this check is required to distinguish super from other call types */
    VM_ASSERT(vm_cc_call(calling->cc) == vm_call_super_method);
    return vm_call_method(ec, reg_cfp, calling);
}
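/*
 * vm_call_method() is the generic entry: it applies visibility
 * (public/private/protected) against the call-site flags before handing
 * off to vm_call_method_each_type(), which switches on the method
 * definition type and installs a type-specific fastpath.
 */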
static VALUE
vm_search_normal_superclass(VALUE klass)
{
    if (BUILTIN_TYPE(klass) == T_ICLASS &&
        RB_TYPE_P(RBASIC(klass)->klass, T_MODULE) &&
        FL_TEST_RAW(RBASIC(klass)->klass, RMODULE_IS_REFINEMENT)) {
        klass = RBASIC(klass)->klass;
    }
    klass = RCLASS_ORIGIN(klass);
    return RCLASS_SUPER(klass);
}

NORETURN(static void vm_super_outside(void));

static void
vm_super_outside(void)
{
    rb_raise(rb_eRuntimeError, "super called outside of method");
}

static const struct rb_callcache *
empty_cc_for_super(void)
{
    return &vm_empty_cc_for_super;
}

static const struct rb_callcache *
vm_search_super_method(const rb_control_frame_t *reg_cfp, struct rb_call_data *cd, VALUE recv)
{
    VALUE current_defined_class;
    const rb_callable_method_entry_t *me = rb_vm_frame_method_entry(reg_cfp);

    if (!me) {
        vm_super_outside();
    }

    current_defined_class = vm_defined_class_for_protected_call(me);

    if (BUILTIN_TYPE(current_defined_class) != T_MODULE &&
        reg_cfp->iseq != method_entry_iseqptr(me) &&
        !rb_obj_is_kind_of(recv, current_defined_class)) {
        VALUE m = RB_TYPE_P(current_defined_class, T_ICLASS) ?
            RCLASS_INCLUDER(current_defined_class) : current_defined_class;

        if (m) { /* not bound UnboundMethod */
            rb_raise(rb_eTypeError,
                     "self has wrong type to call super in this context: "
                     "%"PRIsVALUE" (expected %"PRIsVALUE")",
                     rb_obj_class(recv), m);
        }
    }

    if (me->def->type == VM_METHOD_TYPE_BMETHOD && (vm_ci_flag(cd->ci) & VM_CALL_ZSUPER)) {
        rb_raise(rb_eRuntimeError,
                 "implicit argument passing of super from method defined"
                 " by define_method() is not supported."
                 " Specify all arguments explicitly.");
    }

    ID mid = me->def->original_id;

    if (!vm_ci_markable(cd->ci)) {
        VM_FORCE_WRITE((const VALUE *)&cd->ci->mid, (VALUE)mid);
    }
    else {
        /* rewrite the call info so the lookup uses the original method id */
        cd->ci = vm_ci_new_runtime(mid,
                                   vm_ci_flag(cd->ci),
                                   vm_ci_argc(cd->ci),
                                   vm_ci_kwarg(cd->ci));
    }

    VALUE klass = vm_search_normal_superclass(me->defined_class);

    if (!klass) {
        /* bound instance method of a module */
        const struct rb_callcache *cc = vm_cc_new(klass, NULL, vm_call_method_missing, cc_type_super);
        RB_OBJ_WRITE(reg_cfp->iseq, &cd->cc, cc);
        return cc;
    }

    const struct rb_callcache *cc = vm_search_method_fastpath((VALUE)reg_cfp->iseq, cd, klass);
    const rb_callable_method_entry_t *cached_cme = vm_cc_cme(cc);

    if (cached_cme == NULL) {
        /* the method is missing */
        cd->cc = empty_cc_for_super();
    }
    else if (cached_cme->called_id != mid) {
        const rb_callable_method_entry_t *cme = rb_callable_method_entry(klass, mid);
        if (cme) {
            cc = vm_cc_new(klass, cme, vm_call_super_method, cc_type_super);
            RB_OBJ_WRITE(reg_cfp->iseq, &cd->cc, cc);
        }
        else {
            cd->cc = cc = empty_cc_for_super();
        }
    }
    else {
        switch (cached_cme->def->type) {
          /* vm_call_refined assumes cc->call is vm_call_super_method on invokesuper */
          case VM_METHOD_TYPE_REFINED:
          /* cc->klass is the superclass of the receiver's class, so checking
             it is not enough to invalidate the IVC for the receiver's class */
          case VM_METHOD_TYPE_ATTRSET:
          case VM_METHOD_TYPE_IVAR:
            vm_cc_call_set(cc, vm_call_super_method); /* invalidate fastpath */
            break;
          default:
            break; /* use fastpath */
        }
    }

    VM_ASSERT((vm_cc_cme(cc), true));
    return cc;
}
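/*
 * vm_search_super_method resolves `super` starting at the superclass of
 * the *defined* class of the current method (not the receiver's class),
 * which is what makes super well-behaved with modules and prepend; the
 * resulting cc is cached on the call site like any other.
 */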
static inline int
block_proc_is_lambda(const VALUE procval)
{
    rb_proc_t *proc;

    if (procval) {
        GetProcPtr(procval, proc);
        return proc->is_lambda;
    }
    else {
        return 0;
    }
}

static inline const rb_namespace_t *
block_proc_namespace(const VALUE procval)
{
    rb_proc_t *proc;

    if (procval) {
        GetProcPtr(procval, proc);
        return proc->ns;
    }
    else {
        return NULL;
    }
}

static VALUE
vm_yield_with_cfunc(rb_execution_context_t *ec,
                    const struct rb_captured_block *captured,
                    VALUE self, int argc, const VALUE *argv, int kw_splat, VALUE block_handler,
                    const rb_callable_method_entry_t *me)
{
    int is_lambda = FALSE; /* TODO */
    VALUE val, arg, blockarg;
    int frame_flag;
    const struct vm_ifunc *ifunc = captured->code.ifunc;

    if (is_lambda) {
        arg = rb_ary_new4(argc, argv);
    }
    else if (argc == 0) {
        arg = Qnil;
    }
    else {
        arg = argv[0];
    }

    blockarg = rb_vm_bh_to_procval(ec, block_handler);

    frame_flag = VM_FRAME_MAGIC_IFUNC | VM_FRAME_FLAG_CFRAME | (me ? VM_FRAME_FLAG_BMETHOD : 0);
    if (kw_splat) {
        frame_flag |= VM_FRAME_FLAG_CFRAME_KW;
    }

    vm_push_frame(ec, (const rb_iseq_t *)captured->code.ifunc,
                  frame_flag,
                  self,
                  VM_GUARDED_PREV_EP(captured->ep),
                  (VALUE)me,
                  0, ec->cfp->sp, 0, 0);
    val = (*ifunc->func)(arg, (VALUE)ifunc->data, argc, argv, blockarg);
    rb_vm_pop_frame(ec);

    return val;
}

VALUE
rb_vm_yield_with_cfunc(rb_execution_context_t *ec, const struct rb_captured_block *captured, int argc, const VALUE *argv)
{
    return vm_yield_with_cfunc(ec, captured, captured->self, argc, argv, 0, VM_BLOCK_HANDLER_NONE, NULL);
}

static VALUE
vm_yield_with_symbol(rb_execution_context_t *ec, VALUE symbol, int argc, const VALUE *argv, int kw_splat, VALUE block_handler)
{
    return rb_sym_proc_call(SYM2ID(symbol), argc, argv, kw_splat, rb_vm_bh_to_procval(ec, block_handler));
}

static inline int
vm_callee_setup_block_arg_arg0_splat(rb_control_frame_t *cfp, const rb_iseq_t *iseq, VALUE *argv, VALUE ary)
{
    int i;
    long len = RARRAY_LEN(ary);

    CHECK_VM_STACK_OVERFLOW(cfp, ISEQ_BODY(iseq)->param.lead_num);

    for (i=0; i<len && i<ISEQ_BODY(iseq)->param.lead_num; i++) {
        argv[i] = RARRAY_AREF(ary, i);
    }

    return i;
}

static inline VALUE
vm_callee_setup_block_arg_arg0_check(VALUE *argv)
{
    VALUE ary, arg0 = argv[0];
    ary = rb_check_array_type(arg0);
#if 0
    /* an argv[0] was unexpectedly modified if this assertion fails */
    VM_ASSERT(argv[0] == arg0);
#endif
    return ary;
}

static int
vm_callee_setup_block_arg(rb_execution_context_t *ec, struct rb_calling_info *calling, const struct rb_callinfo *ci,
                          const rb_iseq_t *iseq, VALUE *argv, const enum arg_setup_type arg_setup_type)
{
    if (rb_simple_iseq_p(iseq)) {
        rb_control_frame_t *cfp = ec->cfp;
        VALUE arg0;

        CALLER_SETUP_ARG(cfp, calling, ci, ISEQ_BODY(iseq)->param.lead_num);

        if (arg_setup_type == arg_setup_block &&
            calling->argc == 1 &&
            ISEQ_BODY(iseq)->param.flags.has_lead &&
            !ISEQ_BODY(iseq)->param.flags.ambiguous_param0 &&
            !NIL_P(arg0 = vm_callee_setup_block_arg_arg0_check(argv))) {
            calling->argc = vm_callee_setup_block_arg_arg0_splat(cfp, iseq, argv, arg0);
        }

        if (calling->argc != ISEQ_BODY(iseq)->param.lead_num) {
            if (arg_setup_type == arg_setup_block) {
                if (calling->argc < ISEQ_BODY(iseq)->param.lead_num) {
                    int i;
                    CHECK_VM_STACK_OVERFLOW(cfp, ISEQ_BODY(iseq)->param.lead_num);
                    for (i=calling->argc; i<ISEQ_BODY(iseq)->param.lead_num; i++) argv[i] = Qnil;
                    calling->argc = ISEQ_BODY(iseq)->param.lead_num; /* fill rest parameters */
                }
                else if (calling->argc > ISEQ_BODY(iseq)->param.lead_num) {
                    calling->argc = ISEQ_BODY(iseq)->param.lead_num; /* simply truncate arguments */
                }
            }
            else {
                argument_arity_error(ec, iseq, calling->argc, ISEQ_BODY(iseq)->param.lead_num, ISEQ_BODY(iseq)->param.lead_num);
            }
        }

        return 0;
    }
    else {
        return setup_parameters_complex(ec, iseq, calling, ci, argv, arg_setup_type);
    }
}

static int
vm_yield_setup_args(rb_execution_context_t *ec, const rb_iseq_t *iseq, const int argc, VALUE *argv, int flags,
                    VALUE block_handler, enum arg_setup_type arg_setup_type)
{
    struct rb_calling_info calling_entry, *calling;

    calling = &calling_entry;
    calling->argc = argc;
    calling->block_handler = block_handler;
    calling->kw_splat = (flags & VM_CALL_KW_SPLAT) ? 1 : 0;
    calling->recv = Qundef;
    calling->heap_argv = 0;
    struct rb_callinfo dummy_ci = VM_CI_ON_STACK(0, flags, 0, 0);

    return vm_callee_setup_block_arg(ec, calling, &dummy_ci, iseq, argv, arg_setup_type);
}

static VALUE
vm_invoke_iseq_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
                     struct rb_calling_info *calling, const struct rb_callinfo *ci,
                     bool is_lambda, VALUE block_handler)
{
    const struct rb_captured_block *captured = VM_BH_TO_ISEQ_BLOCK(block_handler);
    const rb_iseq_t *iseq = rb_iseq_check(captured->code.iseq);
    const int arg_size = ISEQ_BODY(iseq)->param.size;
    VALUE * const rsp = GET_SP() - calling->argc;
    VALUE * const argv = rsp;
    int opt_pc = vm_callee_setup_block_arg(ec, calling, ci, iseq, argv, is_lambda ? arg_setup_method : arg_setup_block);
    int frame_flag = VM_FRAME_MAGIC_BLOCK | (is_lambda ? VM_FRAME_FLAG_LAMBDA : 0);

    SET_SP(rsp);

    if (calling->proc_ns) {
        frame_flag |= VM_FRAME_FLAG_NS_SWITCH;
    }

    vm_push_frame(ec, iseq,
                  frame_flag,
                  captured->self,
                  VM_GUARDED_PREV_EP(captured->ep), 0,
                  ISEQ_BODY(iseq)->iseq_encoded + opt_pc,
                  rsp + arg_size,
                  ISEQ_BODY(iseq)->local_table_size - arg_size, ISEQ_BODY(iseq)->stack_max);

    return Qundef;
}
static VALUE
vm_invoke_symbol_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
                       struct rb_calling_info *calling, const struct rb_callinfo *ci,
                       MAYBE_UNUSED(bool is_lambda), VALUE block_handler)
{
    VALUE symbol = VM_BH_TO_SYMBOL(block_handler);
    int flags = vm_ci_flag(ci);

    if (UNLIKELY(!(flags & VM_CALL_ARGS_SIMPLE) &&
                 ((calling->argc == 0) ||
                  (calling->argc == 1 && (flags & (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT))) ||
                  (calling->argc == 2 && (flags & VM_CALL_ARGS_SPLAT) && (flags & VM_CALL_KW_SPLAT)) ||
                  ((flags & VM_CALL_KWARG) && (vm_ci_kwarg(ci)->keyword_len == calling->argc))))) {
        CALLER_SETUP_ARG(reg_cfp, calling, ci, ALLOW_HEAP_ARGV);
        flags = 0;
        if (UNLIKELY(calling->heap_argv)) {
#if VM_ARGC_STACK_MAX < 0
            if (RARRAY_LEN(calling->heap_argv) < 1) {
                rb_raise(rb_eArgError, "no receiver given");
            }
#endif
            calling->recv = rb_ary_shift(calling->heap_argv);
            /* modify the stack to avoid a cfp consistency error */
            reg_cfp->sp++;
            reg_cfp->sp[-1] = reg_cfp->sp[-2];
            reg_cfp->sp[-2] = calling->recv;
            flags |= VM_CALL_ARGS_SPLAT;
        }
        else {
            if (calling->argc < 1) {
                rb_raise(rb_eArgError, "no receiver given");
            }
            calling->recv = TOPN(--calling->argc);
        }
        if (calling->kw_splat) {
            flags |= VM_CALL_KW_SPLAT;
        }
    }
    else {
        if (calling->argc < 1) {
            rb_raise(rb_eArgError, "no receiver given");
        }
        calling->recv = TOPN(--calling->argc);
    }

    return vm_call_symbol(ec, reg_cfp, calling, ci, symbol, flags);
}

static VALUE
vm_invoke_ifunc_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
                      struct rb_calling_info *calling, const struct rb_callinfo *ci,
                      MAYBE_UNUSED(bool is_lambda), VALUE block_handler)
{
    VALUE val;
    int argc;
    const struct rb_captured_block *captured = VM_BH_TO_IFUNC_BLOCK(block_handler);
    CALLER_SETUP_ARG(ec->cfp, calling, ci, ALLOW_HEAP_ARGV_KEEP_KWSPLAT);
    argc = calling->argc;
    val = vm_yield_with_cfunc(ec, captured, captured->self, CALLING_ARGC(calling),
                              calling->heap_argv ? RARRAY_CONST_PTR(calling->heap_argv) : STACK_ADDR_FROM_TOP(argc),
                              calling->kw_splat, calling->block_handler, NULL);
    POPN(argc); /* TODO: should pop the args before the call? */
    return val;
}

static VALUE
vm_proc_to_block_handler(VALUE procval)
{
    const struct rb_block *block = vm_proc_block(procval);

    switch (vm_block_type(block)) {
      case block_type_iseq:
        return VM_BH_FROM_ISEQ_BLOCK(&block->as.captured);
      case block_type_ifunc:
        return VM_BH_FROM_IFUNC_BLOCK(&block->as.captured);
      case block_type_symbol:
        return VM_BH_FROM_SYMBOL(block->as.symbol);
      case block_type_proc:
        return VM_BH_FROM_PROC(block->as.proc);
    }
    VM_UNREACHABLE(vm_yield_with_proc);
    return Qundef;
}

static VALUE
vm_invoke_proc_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
                     struct rb_calling_info *calling, const struct rb_callinfo *ci,
                     bool is_lambda, VALUE block_handler)
{
    while (vm_block_handler_type(block_handler) == block_handler_type_proc) {
        VALUE proc = VM_BH_TO_PROC(block_handler);
        if (!calling->proc_ns) {
            calling->proc_ns = block_proc_namespace(proc);
        }
        is_lambda = block_proc_is_lambda(proc);
        block_handler = vm_proc_to_block_handler(proc);
    }

    return vm_invoke_block(ec, reg_cfp, calling, ci, is_lambda, block_handler);
}

static inline VALUE
vm_invoke_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
                struct rb_calling_info *calling, const struct rb_callinfo *ci,
                bool is_lambda, VALUE block_handler)
{
    VALUE (*func)(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
                  struct rb_calling_info *calling, const struct rb_callinfo *ci,
                  bool is_lambda, VALUE block_handler);

    switch (vm_block_handler_type(block_handler)) {
      case block_handler_type_iseq:   func = vm_invoke_iseq_block;   break;
      case block_handler_type_ifunc:  func = vm_invoke_ifunc_block;  break;
      case block_handler_type_proc:   func = vm_invoke_proc_block;   break;
      case block_handler_type_symbol: func = vm_invoke_symbol_block; break;
      default: rb_bug("vm_invoke_block: unreachable");
    }

    return func(ec, reg_cfp, calling, ci, is_lambda, block_handler);
}
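/*
 * A block handler encodes one of four block kinds (iseq, ifunc, symbol,
 * proc).  vm_invoke_block dispatches on that tag, and vm_invoke_proc_block
 * first unwraps nested Procs.  For example, passing &:to_s creates a
 * symbol handler, which is invoked via vm_call_symbol rather than by
 * pushing an iseq frame.
 */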
static VALUE
vm_make_proc_with_iseq(const rb_iseq_t *blockiseq)
{
    const rb_execution_context_t *ec = GET_EC();
    const rb_control_frame_t *cfp = rb_vm_get_ruby_level_next_cfp(ec, ec->cfp);
    struct rb_captured_block *captured;

    if (cfp == 0) {
        rb_bug("vm_make_proc_with_iseq: unreachable");
    }

    captured = VM_CFP_TO_CAPTURED_BLOCK(cfp);
    captured->code.iseq = blockiseq;

    return rb_vm_make_proc(ec, captured, rb_cProc);
}

static VALUE
vm_once_exec(VALUE iseq)
{
    VALUE proc = vm_make_proc_with_iseq((rb_iseq_t *)iseq);
    return rb_proc_call_with_block(proc, 0, 0, Qnil);
}

static VALUE
vm_once_clear(VALUE data)
{
    union iseq_inline_storage_entry *is = (union iseq_inline_storage_entry *)data;
    is->once.running_thread = NULL;
    return Qnil;
}

static VALUE
check_respond_to_missing(VALUE obj, VALUE v)
{
    VALUE args[2];
    VALUE r;

    args[0] = obj; args[1] = Qfalse;
    r = rb_check_funcall(v, idRespond_to_missing, 2, args);
    if (!UNDEF_P(r) && RTEST(r)) {
        return Qtrue;
    }
    else {
        return Qfalse;
    }
}

static bool
vm_defined(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, rb_num_t op_type, VALUE obj, VALUE v)
{
    VALUE klass;
    enum defined_type type = (enum defined_type)op_type;

    switch (type) {
      case DEFINED_IVAR:
        /* ... */
        break;
      case DEFINED_GVAR:
        return rb_gvar_defined(SYM2ID(obj));
      case DEFINED_CVAR: {
        const rb_cref_t *cref = vm_get_cref(GET_EP());
        klass = vm_get_cvar_base(cref, GET_CFP(), 0);
        return rb_cvar_defined(klass, SYM2ID(obj));
      }
      case DEFINED_CONST:
      case DEFINED_CONST_FROM: {
        bool allow_nil = type == DEFINED_CONST;
        klass = v;
        return vm_get_ev_const(ec, klass, SYM2ID(obj), allow_nil, true);
      }
      case DEFINED_FUNC:
        klass = CLASS_OF(v);
        return rb_ec_obj_respond_to(ec, v, SYM2ID(obj), TRUE);
      case DEFINED_METHOD: {
        VALUE klass = CLASS_OF(v);
        const rb_method_entry_t *me = rb_method_entry_with_refinements(klass, SYM2ID(obj), NULL);

        if (me) {
            switch (METHOD_ENTRY_VISI(me)) {
              case METHOD_VISI_PRIVATE:
                break;
              case METHOD_VISI_PROTECTED:
                if (!rb_obj_is_kind_of(GET_SELF(), rb_class_real(klass))) {
                    break;
                }
              case METHOD_VISI_PUBLIC:
                return true;
              default:
                rb_bug("vm_defined: unreachable: %u", (unsigned int)METHOD_ENTRY_VISI(me));
            }
        }
        else {
            return check_respond_to_missing(obj, v);
        }
        break;
      }
      case DEFINED_YIELD:
        if (GET_BLOCK_HANDLER() != VM_BLOCK_HANDLER_NONE) {
            return true;
        }
        break;
      case DEFINED_ZSUPER: {
        const rb_callable_method_entry_t *me = rb_vm_frame_method_entry(GET_CFP());

        if (me) {
            VALUE klass = vm_search_normal_superclass(me->defined_class);
            if (!klass) return false;

            ID id = me->def->original_id;
            return rb_method_boundp(klass, id, 0);
        }
        break;
      }
      case DEFINED_REF:
        return RTEST(vm_backref_defined(ec, GET_LEP(), FIX2INT(obj)));
      default:
        rb_bug("unimplemented defined? type (VM)");
        break;
    }

    return false;
}

bool
rb_vm_defined(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, rb_num_t op_type, VALUE obj, VALUE v)
{
    return vm_defined(ec, reg_cfp, op_type, obj, v);
}
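/*
 * vm_defined implements the `defined?` keyword: each branch answers
 * "would evaluating this expression raise a NameError-like error?" without
 * actually evaluating it.  The instruction then maps the boolean to the
 * human-readable strings ("method", "yield", ...).
 */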
static inline const VALUE *
vm_get_ep(const VALUE *const reg_ep, rb_num_t lv)
{
    rb_num_t i;
    const VALUE *ep = reg_ep;
    for (i = 0; i < lv; i++) {
        ep = GET_PREV_EP(ep);
    }
    return ep;
}

static VALUE
vm_get_special_object(const VALUE *const reg_ep, enum vm_special_object_type type)
{
    switch (type) {
      case VM_SPECIAL_OBJECT_VMCORE:
        return rb_mRubyVMFrozenCore;
      case VM_SPECIAL_OBJECT_CBASE:
        return vm_get_cbase(reg_ep);
      case VM_SPECIAL_OBJECT_CONST_BASE:
        return vm_get_const_base(reg_ep);
      default:
        rb_bug("putspecialobject insn: unknown value_type %d", type);
    }
}

static VALUE
vm_concat_array(VALUE ary1, VALUE ary2st)
{
    const VALUE ary2 = ary2st;
    VALUE tmp1 = rb_check_to_array(ary1);
    VALUE tmp2 = rb_check_to_array(ary2);

    if (NIL_P(tmp1)) {
        tmp1 = rb_ary_new3(1, ary1);
    }
    if (tmp1 == ary1) {
        tmp1 = rb_ary_dup(ary1);
    }

    if (NIL_P(tmp2)) {
        return rb_ary_push(tmp1, ary2);
    }
    else {
        return rb_ary_concat(tmp1, tmp2);
    }
}

static VALUE
vm_concat_to_array(VALUE ary1, VALUE ary2st)
{
    /* ary1 must be a newly created array */
    const VALUE ary2 = ary2st;

    if (NIL_P(ary2)) return ary1;

    VALUE tmp2 = rb_check_to_array(ary2);

    if (NIL_P(tmp2)) {
        return rb_ary_push(ary1, ary2);
    }
    else {
        return rb_ary_concat(ary1, tmp2);
    }
}

VALUE
rb_vm_concat_array(VALUE ary1, VALUE ary2st)
{
    return vm_concat_array(ary1, ary2st);
}

VALUE
rb_vm_concat_to_array(VALUE ary1, VALUE ary2st)
{
    return vm_concat_to_array(ary1, ary2st);
}

static VALUE
vm_splat_array(VALUE flag, VALUE ary)
{
    VALUE tmp = rb_check_to_array(ary);
    if (NIL_P(tmp)) {
        return rb_ary_new3(1, ary);
    }
    else if (RTEST(flag)) {
        return rb_ary_dup(tmp);
    }
    else {
        return tmp;
    }
}

VALUE
rb_vm_splat_array(VALUE flag, VALUE ary)
{
    return vm_splat_array(flag, ary);
}

static inline VALUE
vm_check_match(rb_execution_context_t *ec, VALUE target, VALUE pattern, rb_num_t flag)
{
    enum vm_check_match_type type = ((int)flag) & VM_CHECKMATCH_TYPE_MASK;

    if (flag & VM_CHECKMATCH_ARRAY) {
        long i;
        const long n = RARRAY_LEN(pattern);

        for (i = 0; i < n; i++) {
            VALUE v = RARRAY_AREF(pattern, i);
            VALUE c = check_match(ec, v, target, type);

            if (RTEST(c)) {
                return c;
            }
        }
        return Qfalse;
    }
    else {
        return check_match(ec, pattern, target, type);
    }
}

VALUE
rb_vm_check_match(rb_execution_context_t *ec, VALUE target, VALUE pattern, rb_num_t flag)
{
    return vm_check_match(ec, target, pattern, flag);
}

static inline bool
vm_check_keyword(lindex_t bits, lindex_t idx, const VALUE *ep)
{
    const VALUE kw_bits = *(ep - bits);

    if (FIXNUM_P(kw_bits)) {
        unsigned int b = (unsigned int)FIX2ULONG(kw_bits);
        if ((idx < KW_SPECIFIED_BITS_MAX) && (b & (0x01 << idx)))
            return true;
    }
    else {
        VM_ASSERT(RB_TYPE_P(kw_bits, T_HASH));
        if (rb_hash_has_key(kw_bits, INT2FIX(idx))) return true;
    }
    return false;
}

static void
vm_dtrace(rb_event_flag_t flag, rb_execution_context_t *ec)
{
    if (RUBY_DTRACE_METHOD_ENTRY_ENABLED() ||
        RUBY_DTRACE_METHOD_RETURN_ENABLED() ||
        RUBY_DTRACE_CMETHOD_ENTRY_ENABLED() ||
        RUBY_DTRACE_CMETHOD_RETURN_ENABLED()) {

        switch (flag) {
          case RUBY_EVENT_CALL:
            RUBY_DTRACE_METHOD_ENTRY_HOOK(ec, 0, 0);
            return;
          case RUBY_EVENT_C_CALL:
            RUBY_DTRACE_CMETHOD_ENTRY_HOOK(ec, 0, 0);
            return;
          case RUBY_EVENT_RETURN:
            RUBY_DTRACE_METHOD_RETURN_HOOK(ec, 0, 0);
            return;
          case RUBY_EVENT_C_RETURN:
            RUBY_DTRACE_CMETHOD_RETURN_HOOK(ec, 0, 0);
            return;
        }
    }
}
static VALUE
vm_const_get_under(ID id, rb_num_t flags, VALUE cbase)
{
    if (!rb_const_defined_at(cbase, id)) {
        return 0;
    }
    else if (VM_DEFINECLASS_SCOPED_P(flags)) {
        return rb_public_const_get_at(cbase, id);
    }
    else {
        return rb_const_get_at(cbase, id);
    }
}

static VALUE
vm_check_if_class(ID id, rb_num_t flags, VALUE super, VALUE klass)
{
    if (!RB_TYPE_P(klass, T_CLASS)) {
        return 0;
    }
    else if (VM_DEFINECLASS_HAS_SUPERCLASS_P(flags)) {
        VALUE tmp = rb_class_real(RCLASS_SUPER(klass));

        if (tmp != super) {
            rb_raise(rb_eTypeError,
                     "superclass mismatch for class %"PRIsVALUE"",
                     rb_id2str(id));
        }
        else {
            return klass;
        }
    }
    else {
        return klass;
    }
}

static VALUE
vm_check_if_module(ID id, VALUE mod)
{
    if (!RB_TYPE_P(mod, T_MODULE)) {
        return 0;
    }
    else {
        return mod;
    }
}

static VALUE
vm_declare_class(ID id, rb_num_t flags, VALUE cbase, VALUE super)
{
    /* new class declaration */
    VALUE s = VM_DEFINECLASS_HAS_SUPERCLASS_P(flags) ? super : rb_cObject;
    VALUE c = declare_under(id, cbase, rb_define_class_id(id, s));
    rb_define_alloc_func(c, rb_get_alloc_func(c));
    rb_class_inherited(s, c);
    return c;
}

static VALUE
vm_declare_module(ID id, VALUE cbase)
{
    /* new module declaration */
    return declare_under(id, cbase, rb_module_new());
}

NORETURN(static void unmatched_redefinition(const char *type, VALUE cbase, ID id, VALUE old));
static void
unmatched_redefinition(const char *type, VALUE cbase, ID id, VALUE old)
{
    VALUE name = rb_id2str(id);
    VALUE message = rb_sprintf("%"PRIsVALUE" is not a %s",
                               name, type);
    VALUE location = rb_const_source_location_at(cbase, id);
    if (!NIL_P(location)) {
        rb_str_catf(message, "\n%"PRIsVALUE":%"PRIsVALUE":"
                    " previous definition of %"PRIsVALUE" was here",
                    rb_ary_entry(location, 0), rb_ary_entry(location, 1), name);
    }
    rb_exc_raise(rb_exc_new_str(rb_eTypeError, message));
}

static VALUE
vm_define_class(ID id, rb_num_t flags, VALUE cbase, VALUE super)
{
    VALUE klass;

    if (VM_DEFINECLASS_HAS_SUPERCLASS_P(flags) && !RB_TYPE_P(super, T_CLASS)) {
        rb_raise(rb_eTypeError,
                 "superclass must be an instance of Class (given an instance of %"PRIsVALUE")",
                 rb_obj_class(super));
    }

    vm_check_if_namespace(cbase);

    /* find klass */
    rb_autoload_load(cbase, id);

    if ((klass = vm_const_get_under(id, flags, cbase)) != 0) {
        if (!vm_check_if_class(id, flags, super, klass))
            unmatched_redefinition("class", cbase, id, klass);
        return klass;
    }
    else {
        return vm_declare_class(id, flags, cbase, super);
    }
}

static VALUE
vm_define_module(ID id, rb_num_t flags, VALUE cbase)
{
    VALUE mod;

    vm_check_if_namespace(cbase);
    if ((mod = vm_const_get_under(id, flags, cbase)) != 0) {
        if (!vm_check_if_module(id, mod))
            unmatched_redefinition("module", cbase, id, mod);
        return mod;
    }
    else {
        return vm_declare_module(id, cbase);
    }
}

static VALUE
vm_find_or_create_class_by_id(ID id, rb_num_t flags, VALUE cbase, VALUE super)
{
    rb_vm_defineclass_type_t type = VM_DEFINECLASS_TYPE(flags);

    switch (type) {
      case VM_DEFINECLASS_TYPE_CLASS:
        /* classdef returns class scope value */
        return vm_define_class(id, flags, cbase, super);

      case VM_DEFINECLASS_TYPE_SINGLETON_CLASS:
        /* classdef returns class scope value */
        return rb_singleton_class(cbase);

      case VM_DEFINECLASS_TYPE_MODULE:
        /* classdef returns class scope value */
        return vm_define_module(id, flags, cbase);

      default:
        rb_bug("unknown defineclass type: %d", (int)type);
    }
}
static rb_method_visibility_t
vm_scope_visibility_get(const rb_execution_context_t *ec)
{
    const rb_control_frame_t *cfp = rb_vm_get_ruby_level_next_cfp(ec, ec->cfp);

    if (!vm_env_cref_by_cref(cfp->ep)) {
        return METHOD_VISI_PUBLIC;
    }
    else {
        return CREF_SCOPE_VISI(vm_ec_cref(ec))->method_visi;
    }
}

static int
vm_scope_module_func_check(const rb_execution_context_t *ec)
{
    const rb_control_frame_t *cfp = rb_vm_get_ruby_level_next_cfp(ec, ec->cfp);

    if (!vm_env_cref_by_cref(cfp->ep)) {
        return FALSE;
    }
    else {
        return CREF_SCOPE_VISI(vm_ec_cref(ec))->module_func;
    }
}

static void
vm_define_method(const rb_execution_context_t *ec, VALUE obj, ID id, VALUE iseqval, int is_singleton)
{
    VALUE klass;
    rb_method_visibility_t visi;
    rb_cref_t *cref = vm_ec_cref(ec);

    if (is_singleton) {
        klass = rb_singleton_class(obj); /* class and frozen checked in this API */
        visi = METHOD_VISI_PUBLIC;
    }
    else {
        klass = CREF_CLASS_FOR_DEFINITION(cref);
        visi = vm_scope_visibility_get(ec);
    }

    if (NIL_P(klass)) {
        rb_raise(rb_eTypeError, "no class/module to add method");
    }

    rb_add_method_iseq(klass, id, (const rb_iseq_t *)iseqval, cref, visi);
    /* set max_iv_count on the class based on the ivar sets in #initialize */
    if (id == idInitialize && klass != rb_cObject && RB_TYPE_P(klass, T_CLASS)) {
        RCLASS_WRITE_MAX_IV_COUNT(klass, rb_estimate_iv_count(klass, (const rb_iseq_t *)iseqval));
    }

    if (!is_singleton && vm_scope_module_func_check(ec)) {
        klass = rb_singleton_class(klass);
        rb_add_method_iseq(klass, id, (const rb_iseq_t *)iseqval, cref, METHOD_VISI_PUBLIC);
    }
}

static VALUE
vm_invokeblock_i(struct rb_execution_context_struct *ec,
                 struct rb_control_frame_struct *reg_cfp,
                 struct rb_calling_info *calling)
{
    const struct rb_callinfo *ci = calling->cd->ci;
    VALUE block_handler = VM_CF_BLOCK_HANDLER(GET_CFP());

    if (block_handler == VM_BLOCK_HANDLER_NONE) {
        rb_vm_localjump_error("no block given (yield)", Qnil, 0);
    }
    else {
        return vm_invoke_block(ec, GET_CFP(), calling, ci, false, block_handler);
    }
}

enum method_explorer_type {
    mexp_search_method,
    mexp_search_invokeblock,
    mexp_search_super,
};

static inline VALUE
vm_sendish(struct rb_execution_context_struct *ec,
           struct rb_control_frame_struct *reg_cfp,
           struct rb_call_data *cd,
           VALUE block_handler,
           enum method_explorer_type method_explorer)
{
    VALUE val = Qundef;
    const struct rb_callinfo *ci = cd->ci;
    const struct rb_callcache *cc;
    int argc = vm_ci_argc(ci);
    VALUE recv = TOPN(argc);
    struct rb_calling_info calling = {
        .block_handler = block_handler,
        .kw_splat = IS_ARGS_KW_SPLAT(ci) > 0,
        .recv = recv,
        .argc = argc,
        .cd = cd,
    };

    switch (method_explorer) {
      case mexp_search_method:
        calling.cc = cc = vm_search_method_fastpath((VALUE)reg_cfp->iseq, cd, CLASS_OF(recv));
        val = vm_cc_call(cc)(ec, GET_CFP(), &calling);
        break;
      case mexp_search_super:
        calling.cc = cc = vm_search_super_method(reg_cfp, cd, recv);
        val = vm_cc_call(cc)(ec, GET_CFP(), &calling);
        break;
      case mexp_search_invokeblock:
        val = vm_invokeblock_i(ec, GET_CFP(), &calling);
        break;
    }
    return val;
}

VALUE
rb_vm_send(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, CALL_DATA cd, ISEQ blockiseq)
{
    VALUE bh;
    VALUE val;

    if (vm_ci_flag(cd->ci) & VM_CALL_FORWARDING) {
        struct rb_forwarding_call_data adjusted_cd;
        struct rb_callinfo adjusted_ci;

        bh = vm_caller_setup_fwd_args(GET_EC(), GET_CFP(), cd, blockiseq, false, &adjusted_cd, &adjusted_ci);

        val = vm_sendish(ec, GET_CFP(), &adjusted_cd.cd, bh, mexp_search_method);

        if (cd->cc != adjusted_cd.cd.cc && vm_cc_markable(adjusted_cd.cd.cc)) {
            RB_OBJ_WRITE(GET_ISEQ(), &cd->cc, adjusted_cd.cd.cc);
        }
    }
    else {
        bh = vm_caller_setup_arg_block(ec, GET_CFP(), cd->ci, blockiseq, false);
        val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_method);
    }

    return val;
}

VALUE
rb_vm_opt_send_without_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, CALL_DATA cd)
{
    VALUE bh = VM_BLOCK_HANDLER_NONE;
    VALUE val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_method);
    return val;
}

VALUE
rb_vm_invokesuper(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, CALL_DATA cd, ISEQ blockiseq)
{
    VALUE bh;
    VALUE val;

    if (vm_ci_flag(cd->ci) & VM_CALL_FORWARDING) {
        struct rb_forwarding_call_data adjusted_cd;
        struct rb_callinfo adjusted_ci;

        bh = vm_caller_setup_fwd_args(GET_EC(), GET_CFP(), cd, blockiseq, true, &adjusted_cd, &adjusted_ci);

        val = vm_sendish(ec, GET_CFP(), &adjusted_cd.cd, bh, mexp_search_super);

        if (cd->cc != adjusted_cd.cd.cc && vm_cc_markable(adjusted_cd.cd.cc)) {
            RB_OBJ_WRITE(GET_ISEQ(), &cd->cc, adjusted_cd.cd.cc);
        }
    }
    else {
        bh = vm_caller_setup_arg_block(ec, GET_CFP(), cd->ci, blockiseq, true);
        val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_super);
    }

    return val;
}

VALUE
rb_vm_invokeblock(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, CALL_DATA cd)
{
    VALUE bh = VM_BLOCK_HANDLER_NONE;
    VALUE val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_invokeblock);
    return val;
}
static VALUE
vm_objtostring(const rb_iseq_t *iseq, VALUE recv, CALL_DATA cd)
{
    VALUE val = Qundef;
    const struct rb_callcache *cc = vm_search_method((VALUE)iseq, cd, recv);

    switch (TYPE(recv)) {
      case T_SYMBOL:
        if (check_method_basic_definition(vm_cc_cme(cc))) {
            return rb_sym2str(recv);
        }
        break;
      case T_MODULE:
      case T_CLASS:
        if (check_cfunc(vm_cc_cme(cc), rb_mod_to_s)) {
            val = rb_mod_to_s(recv);
            return val;
        }
        break;
      case T_NIL:
        if (check_cfunc(vm_cc_cme(cc), rb_nil_to_s)) {
            return rb_nil_to_s(recv);
        }
        break;
      case T_TRUE:
        if (check_cfunc(vm_cc_cme(cc), rb_true_to_s)) {
            return rb_true_to_s(recv);
        }
        break;
      case T_FALSE:
        if (check_cfunc(vm_cc_cme(cc), rb_false_to_s)) {
            return rb_false_to_s(recv);
        }
        break;
      case T_FIXNUM:
        if (check_cfunc(vm_cc_cme(cc), rb_int_to_s)) {
            return rb_fix_to_s(recv);
        }
        break;
    }
    return Qundef;
}

static VALUE
vm_opt_ary_freeze(VALUE ary, int bop, ID id)
{
    if (BASIC_OP_UNREDEFINED_P(bop, ARRAY_REDEFINED_OP_FLAG)) {
        return ary;
    }
    else {
        return Qundef;
    }
}

static VALUE
vm_opt_hash_freeze(VALUE hash, int bop, ID id)
{
    if (BASIC_OP_UNREDEFINED_P(bop, HASH_REDEFINED_OP_FLAG)) {
        return hash;
    }
    else {
        return Qundef;
    }
}

static VALUE
vm_opt_str_freeze(VALUE str, int bop, ID id)
{
    if (BASIC_OP_UNREDEFINED_P(bop, STRING_REDEFINED_OP_FLAG)) {
        return str;
    }
    else {
        return Qundef;
    }
}

static VALUE
vm_opt_duparray_include_p(rb_execution_context_t *ec, const VALUE ary, VALUE target)
{
    if (BASIC_OP_UNREDEFINED_P(BOP_INCLUDE_P, ARRAY_REDEFINED_OP_FLAG)) {
        return rb_ary_includes(ary, target);
    }
    else {
        VALUE args[1] = {target};

        /* duparray */
        RUBY_DTRACE_CREATE_HOOK(ARRAY, RARRAY_LEN(ary));
        VALUE dupary = rb_ary_resurrect(ary);

        return rb_vm_call_with_refinements(ec, dupary, idIncludeP, 1, args, RB_NO_KEYWORDS);
    }
}

VALUE
rb_vm_opt_duparray_include_p(rb_execution_context_t *ec, const VALUE ary, VALUE target)
{
    return vm_opt_duparray_include_p(ec, ary, target);
}

static VALUE
vm_opt_newarray_max(rb_execution_context_t *ec, rb_num_t num, const VALUE *ptr)
{
    if (BASIC_OP_UNREDEFINED_P(BOP_MAX, ARRAY_REDEFINED_OP_FLAG)) {
        if (num == 0) {
            return Qnil;
        }
        else {
            VALUE result = *ptr;
            rb_snum_t i = num - 1;
            while (i-- > 0) {
                const VALUE v = *++ptr;
                if (OPTIMIZED_CMP(v, result) > 0) {
                    result = v;
                }
            }
            return result;
        }
    }
    else {
        return rb_vm_call_with_refinements(ec, rb_ary_new4(num, ptr), idMax, 0, NULL, RB_NO_KEYWORDS);
    }
}

VALUE
rb_vm_opt_newarray_max(rb_execution_context_t *ec, rb_num_t num, const VALUE *ptr)
{
    return vm_opt_newarray_max(ec, num, ptr);
}

static VALUE
vm_opt_newarray_min(rb_execution_context_t *ec, rb_num_t num, const VALUE *ptr)
{
    if (BASIC_OP_UNREDEFINED_P(BOP_MIN, ARRAY_REDEFINED_OP_FLAG)) {
        if (num == 0) {
            return Qnil;
        }
        else {
            VALUE result = *ptr;
            rb_snum_t i = num - 1;
            while (i-- > 0) {
                const VALUE v = *++ptr;
                if (OPTIMIZED_CMP(v, result) < 0) {
                    result = v;
                }
            }
            return result;
        }
    }
    else {
        return rb_vm_call_with_refinements(ec, rb_ary_new4(num, ptr), idMin, 0, NULL, RB_NO_KEYWORDS);
    }
}

VALUE
rb_vm_opt_newarray_min(rb_execution_context_t *ec, rb_num_t num, const VALUE *ptr)
{
    return vm_opt_newarray_min(ec, num, ptr);
}

static VALUE
vm_opt_newarray_hash(rb_execution_context_t *ec, rb_num_t num, const VALUE *ptr)
{
    if (BASIC_OP_UNREDEFINED_P(BOP_HASH, ARRAY_REDEFINED_OP_FLAG)) {
        return rb_ary_hash_values(num, ptr);
    }
    else {
        return rb_vm_call_with_refinements(ec, rb_ary_new4(num, ptr), idHash, 0, NULL, RB_NO_KEYWORDS);
    }
}

VALUE
rb_vm_opt_newarray_hash(rb_execution_context_t *ec, rb_num_t num, const VALUE *ptr)
{
    return vm_opt_newarray_hash(ec, num, ptr);
}

static VALUE
vm_opt_newarray_include_p(rb_execution_context_t *ec, rb_num_t num, const VALUE *ptr, VALUE target)
{
    if (BASIC_OP_UNREDEFINED_P(BOP_INCLUDE_P, ARRAY_REDEFINED_OP_FLAG)) {
        struct RArray fake_ary;
        VALUE ary = rb_setup_fake_ary(&fake_ary, ptr, num);
        return rb_ary_includes(ary, target);
    }
    else {
        VALUE args[1] = {target};
        return rb_vm_call_with_refinements(ec, rb_ary_new4(num, ptr), idIncludeP, 1, args, RB_NO_KEYWORDS);
    }
}

VALUE
rb_vm_opt_newarray_include_p(rb_execution_context_t *ec, rb_num_t num, const VALUE *ptr, VALUE target)
{
    return vm_opt_newarray_include_p(ec, num, ptr, target);
}

static VALUE
vm_opt_newarray_pack_buffer(rb_execution_context_t *ec, rb_num_t num, const VALUE *ptr, VALUE fmt, VALUE buffer)
{
    if (BASIC_OP_UNREDEFINED_P(BOP_PACK, ARRAY_REDEFINED_OP_FLAG)) {
        struct RArray fake_ary;
        VALUE ary = rb_setup_fake_ary(&fake_ary, ptr, num);
        return rb_ec_pack_ary(ec, ary, fmt, (UNDEF_P(buffer) ? Qnil : buffer));
    }
    else {
        /* the opt_newarray_send insn drops the keyword args, so rebuild them */
        VALUE args[2];
        args[0] = fmt;
        int kw_splat = RB_NO_KEYWORDS;
        int argc = 1;

        if (!UNDEF_P(buffer)) {
            args[1] = rb_hash_new_with_size(1);
            rb_hash_aset(args[1], ID2SYM(idBuffer), buffer);
            kw_splat = RB_PASS_KEYWORDS;
            argc++;
        }

        return rb_vm_call_with_refinements(ec, rb_ary_new4(num, ptr), idPack, argc, args, kw_splat);
    }
}

VALUE
rb_vm_opt_newarray_pack_buffer(rb_execution_context_t *ec, rb_num_t num, const VALUE *ptr, VALUE fmt, VALUE buffer)
{
    return vm_opt_newarray_pack_buffer(ec, num, ptr, fmt, buffer);
}

VALUE
rb_vm_opt_newarray_pack(rb_execution_context_t *ec, rb_num_t num, const VALUE *ptr, VALUE fmt)
{
    return vm_opt_newarray_pack_buffer(ec, num, ptr, fmt, Qundef);
}
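/*
 * The vm_opt_newarray_* helpers back the opt_newarray_send instruction:
 * for literal receivers such as `[a, b].max`, they avoid allocating the
 * intermediate Array (using a stack-allocated fake RArray where needed) as
 * long as the corresponding Array method is unredefined; otherwise they
 * materialize the array and make a normal refinement-aware call.
 */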
static void
vm_track_constant_cache(ID id, void *ic)
{
    rb_vm_t *vm = GET_VM();
    struct rb_id_table *const_cache = vm->constant_cache;
    VALUE lookup_result;
    set_table *ics;

    if (rb_id_table_lookup(const_cache, id, &lookup_result)) {
        ics = (set_table *)lookup_result;
    }
    else {
        ics = set_init_numtable();
        rb_id_table_insert(const_cache, id, (VALUE)ics);
    }

    /* the insertion below can allocate and trigger GC, which may free an
       iseq that also holds a cache to this constant; remember the id so the
       sweep can skip the entry being inserted */
    vm->inserting_constant_cache_id = id;

    set_insert(ics, (st_data_t)ic);

    vm->inserting_constant_cache_id = (ID)0;
}

static void
vm_ic_track_const_chain(rb_control_frame_t *cfp, IC ic, const ID *segments)
{
    /* registered under the VM lock */
    for (int i = 0; segments[i]; i++) {
        ID id = segments[i];
        if (id == idNULL) continue;
        vm_track_constant_cache(id, ic);
    }
}

/* for JIT inlining */
static inline bool
vm_inlined_ic_hit_p(VALUE flags, VALUE value, const rb_cref_t *ic_cref, const VALUE *reg_ep)
{
    if ((flags & IMEMO_CONST_CACHE_SHAREABLE) || rb_ractor_main_p()) {
        VM_ASSERT(ractor_incidental_shareable_p(flags & IMEMO_CONST_CACHE_SHAREABLE, value));

        return (ic_cref == NULL || /* no need to check CREF */
                ic_cref == vm_get_cref(reg_ep));
    }
    return false;
}

static bool
vm_ic_hit_p(const struct iseq_inline_constant_cache_entry *ice, const VALUE *reg_ep)
{
    VM_ASSERT(IMEMO_TYPE_P(ice, imemo_constcache));
    return vm_inlined_ic_hit_p(ice->flags, ice->value, ice->ic_cref, reg_ep);
}

/* YJIT needs this function to never allocate and never raise */
bool
rb_vm_ic_hit_p(IC ic, const VALUE *reg_ep)
{
    return ic->entry && vm_ic_hit_p(ic->entry, reg_ep);
}

static void
vm_ic_update(const rb_iseq_t *iseq, IC ic, VALUE val, const VALUE *reg_ep, const VALUE *pc)
{
    if (ruby_vm_const_missing_count > 0) {
        ruby_vm_const_missing_count = 0;
        ic->entry = NULL;
        return;
    }

    struct iseq_inline_constant_cache_entry *ice =
        IMEMO_NEW(struct iseq_inline_constant_cache_entry, imemo_constcache, 0);
    RB_OBJ_WRITE(ice, &ice->value, val);
    ice->ic_cref = vm_get_const_key_cref(reg_ep);
    if (rb_ractor_shareable_p(val)) ice->flags |= IMEMO_CONST_CACHE_SHAREABLE;
    RB_OBJ_WRITE(iseq, &ic->entry, ice);

    unsigned pos = (unsigned)(pc - ISEQ_BODY(iseq)->iseq_encoded);
    rb_yjit_constant_ic_update(iseq, ic, pos);
}

VALUE
rb_vm_opt_getconstant_path(rb_execution_context_t *ec, rb_control_frame_t *const reg_cfp, IC ic)
{
    VALUE val;
    const ID *segments = ic->segments;
    struct iseq_inline_constant_cache_entry *ice = ic->entry;
    if (ice && vm_ic_hit_p(ice, GET_EP())) {
        val = ice->value;

        VM_ASSERT(val == vm_get_ev_const_chain(ec, segments));
    }
    else {
        ruby_vm_constant_cache_misses++;
        val = vm_get_ev_const_chain(ec, segments);
        vm_ic_track_const_chain(GET_CFP(), ic, segments);
        /* undo the PC increment to get the address of this instruction
           (INSN_ATTR(width) == 2) */
        vm_ic_update(GET_ISEQ(), ic, val, GET_EP(), GET_PC() - 2);
    }
    return val;
}
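/*
 * `A::B::C` compiles to opt_getconstant_path with an inline cache.  On a
 * hit the cached value is returned after a cref check; on a miss the chain
 * is resolved, every ID in the path is registered in vm->constant_cache so
 * that redefining any segment invalidates this site, and the cache entry
 * is (re)written.
 */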
static VALUE
vm_once_dispatch(rb_execution_context_t *ec, ISEQ iseq, ISE is)
{
    rb_thread_t *th = rb_ec_thread_ptr(ec);
    rb_thread_t *const RUNNING_THREAD_ONCE_DONE = (rb_thread_t *)(0x1);

  again:
    if (is->once.running_thread == RUNNING_THREAD_ONCE_DONE) {
        return is->once.value;
    }
    else if (is->once.running_thread == NULL) {
        VALUE val;
        is->once.running_thread = th;
        val = rb_ensure(vm_once_exec, (VALUE)iseq, vm_once_clear, (VALUE)is);
        RB_OBJ_WRITE(ec->cfp->iseq, &is->once.value, val);
        /* is->once.running_thread is cleared by vm_once_clear() */
        is->once.running_thread = RUNNING_THREAD_ONCE_DONE; /* success */
        return val;
    }
    else if (is->once.running_thread == th) {
        /* recursive once */
        return vm_once_exec((VALUE)iseq);
    }
    else {
        /* waiting for the other thread to finish */
        RUBY_VM_CHECK_INTS(ec);
        rb_thread_schedule();
        goto again;
    }
}

static OFFSET
vm_case_dispatch(CDHASH hash, OFFSET else_offset, VALUE key)
{
    switch (OBJ_BUILTIN_TYPE(key)) {
      case -1:
      case T_FLOAT:
      case T_SYMBOL:
      case T_BIGNUM:
      case T_STRING:
        if (BASIC_OP_UNREDEFINED_P(BOP_EQQ,
                                   SYMBOL_REDEFINED_OP_FLAG |
                                   INTEGER_REDEFINED_OP_FLAG |
                                   FLOAT_REDEFINED_OP_FLAG |
                                   NIL_REDEFINED_OP_FLAG |
                                   TRUE_REDEFINED_OP_FLAG |
                                   FALSE_REDEFINED_OP_FLAG |
                                   STRING_REDEFINED_OP_FLAG)) {
            st_data_t val;
            if (RB_FLOAT_TYPE_P(key)) {
                double kval = RFLOAT_VALUE(key);
                if (!isinf(kval) && modf(kval, &kval) == 0.0) {
                    key = FIXABLE(kval) ? LONG2FIX((long)kval) : rb_dbl2big(kval);
                }
            }
            if (rb_hash_stlike_lookup(hash, key, &val)) {
                return FIX2LONG((VALUE)val);
            }
            else {
                return else_offset;
            }
        }
    }
    return 0;
}
NORETURN(static void
         vm_stack_consistency_error(const rb_execution_context_t *ec,
                                    const rb_control_frame_t *,
                                    const VALUE *));
static void
vm_stack_consistency_error(const rb_execution_context_t *ec,
                           const rb_control_frame_t *cfp,
                           const VALUE *bp)
{
    const ptrdiff_t nsp = VM_SP_CNT(ec, cfp->sp);
    const ptrdiff_t nbp = VM_SP_CNT(ec, bp);
    static const char stack_consistency_error[] =
        "Stack consistency error (sp: %"PRIdPTRDIFF", bp: %"PRIdPTRDIFF")";
#if defined RUBY_DEVEL
    VALUE mesg = rb_sprintf(stack_consistency_error, nsp, nbp);
    /* ... append the iseq disassembly for easier debugging ... */
    rb_bug("%s", RSTRING_PTR(mesg));
#else
    rb_bug(stack_consistency_error, nsp, nbp);
#endif
}

static VALUE
vm_opt_plus(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_PLUS, INTEGER_REDEFINED_OP_FLAG)) {
        return rb_fix_plus_fix(recv, obj);
    }
    else if (FLONUM_2_P(recv, obj) &&
             BASIC_OP_UNREDEFINED_P(BOP_PLUS, FLOAT_REDEFINED_OP_FLAG)) {
        return DBL2NUM(RFLOAT_VALUE(recv) + RFLOAT_VALUE(obj));
    }
    else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cFloat && RBASIC_CLASS(obj) == rb_cFloat &&
             BASIC_OP_UNREDEFINED_P(BOP_PLUS, FLOAT_REDEFINED_OP_FLAG)) {
        return DBL2NUM(RFLOAT_VALUE(recv) + RFLOAT_VALUE(obj));
    }
    else if (RBASIC_CLASS(recv) == rb_cString && RBASIC_CLASS(obj) == rb_cString &&
             BASIC_OP_UNREDEFINED_P(BOP_PLUS, STRING_REDEFINED_OP_FLAG)) {
        return rb_str_opt_plus(recv, obj);
    }
    else if (RBASIC_CLASS(recv) == rb_cArray && RBASIC_CLASS(obj) == rb_cArray &&
             BASIC_OP_UNREDEFINED_P(BOP_PLUS, ARRAY_REDEFINED_OP_FLAG)) {
        return rb_ary_plus(recv, obj);
    }
    else {
        return Qundef;
    }
}

static VALUE
vm_opt_minus(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_MINUS, INTEGER_REDEFINED_OP_FLAG)) {
        return rb_fix_minus_fix(recv, obj);
    }
    else if (FLONUM_2_P(recv, obj) &&
             BASIC_OP_UNREDEFINED_P(BOP_MINUS, FLOAT_REDEFINED_OP_FLAG)) {
        return DBL2NUM(RFLOAT_VALUE(recv) - RFLOAT_VALUE(obj));
    }
    else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cFloat && RBASIC_CLASS(obj) == rb_cFloat &&
             BASIC_OP_UNREDEFINED_P(BOP_MINUS, FLOAT_REDEFINED_OP_FLAG)) {
        return DBL2NUM(RFLOAT_VALUE(recv) - RFLOAT_VALUE(obj));
    }
    else {
        return Qundef;
    }
}

static VALUE
vm_opt_mult(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_MULT, INTEGER_REDEFINED_OP_FLAG)) {
        return rb_fix_mul_fix(recv, obj);
    }
    else if (FLONUM_2_P(recv, obj) &&
             BASIC_OP_UNREDEFINED_P(BOP_MULT, FLOAT_REDEFINED_OP_FLAG)) {
        return DBL2NUM(RFLOAT_VALUE(recv) * RFLOAT_VALUE(obj));
    }
    else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cFloat && RBASIC_CLASS(obj) == rb_cFloat &&
             BASIC_OP_UNREDEFINED_P(BOP_MULT, FLOAT_REDEFINED_OP_FLAG)) {
        return DBL2NUM(RFLOAT_VALUE(recv) * RFLOAT_VALUE(obj));
    }
    else {
        return Qundef;
    }
}

static VALUE
vm_opt_div(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_DIV, INTEGER_REDEFINED_OP_FLAG)) {
        return (FIX2LONG(obj) == 0) ? Qundef : rb_fix_div_fix(recv, obj);
    }
    else if (FLONUM_2_P(recv, obj) &&
             BASIC_OP_UNREDEFINED_P(BOP_DIV, FLOAT_REDEFINED_OP_FLAG)) {
        return rb_flo_div_flo(recv, obj);
    }
    else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cFloat && RBASIC_CLASS(obj) == rb_cFloat &&
             BASIC_OP_UNREDEFINED_P(BOP_DIV, FLOAT_REDEFINED_OP_FLAG)) {
        return rb_flo_div_flo(recv, obj);
    }
    else {
        return Qundef;
    }
}

static VALUE
vm_opt_mod(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_MOD, INTEGER_REDEFINED_OP_FLAG)) {
        return (FIX2LONG(obj) == 0) ? Qundef : rb_fix_mod_fix(recv, obj);
    }
    else if (FLONUM_2_P(recv, obj) &&
             BASIC_OP_UNREDEFINED_P(BOP_MOD, FLOAT_REDEFINED_OP_FLAG)) {
        return DBL2NUM(ruby_float_mod(RFLOAT_VALUE(recv), RFLOAT_VALUE(obj)));
    }
    else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cFloat && RBASIC_CLASS(obj) == rb_cFloat &&
             BASIC_OP_UNREDEFINED_P(BOP_MOD, FLOAT_REDEFINED_OP_FLAG)) {
        return DBL2NUM(ruby_float_mod(RFLOAT_VALUE(recv), RFLOAT_VALUE(obj)));
    }
    else {
        return Qundef;
    }
}

static VALUE
vm_opt_neq(const rb_iseq_t *iseq, CALL_DATA cd, CALL_DATA cd_eq, VALUE recv, VALUE obj)
{
    if (vm_method_cfunc_is(iseq, cd, recv, rb_obj_not_equal)) {
        VALUE val = opt_equality(iseq, recv, obj, cd_eq);

        if (!UNDEF_P(val)) {
            return RBOOL(!RTEST(val));
        }
    }
    return Qundef;
}

static VALUE
vm_opt_lt(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_LT, INTEGER_REDEFINED_OP_FLAG)) {
        return RBOOL((SIGNED_VALUE)recv < (SIGNED_VALUE)obj);
    }
    else if (FLONUM_2_P(recv, obj) &&
             BASIC_OP_UNREDEFINED_P(BOP_LT, FLOAT_REDEFINED_OP_FLAG)) {
        return RBOOL(RFLOAT_VALUE(recv) < RFLOAT_VALUE(obj));
    }
    else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cFloat && RBASIC_CLASS(obj) == rb_cFloat &&
             BASIC_OP_UNREDEFINED_P(BOP_LT, FLOAT_REDEFINED_OP_FLAG)) {
        return RBOOL(RFLOAT_VALUE(recv) < RFLOAT_VALUE(obj));
    }
    else {
        return Qundef;
    }
}

static VALUE
vm_opt_le(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_LE, INTEGER_REDEFINED_OP_FLAG)) {
        return RBOOL((SIGNED_VALUE)recv <= (SIGNED_VALUE)obj);
    }
    else if (FLONUM_2_P(recv, obj) &&
             BASIC_OP_UNREDEFINED_P(BOP_LE, FLOAT_REDEFINED_OP_FLAG)) {
        return RBOOL(RFLOAT_VALUE(recv) <= RFLOAT_VALUE(obj));
    }
    else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cFloat && RBASIC_CLASS(obj) == rb_cFloat &&
             BASIC_OP_UNREDEFINED_P(BOP_LE, FLOAT_REDEFINED_OP_FLAG)) {
        return RBOOL(RFLOAT_VALUE(recv) <= RFLOAT_VALUE(obj));
    }
    else {
        return Qundef;
    }
}

static VALUE
vm_opt_gt(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_GT, INTEGER_REDEFINED_OP_FLAG)) {
        return RBOOL((SIGNED_VALUE)recv > (SIGNED_VALUE)obj);
    }
    else if (FLONUM_2_P(recv, obj) &&
             BASIC_OP_UNREDEFINED_P(BOP_GT, FLOAT_REDEFINED_OP_FLAG)) {
        return RBOOL(RFLOAT_VALUE(recv) > RFLOAT_VALUE(obj));
    }
    else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cFloat && RBASIC_CLASS(obj) == rb_cFloat &&
             BASIC_OP_UNREDEFINED_P(BOP_GT, FLOAT_REDEFINED_OP_FLAG)) {
        return RBOOL(RFLOAT_VALUE(recv) > RFLOAT_VALUE(obj));
    }
    else {
        return Qundef;
    }
}

static VALUE
vm_opt_ge(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_GE, INTEGER_REDEFINED_OP_FLAG)) {
        return RBOOL((SIGNED_VALUE)recv >= (SIGNED_VALUE)obj);
    }
    else if (FLONUM_2_P(recv, obj) &&
             BASIC_OP_UNREDEFINED_P(BOP_GE, FLOAT_REDEFINED_OP_FLAG)) {
        return RBOOL(RFLOAT_VALUE(recv) >= RFLOAT_VALUE(obj));
    }
    else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cFloat && RBASIC_CLASS(obj) == rb_cFloat &&
             BASIC_OP_UNREDEFINED_P(BOP_GE, FLOAT_REDEFINED_OP_FLAG)) {
        return RBOOL(RFLOAT_VALUE(recv) >= RFLOAT_VALUE(obj));
    }
    else {
        return Qundef;
    }
}

static VALUE
vm_opt_ltlt(VALUE recv, VALUE obj)
{
    if (SPECIAL_CONST_P(recv)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cString &&
             BASIC_OP_UNREDEFINED_P(BOP_LTLT, STRING_REDEFINED_OP_FLAG)) {
        if (LIKELY(RB_TYPE_P(obj, T_STRING))) {
            return rb_str_buf_append(recv, obj);
        }
        else {
            return rb_str_concat(recv, obj);
        }
    }
    else if (RBASIC_CLASS(recv) == rb_cArray &&
             BASIC_OP_UNREDEFINED_P(BOP_LTLT, ARRAY_REDEFINED_OP_FLAG)) {
        return rb_ary_push(recv, obj);
    }
    else {
        return Qundef;
    }
}

static VALUE
vm_opt_and(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_AND, INTEGER_REDEFINED_OP_FLAG)) {
        return recv & obj; /* tag bit is preserved by & on two Fixnums */
    }
    else {
        return Qundef;
    }
}

static VALUE
vm_opt_or(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_OR, INTEGER_REDEFINED_OP_FLAG)) {
        return recv | obj; /* tag bit is preserved by | on two Fixnums */
    }
    else {
        return Qundef;
    }
}

static VALUE
vm_opt_aref(VALUE recv, VALUE obj)
{
    if (SPECIAL_CONST_P(recv)) {
        if (FIXNUM_2_P(recv, obj) &&
            BASIC_OP_UNREDEFINED_P(BOP_AREF, INTEGER_REDEFINED_OP_FLAG)) {
            return rb_fix_aref(recv, obj);
        }
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cArray &&
             BASIC_OP_UNREDEFINED_P(BOP_AREF, ARRAY_REDEFINED_OP_FLAG)) {
        if (FIXNUM_P(obj)) {
            return rb_ary_entry_internal(recv, FIX2LONG(obj));
        }
        else {
            return rb_ary_aref1(recv, obj);
        }
    }
    else if (RBASIC_CLASS(recv) == rb_cHash &&
             BASIC_OP_UNREDEFINED_P(BOP_AREF, HASH_REDEFINED_OP_FLAG)) {
        return rb_hash_aref(recv, obj);
    }
    else {
        return Qundef;
    }
}

static VALUE
vm_opt_aset(VALUE recv, VALUE obj, VALUE set)
{
    if (SPECIAL_CONST_P(recv)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cArray &&
             BASIC_OP_UNREDEFINED_P(BOP_ASET, ARRAY_REDEFINED_OP_FLAG) &&
             FIXNUM_P(obj)) {
        rb_ary_store(recv, FIX2LONG(obj), set);
        return set;
    }
    else if (RBASIC_CLASS(recv) == rb_cHash &&
             BASIC_OP_UNREDEFINED_P(BOP_ASET, HASH_REDEFINED_OP_FLAG)) {
        rb_hash_aset(recv, obj, set);
        return set;
    }
    else {
        return Qundef;
    }
}

static VALUE
vm_opt_aref_with(VALUE recv, VALUE key)
{
    if (!SPECIAL_CONST_P(recv) && RBASIC_CLASS(recv) == rb_cHash &&
        BASIC_OP_UNREDEFINED_P(BOP_AREF, HASH_REDEFINED_OP_FLAG) &&
        rb_hash_compare_by_id_p(recv) == Qfalse &&
        !FL_TEST(recv, RHASH_PROC_DEFAULT)) {
        return rb_hash_aref(recv, key);
    }
    else {
        return Qundef;
    }
}

VALUE
rb_vm_opt_aref_with(VALUE recv, VALUE key)
{
    return vm_opt_aref_with(recv, key);
}

static VALUE
vm_opt_aset_with(VALUE recv, VALUE key, VALUE val)
{
    if (!SPECIAL_CONST_P(recv) && RBASIC_CLASS(recv) == rb_cHash &&
        BASIC_OP_UNREDEFINED_P(BOP_ASET, HASH_REDEFINED_OP_FLAG) &&
        rb_hash_compare_by_id_p(recv) == Qfalse) {
        return rb_hash_aset(recv, key, val);
    }
    else {
        return Qundef;
    }
}

VALUE
rb_vm_opt_aset_with(VALUE recv, VALUE key, VALUE value)
{
    return vm_opt_aset_with(recv, key, value);
}
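/*
 * Each vm_opt_* binary op guards on BASIC_OP_UNREDEFINED_P, a bitset
 * cleared when a core method such as Integer#+ is redefined.  Returning
 * Qundef tells the instruction to fall back to a full method call, so
 * redefining Integer#+ transparently deoptimizes every `+` site.
 */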
static VALUE
vm_opt_length(VALUE recv, int bop)
{
    if (SPECIAL_CONST_P(recv)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cString &&
             BASIC_OP_UNREDEFINED_P(bop, STRING_REDEFINED_OP_FLAG)) {
        if (bop == BOP_EMPTY_P) {
            return LONG2NUM(RSTRING_LEN(recv));
        }
        else {
            return rb_str_length(recv);
        }
    }
    else if (RBASIC_CLASS(recv) == rb_cArray &&
             BASIC_OP_UNREDEFINED_P(bop, ARRAY_REDEFINED_OP_FLAG)) {
        return LONG2NUM(RARRAY_LEN(recv));
    }
    else if (RBASIC_CLASS(recv) == rb_cHash &&
             BASIC_OP_UNREDEFINED_P(bop, HASH_REDEFINED_OP_FLAG)) {
        return INT2FIX(RHASH_SIZE(recv));
    }
    else {
        return Qundef;
    }
}

static VALUE
vm_opt_empty_p(VALUE recv)
{
    switch (vm_opt_length(recv, BOP_EMPTY_P)) {
      case Qundef:
        return Qundef;
      case INT2FIX(0):
        return Qtrue;
      default:
        return Qfalse;
    }
}

static VALUE
vm_opt_nil_p(const rb_iseq_t *iseq, CALL_DATA cd, VALUE recv)
{
    if (NIL_P(recv) &&
        BASIC_OP_UNREDEFINED_P(BOP_NIL_P, NIL_REDEFINED_OP_FLAG)) {
        return Qtrue;
    }
    else if (vm_method_cfunc_is(iseq, cd, recv, rb_false)) {
        return Qfalse;
    }
    else {
        return Qundef;
    }
}

static VALUE
fix_succ(VALUE x)
{
    switch (x) {
      case ~0UL:
        /* 0xFFFF_FFFF == INT2FIX(-1); -1.succ is 0 */
        return INT2FIX(0);
      case RSHIFT(~0UL, 1):
        /* 0x7FFF_FFFF == LONG2FIX(0x3FFF_FFFF);
           0x3FFF_FFFF + 1 overflows into a Bignum */
        return rb_uint2big(1UL << (SIZEOF_LONG * CHAR_BIT - 2));
      default:
        /* with tagged Fixnums, x + 2 encodes FIX2LONG(x) + 1 */
        return x + 2;
    }
}

static VALUE
vm_opt_succ(VALUE recv)
{
    if (FIXNUM_P(recv) &&
        BASIC_OP_UNREDEFINED_P(BOP_SUCC, INTEGER_REDEFINED_OP_FLAG)) {
        return fix_succ(recv);
    }
    else if (SPECIAL_CONST_P(recv)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cString &&
             BASIC_OP_UNREDEFINED_P(BOP_SUCC, STRING_REDEFINED_OP_FLAG)) {
        return rb_str_succ(recv);
    }
    else {
        return Qundef;
    }
}

static VALUE
vm_opt_not(const rb_iseq_t *iseq, CALL_DATA cd, VALUE recv)
{
    if (vm_method_cfunc_is(iseq, cd, recv, rb_obj_not)) {
        return RBOOL(!RTEST(recv));
    }
    else {
        return Qundef;
    }
}

static VALUE
vm_opt_regexpmatch2(VALUE recv, VALUE obj)
{
    if (SPECIAL_CONST_P(recv)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cString &&
             CLASS_OF(obj) == rb_cRegexp &&
             BASIC_OP_UNREDEFINED_P(BOP_MATCH, STRING_REDEFINED_OP_FLAG)) {
        return rb_reg_match(obj, recv);
    }
    else if (RBASIC_CLASS(recv) == rb_cRegexp &&
             BASIC_OP_UNREDEFINED_P(BOP_MATCH, REGEXP_REDEFINED_OP_FLAG)) {
        return rb_reg_match(recv, obj);
    }
    else {
        return Qundef;
    }
}
7091 VALUE self = GET_SELF();
7093 VM_ASSERT(rb_popcount64((uint64_t)event) == 1);
7095 if (event & global_hooks->events) {
7098 vm_dtrace(event, ec);
7099 rb_exec_event_hook_orig(ec, global_hooks, event, self, 0, 0, 0 , val, 0);
7105 if (local_hooks != NULL) {
7106 if (event & local_hooks->events) {
7109 rb_exec_event_hook_orig(ec, local_hooks, event, self, 0, 0, 0 , val, 0);
7115#define VM_TRACE_HOOK(target_event, val) do { \
7116 if ((pc_events & (target_event)) & enabled_flags) { \
7117 vm_trace_hook(ec, reg_cfp, pc, pc_events, (target_event), global_hooks, local_hooks_ptr, (val)); \
7124 VM_ASSERT(VM_FRAME_RUBYFRAME_P(cfp));
7125 VM_ASSERT(ISEQ_BODY(cfp->iseq)->type == ISEQ_TYPE_RESCUE);
7126 return cfp->ep[VM_ENV_INDEX_LAST_LVAR];
7132 const VALUE *pc = reg_cfp->pc;
7133 rb_event_flag_t enabled_flags = ruby_vm_event_flags & ISEQ_TRACE_EVENTS;
7136 if (enabled_flags == 0 && ruby_vm_event_local_num == 0) {
7142 size_t pos = pc - ISEQ_BODY(iseq)->iseq_encoded;
7145 rb_hook_list_t *
const *local_hooks_ptr = &iseq->aux.exec.local_hooks;
7146 rb_event_flag_t iseq_local_events = local_hooks != NULL ? local_hooks->events : 0;
7150 const bool bmethod_frame = VM_FRAME_BMETHOD_P(reg_cfp);
7151 enabled_flags |= iseq_local_events;
7153 VM_ASSERT((iseq_local_events & ~ISEQ_TRACE_EVENTS) == 0);
7155 if (bmethod_frame) {
7157 VM_ASSERT(me->def->type == VM_METHOD_TYPE_BMETHOD);
7158 bmethod_local_hooks = me->def->body.bmethod.hooks;
7159 bmethod_local_hooks_ptr = &me->def->body.bmethod.hooks;
7160 if (bmethod_local_hooks) {
7161 bmethod_local_events = bmethod_local_hooks->events;
7166 if ((pc_events & enabled_flags) == 0 && !bmethod_frame) {
7170 rb_iseq_trace_set(iseq, vm_event_flags & ISEQ_TRACE_EVENTS);
7178 else if (ec->trace_arg != NULL) {
7186 rb_event_flag_t bmethod_events = global_events | bmethod_local_events;
7189 ruby_debug_printf("vm_trace>>%4d (%4x) - %s:%d %s\n",
7192 RSTRING_PTR(rb_iseq_path(iseq)),
7193 (int)rb_iseq_line_no(iseq, pos),
7194 RSTRING_PTR(rb_iseq_label(iseq)));
7196 VM_ASSERT(reg_cfp->pc == pc);
7197 VM_ASSERT(pc_events != 0);
7207 VM_TRACE_HOOK(RUBY_EVENT_COVERAGE_LINE, Qundef);
7208 VM_TRACE_HOOK(RUBY_EVENT_COVERAGE_BRANCH, Qundef);
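/* Each VM_TRACE_HOOK expands to a guarded vm_trace_hook call that only
 * fires when that event bit is both encoded at this pc and enabled.
 * Coverage events carry no associated value, hence Qundef; return-style
 * events elsewhere in this function pass the value on top of the stack
 * instead. */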
7226#if VM_CHECK_MODE > 0
7227NORETURN( NOINLINE( COLDFUNC
7228void rb_vm_canary_is_found_dead(enum ruby_vminsn_type i, VALUE c)));
7231Init_vm_stack_canary(void)
7234 int n = ruby_fill_random_bytes(&vm_stack_canary, sizeof vm_stack_canary, false);
7235 vm_stack_canary |= 0x01;
7237 vm_stack_canary_was_born = true;
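/* Setting the least significant bit makes the canary an odd value,
 * i.e. a valid Fixnum VALUE: it can never be zero, never collides with
 * Qfalse, and is never mistaken for a heap pointer by the GC. */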
7242rb_vm_canary_is_found_dead(enum ruby_vminsn_type i, VALUE c)
7246 const char *insn = rb_insns_name(i);
7250 rb_bug("dead canary found at %s: %s", insn, str);
7254void Init_vm_stack_canary(void) { }
7286 return (*(rb_invoke_funcptr0_t)funcptr)(ec, self);
7293 return (*(rb_invoke_funcptr1_t)funcptr)(ec, self, argv[0]);
7300 return (*(rb_invoke_funcptr2_t)funcptr)(ec, self, argv[0], argv[1]);
7307 return (*(rb_invoke_funcptr3_t)funcptr)(ec, self, argv[0], argv[1], argv[2]);
7314 return (*(rb_invoke_funcptr4_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3]);
7321 return (*(rb_invoke_funcptr5_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4]);
7328 return (*(rb_invoke_funcptr6_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
7335 return (*(rb_invoke_funcptr7_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
7342 return (*(rb_invoke_funcptr8_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
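/* Builtin functions (Primitive calls) are declared with fixed C arity,
 * so there is one builtin_invokerN per arity that unpacks argv into
 * positional arguments through an arity-specific function-pointer
 * typedef, as spelled out for the larger arities below. */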
7348 typedef VALUE (*rb_invoke_funcptr9_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9);
7349 return (*(rb_invoke_funcptr9_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
7355 typedef VALUE (*rb_invoke_funcptr10_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10);
7356 return (*(rb_invoke_funcptr10_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
7362 typedef VALUE (*rb_invoke_funcptr11_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11);
7363 return (*(rb_invoke_funcptr11_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
7369 typedef VALUE (*rb_invoke_funcptr12_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11, VALUE v12);
7370 return (*(rb_invoke_funcptr12_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
7376 typedef VALUE (*rb_invoke_funcptr13_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11, VALUE v12, VALUE v13);
7377 return (*(rb_invoke_funcptr13_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
7383 typedef VALUE (*rb_invoke_funcptr14_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11, VALUE v12, VALUE v13, VALUE v14);
7384 return (*(rb_invoke_funcptr14_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
7390 typedef VALUE (*rb_invoke_funcptr15_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11, VALUE v12, VALUE v13, VALUE v14, VALUE v15);
7391 return (*(rb_invoke_funcptr15_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
7396static builtin_invoker
7397lookup_builtin_invoker(int argc)
7399 static const builtin_invoker invokers[] = {
7418 return invokers[argc];
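/* The invokers[] table is indexed directly by the builtin's arity, so
 * the supported arity range is bounded by the table size (0..15 given
 * the invokers defined above). */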
7424 const bool canary_p = ISEQ_BODY(reg_cfp->iseq)->builtin_attrs & BUILTIN_ATTR_LEAF;
7425 SETUP_CANARY(canary_p);
7426 rb_insn_func_t func_ptr = (rb_insn_func_t)(uintptr_t)bf->func_ptr;
7427 VALUE ret = (*lookup_builtin_invoker(bf->argc))(ec, reg_cfp->self, argv, func_ptr);
7428 CHECK_CANARY(canary_p, BIN(invokebuiltin));
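/* When the builtin is annotated as a leaf (Primitive.attr! :leaf), a
 * canary VALUE is planted on the VM stack before the call and verified
 * afterwards, catching leaf-declared C functions that actually touched
 * the VM stack. */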
7435 return invoke_bf(ec, cfp, bf, argv);
7442 fputs("vm_invoke_builtin_delegate: passing -> ", stderr);
7443 for (int i=0; i<bf->argc; i++) {
7444 ruby_debug_printf(":%s ", rb_id2name(ISEQ_BODY(cfp->iseq)->local_table[i+start_index]));
7446 ruby_debug_printf("\n" "%s %s(%d):%p\n", RUBY_FUNCTION_NAME_STRING, bf->name, bf->argc,
7447 (void *)(uintptr_t)bf->func_ptr);
7450 if (bf->argc == 0) {
7451 return invoke_bf(ec, cfp, bf, NULL);
7454 const VALUE *argv = cfp->ep - ISEQ_BODY(cfp->iseq)->local_table_size - VM_ENV_DATA_SIZE + 1 + start_index;
7455 return invoke_bf(ec, cfp, bf, argv);
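/* Locals live below the environment pointer on the VM stack, so the
 * delegated argument vector is recovered from ep by stepping back over
 * the local table and the fixed VM_ENV_DATA_SIZE bookkeeping slots,
 * then skipping start_index locals. */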
7465 return cfp->ep[index];