#include "ruby/internal/config.h"

#ifdef HAVE_STDATOMIC_H
  #include <stdatomic.h>
#endif

#include "debug_counter.h"
#include "internal/class.h"
#include "internal/compar.h"
#include "internal/hash.h"
#include "internal/numeric.h"
#include "internal/proc.h"
#include "internal/random.h"
#include "internal/variable.h"
#include "internal/set_table.h"
#include "internal/struct.h"

#include "insns_info.inc"
                                    int argc, const VALUE *argv, int priv);
ruby_vm_special_exception_copy(VALUE exc)
{
    VALUE e = rb_obj_alloc(rb_class_real(RBASIC_CLASS(exc)));
    rb_obj_copy_ivar(e, exc);
    return e;
}

static void
ec_stack_overflow(rb_execution_context_t *ec, int setup)
{
    VALUE mesg = rb_ec_vm_ptr(ec)->special_exceptions[ruby_error_sysstack];
    ec->raised_flag = RAISED_STACKOVERFLOW;
    if (setup) {
        VALUE at = rb_ec_backtrace_object(ec);
        mesg = ruby_vm_special_exception_copy(mesg);
        rb_ivar_set(mesg, idBt, at);
        rb_ivar_set(mesg, idBt_locations, at);
    }
    ec->errinfo = mesg;
    EC_JUMP_TAG(ec, TAG_RAISE);
}
NORETURN(static void vm_stackoverflow(void));
static void
vm_stackoverflow(void)
{
    ec_stack_overflow(GET_EC(), TRUE);
}

void
rb_ec_stack_overflow(rb_execution_context_t *ec, ruby_stack_overflow_critical_level crit)
{
    if (rb_during_gc()) {
        rb_bug("system stack overflow during GC. Faulty native extension?");
    }
    if (crit >= rb_stack_overflow_fatal) {
        ec->raised_flag = RAISED_STACKOVERFLOW;
        ec->errinfo = rb_ec_vm_ptr(ec)->special_exceptions[ruby_error_stackfatal];
        EC_JUMP_TAG(ec, TAG_RAISE);
    }
    ec_stack_overflow(ec, crit < rb_stack_overflow_signal);
}
callable_class_p(VALUE klass)
{
#if VM_CHECK_MODE >= 2
    if (!klass) return FALSE;

    VM_ASSERT(IMEMO_TYPE_P((VALUE)cme, imemo_ment),
              "imemo_type:%s", rb_imemo_name(imemo_type((VALUE)cme)));

    if (callable_class_p(cme->defined_class)) {
vm_check_frame_detail(VALUE type, int req_block, int req_me, int req_cref,
                      VALUE specval, VALUE cref_or_me, int is_cframe, const rb_iseq_t *iseq)
{
    unsigned int magic = (unsigned int)(type & VM_FRAME_MAGIC_MASK);
    enum imemo_type cref_or_me_type = imemo_env; /* impossible value */

    cref_or_me_type = imemo_type(cref_or_me);

    if (type & VM_FRAME_FLAG_BMETHOD) {
        req_me = TRUE;
    }

    if (req_block && (type & VM_ENV_FLAG_LOCAL) == 0) {
        rb_bug("vm_push_frame: specval (%p) should be a block_ptr on %x frame", (void *)specval, magic);
    }
    if (!req_block && (type & VM_ENV_FLAG_LOCAL) != 0) {
        rb_bug("vm_push_frame: specval (%p) should not be a block_ptr on %x frame", (void *)specval, magic);
    }

    if (req_me) {
        if (cref_or_me_type != imemo_ment) {
            rb_bug("vm_push_frame: (%s) should be method entry on %x frame", rb_obj_info(cref_or_me), magic);
        }
    }
    else {
        if (req_cref && cref_or_me_type != imemo_cref) {
            rb_bug("vm_push_frame: (%s) should be CREF on %x frame", rb_obj_info(cref_or_me), magic);
        }
        else if (cref_or_me != Qfalse && cref_or_me_type != imemo_cref) {
            if (((type & VM_FRAME_FLAG_LAMBDA) || magic == VM_FRAME_MAGIC_IFUNC || magic == VM_FRAME_MAGIC_DUMMY) && (cref_or_me_type == imemo_ment)) {
                /* ignore */
            }
            else {
                rb_bug("vm_push_frame: (%s) should be false or cref on %x frame", rb_obj_info(cref_or_me), magic);
            }
        }
    }

    if (cref_or_me_type == imemo_ment) {
        const rb_callable_method_entry_t *me = (const rb_callable_method_entry_t *)cref_or_me;

        if (!callable_method_entry_p(me)) {
            rb_bug("vm_push_frame: ment (%s) should be callable on %x frame.", rb_obj_info(cref_or_me), magic);
        }
    }

    if ((type & VM_FRAME_MAGIC_MASK) == VM_FRAME_MAGIC_DUMMY) {
        VM_ASSERT(iseq == NULL ||
                  RUBY_VM_NORMAL_ISEQ_P(iseq) /* argument error */);
    }
    else {
        VM_ASSERT(is_cframe == !RUBY_VM_NORMAL_ISEQ_P(iseq));
    }
}
    VALUE given_magic = type & VM_FRAME_MAGIC_MASK;

#define CHECK(magic, req_block, req_me, req_cref, is_cframe) \
    case magic: \
      vm_check_frame_detail(type, req_block, req_me, req_cref, \
                            specval, cref_or_me, is_cframe, iseq); \
      break

    switch (given_magic) {
      /*                           BLK    ME     CREF   CFRAME */
      CHECK(VM_FRAME_MAGIC_METHOD, TRUE,  TRUE,  FALSE, FALSE);
      CHECK(VM_FRAME_MAGIC_CLASS,  TRUE,  FALSE, TRUE,  FALSE);
      CHECK(VM_FRAME_MAGIC_TOP,    TRUE,  FALSE, TRUE,  FALSE);
      CHECK(VM_FRAME_MAGIC_CFUNC,  TRUE,  TRUE,  FALSE, TRUE);
      CHECK(VM_FRAME_MAGIC_BLOCK,  FALSE, FALSE, FALSE, FALSE);
      CHECK(VM_FRAME_MAGIC_IFUNC,  FALSE, FALSE, FALSE, TRUE);
      CHECK(VM_FRAME_MAGIC_EVAL,   FALSE, FALSE, FALSE, FALSE);
      CHECK(VM_FRAME_MAGIC_RESCUE, FALSE, FALSE, FALSE, FALSE);
      CHECK(VM_FRAME_MAGIC_DUMMY,  TRUE,  FALSE, FALSE, FALSE);
      default:
        rb_bug("vm_push_frame: unknown type (%x)", (unsigned int)given_magic);
    }
#undef CHECK
static VALUE vm_stack_canary; /* Initialized later */
static bool vm_stack_canary_was_born = false;

static ptrdiff_t
previous_insn_index(const rb_iseq_t *iseq, const VALUE *pc)
{
    unsigned int pos = 0;
    while (pos < ISEQ_BODY(iseq)->iseq_size) {
        int opcode = rb_vm_insn_addr2opcode((void *)ISEQ_BODY(iseq)->iseq_encoded[pos]);
        unsigned int next_pos = pos + insn_len(opcode);
        if (ISEQ_BODY(iseq)->iseq_encoded + next_pos == pc) {
            return pos;
        }
        pos = next_pos;
    }
    rb_bug("failed to find the previous insn");
}
void
rb_vm_check_canary(const rb_execution_context_t *ec, VALUE *sp)
{
    const struct rb_control_frame_struct *reg_cfp = ec->cfp;
    const struct rb_iseq_struct *iseq;

    if (! LIKELY(vm_stack_canary_was_born)) {
        return; /* :FIXME: isn't it rather fatal to enter this branch?  */
    }
    else if ((VALUE *)reg_cfp == ec->vm_stack + ec->vm_stack_size) {
        /* This is at the very beginning of a thread. cfp does not exist. */
        return;
    }
    else if (! (iseq = GET_ISEQ())) {
        return;
    }
    else if (LIKELY(sp[0] != vm_stack_canary)) {
        return;
    }
    else {
        /* we are going to call methods below; squash the canary to
         * prevent infinite loop. */
        sp[0] = Qundef;
    }

    const VALUE *orig = rb_iseq_original_iseq(iseq);
    const VALUE iseqw = rb_iseqw_new(iseq);
    const VALUE inspection = rb_inspect(iseqw);
    const char *stri = rb_str_to_cstr(inspection);
    const VALUE disasm = rb_iseq_disasm(iseq);
    const char *strd = rb_str_to_cstr(disasm);
    const ptrdiff_t pos = previous_insn_index(iseq, GET_PC());
    const enum ruby_vminsn_type insn = (enum ruby_vminsn_type)orig[pos];
    const char *name = insn_name(insn);

    ruby_debug_printf(
        "We are killing the stack canary set by %s, "
        "at %s@pc=%"PRIdPTR"\n"
        "watch out the C stack trace.\n"
        "%s",
        name, stri, pos, strd);
    rb_bug("see above.");
}
#define vm_check_canary(ec, sp) rb_vm_check_canary(ec, sp)

#else
#define vm_check_canary(ec, sp)
#define vm_check_frame(a, b, c, d)
#endif /* VM_CHECK_MODE > 0 */
#if USE_DEBUG_COUNTER
static void
vm_push_frame_debug_counter_inc(
    const struct rb_execution_context_struct *ec,
    const struct rb_control_frame_struct *reg_cfp,
    VALUE type)
{
    const struct rb_control_frame_struct *prev_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(reg_cfp);

    RB_DEBUG_COUNTER_INC(frame_push);

    if (RUBY_VM_END_CONTROL_FRAME(ec) != prev_cfp) {
        const bool curr = VM_FRAME_RUBYFRAME_P(reg_cfp);
        const bool prev = VM_FRAME_RUBYFRAME_P(prev_cfp);
        if (prev) {
            if (curr) {
                RB_DEBUG_COUNTER_INC(frame_R2R);
            }
            else {
                RB_DEBUG_COUNTER_INC(frame_R2C);
            }
        }
        else {
            if (curr) {
                RB_DEBUG_COUNTER_INC(frame_C2R);
            }
            else {
                RB_DEBUG_COUNTER_INC(frame_C2C);
            }
        }
    }

    switch (type & VM_FRAME_MAGIC_MASK) {
      case VM_FRAME_MAGIC_METHOD: RB_DEBUG_COUNTER_INC(frame_push_method); return;
      case VM_FRAME_MAGIC_BLOCK:  RB_DEBUG_COUNTER_INC(frame_push_block);  return;
      case VM_FRAME_MAGIC_CLASS:  RB_DEBUG_COUNTER_INC(frame_push_class);  return;
      case VM_FRAME_MAGIC_TOP:    RB_DEBUG_COUNTER_INC(frame_push_top);    return;
      case VM_FRAME_MAGIC_CFUNC:  RB_DEBUG_COUNTER_INC(frame_push_cfunc);  return;
      case VM_FRAME_MAGIC_IFUNC:  RB_DEBUG_COUNTER_INC(frame_push_ifunc);  return;
      case VM_FRAME_MAGIC_EVAL:   RB_DEBUG_COUNTER_INC(frame_push_eval);   return;
      case VM_FRAME_MAGIC_RESCUE: RB_DEBUG_COUNTER_INC(frame_push_rescue); return;
      case VM_FRAME_MAGIC_DUMMY:  RB_DEBUG_COUNTER_INC(frame_push_dummy);  return;
    }

    rb_bug("unreachable");
}
#else
#define vm_push_frame_debug_counter_inc(ec, cfp, t) /* void */
#endif
VALUE
rb_vm_stack_canary(void)
{
    return vm_stack_canary;
}

STATIC_ASSERT(VM_ENV_DATA_INDEX_ME_CREF, VM_ENV_DATA_INDEX_ME_CREF == -2);
STATIC_ASSERT(VM_ENV_DATA_INDEX_SPECVAL, VM_ENV_DATA_INDEX_SPECVAL == -1);
STATIC_ASSERT(VM_ENV_DATA_INDEX_FLAGS,   VM_ENV_DATA_INDEX_FLAGS   == -0);
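/* The three VM_ENV_DATA_INDEX_* slots asserted above form the environment
 * header at the top of every frame: ep[-2] = me/cref, ep[-1] = specval
 * (block handler or previous EP), ep[0] = frame flags.  vm_push_frame()
 * below lays these out after the local slots it initializes to Qnil. */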
static void
vm_push_frame(rb_execution_context_t *ec,
              const rb_iseq_t *iseq,
              VALUE type,
              VALUE self,
              VALUE specval,
              VALUE cref_or_me,
              const VALUE *pc,
              VALUE *sp,
              int local_size,
              int stack_max)
{
    rb_control_frame_t *const cfp = RUBY_VM_NEXT_CONTROL_FRAME(ec->cfp);

    vm_check_frame(type, specval, cref_or_me, iseq);
    VM_ASSERT(local_size >= 0);

    /* check stack overflow */
    CHECK_VM_STACK_OVERFLOW0(cfp, sp, local_size + stack_max);
    vm_check_canary(ec, sp);

    /* setup vm value stack: initialize local variables */
    for (int i=0; i < local_size; i++) {
        *sp++ = Qnil;
    }

    #if defined HAVE_DECL_ATOMIC_SIGNAL_FENCE && HAVE_DECL_ATOMIC_SIGNAL_FENCE
    atomic_signal_fence(memory_order_seq_cst);
    #endif

    vm_push_frame_debug_counter_inc(ec, cfp, type);
void
rb_vm_pop_frame_no_int(rb_execution_context_t *ec)
{
    rb_control_frame_t *cfp = ec->cfp;

    if (VMDEBUG == 2) SDR();

    ec->cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
}

/* return TRUE if the frame is finished */
static inline int
vm_pop_frame(rb_execution_context_t *ec, rb_control_frame_t *cfp, const VALUE *ep)
{
    VALUE flags = ep[VM_ENV_DATA_INDEX_FLAGS];

    if (VMDEBUG == 2) SDR();

    RUBY_VM_CHECK_INTS(ec);
    ec->cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);

    return flags & VM_FRAME_FLAG_FINISH;
}

void
rb_vm_pop_frame(rb_execution_context_t *ec)
{
    vm_pop_frame(ec, ec->cfp, ec->cfp->ep);
}
    rb_iseq_t *dmy_iseq = rb_iseq_alloc_with_dummy_path(fname);

                  VM_FRAME_MAGIC_DUMMY | VM_ENV_FLAG_LOCAL | VM_FRAME_FLAG_FINISH,

                  VM_BLOCK_HANDLER_NONE,

    return (VALUE)dmy_iseq;
rb_arity_error_new(int argc, int min, int max)
{
    VALUE err_mess = rb_sprintf("wrong number of arguments (given %d, expected %d", argc, min);
    if (min == max) {
        /* max is not needed */
    }
    else if (max == UNLIMITED_ARGUMENTS) {
        rb_str_cat_cstr(err_mess, "+");
    }
    else {
        rb_str_catf(err_mess, "..%d", max);
    }
    rb_str_cat_cstr(err_mess, ")");
    return rb_exc_new3(rb_eArgError, err_mess);
}

void
rb_error_arity(int argc, int min, int max)
{
    rb_exc_raise(rb_arity_error_new(argc, min, max));
}
NOINLINE(static void vm_env_write_slowpath(const VALUE *ep, int index, VALUE v));

static void
vm_env_write_slowpath(const VALUE *ep, int index, VALUE v)
{
    /* remember env value forcely */
    rb_gc_writebarrier_remember(VM_ENV_ENVVAL(ep));
    VM_FORCE_WRITE(&ep[index], v);
    VM_ENV_FLAGS_UNSET(ep, VM_ENV_FLAG_WB_REQUIRED);
    RB_DEBUG_COUNTER_INC(lvar_set_slowpath);
}

static inline void
vm_env_write(const VALUE *ep, int index, VALUE v)
{
    VALUE flags = ep[VM_ENV_DATA_INDEX_FLAGS];
    if (LIKELY((flags & VM_ENV_FLAG_WB_REQUIRED) == 0)) {
        VM_STACK_ENV_WRITE(ep, index, v);
    }
    else {
        vm_env_write_slowpath(ep, index, v);
    }
}

void
rb_vm_env_write(const VALUE *ep, int index, VALUE v)
{
    vm_env_write(ep, index, v);
}
    if (block_handler == VM_BLOCK_HANDLER_NONE) {
        return Qnil;
    }
    else {
        switch (vm_block_handler_type(block_handler)) {
          case block_handler_type_iseq:
          case block_handler_type_ifunc:
            return rb_vm_make_proc(ec, VM_BH_TO_CAPT_BLOCK(block_handler), rb_cProc);
          case block_handler_type_symbol:
            return rb_sym_to_proc(VM_BH_TO_SYMBOL(block_handler));
          case block_handler_type_proc:
            return VM_BH_TO_PROC(block_handler);
          default:
            VM_UNREACHABLE(rb_vm_bh_to_procval);
        }
    }
vm_svar_valid_p(VALUE svar)
{
    switch (imemo_type(svar)) {
      case imemo_svar:
      case imemo_cref:
      case imemo_ment:
        return TRUE;
      default:
        break;
    }
    rb_bug("vm_svar_valid_p: unknown type: %s", rb_obj_info(svar));
}
    if (lep && (ec == NULL || ec->root_lep != lep)) {
        svar = lep[VM_ENV_DATA_INDEX_ME_CREF];
    }
    else {
        svar = ec->root_svar;
    }

    VM_ASSERT(svar == Qfalse || vm_svar_valid_p(svar));

    return (struct vm_svar *)svar;
}

static void
lep_svar_write(const rb_execution_context_t *ec, const VALUE *lep, const struct vm_svar *svar)
{
    VM_ASSERT(vm_svar_valid_p((VALUE)svar));

    if (lep && (ec == NULL || ec->root_lep != lep)) {
        vm_env_write(lep, VM_ENV_DATA_INDEX_ME_CREF, (VALUE)svar);
    }
    else {
        RB_OBJ_WRITE(rb_ec_thread_ptr(ec)->self, &ec->root_svar, svar);
    }
}
    const struct vm_svar *svar = lep_svar(ec, lep);

    switch (key) {
      case VM_SVAR_LASTLINE:
        return svar->lastline;
      case VM_SVAR_BACKREF:
        return svar->backref;
      default: {
        const VALUE ary = svar->others;

static struct vm_svar *
svar_new(VALUE obj)
{
    struct vm_svar *svar = IMEMO_NEW(struct vm_svar, imemo_svar, obj);

static void
lep_svar_set(const rb_execution_context_t *ec, const VALUE *lep, rb_num_t key, VALUE val)
{
    struct vm_svar *svar = lep_svar(ec, lep);

    if ((VALUE)svar == Qfalse || imemo_type((VALUE)svar) != imemo_svar) {
        lep_svar_write(ec, lep, svar = svar_new((VALUE)svar));
    }

    switch (key) {
      case VM_SVAR_LASTLINE:
        RB_OBJ_WRITE(svar, &svar->lastline, val);
        return;
      case VM_SVAR_BACKREF:
        RB_OBJ_WRITE(svar, &svar->backref, val);
        return;
      default: {
        VALUE ary = svar->others;
        val = lep_svar_get(ec, lep, key);
    }
    else {
        VALUE backref = lep_svar_get(ec, lep, VM_SVAR_BACKREF);

        if (type & 0x01) {
            switch (type >> 1) {
              case '&': val = rb_reg_last_match(backref);  break;
              case '`': val = rb_reg_match_pre(backref);   break;
              case '\'': val = rb_reg_match_post(backref); break;
              case '+': val = rb_reg_match_last(backref);  break;
              default:
                rb_bug("unexpected back-ref");
            }
        }

    VALUE backref = lep_svar_get(ec, lep, VM_SVAR_BACKREF);
    int nth = 0;

    if (type & 0x01) {
        switch (type >> 1) {
          case '&': case '`': case '\'': case '+':
            return rb_reg_last_defined(backref);
          default:
            rb_bug("unexpected back-ref");
        }
    }
    else {
        nth = (int)(type >> 1);
check_method_entry(VALUE obj, int can_be_svar)
{
    if (obj == Qfalse) return NULL;

#if VM_CHECK_MODE > 0
    if (!RB_TYPE_P(obj, T_IMEMO)) rb_bug("check_method_entry: unknown type: %s", rb_obj_info(obj));
#endif

    switch (imemo_type(obj)) {
      case imemo_ment:
        return (const rb_callable_method_entry_t *)obj;
      case imemo_cref:
        return NULL;
      case imemo_svar:
        if (can_be_svar) {
            return check_method_entry(((struct vm_svar *)obj)->cref_or_me, FALSE);
        }
      default:
#if VM_CHECK_MODE > 0
        rb_bug("check_method_entry: svar should not be there:");
#endif
        return NULL;
    }
}
env_method_entry_unchecked(VALUE obj, int can_be_svar)
{
    if (obj == Qfalse) return NULL;

    switch (imemo_type(obj)) {
      case imemo_ment:
        return (const rb_callable_method_entry_t *)obj;
      case imemo_svar:
        if (can_be_svar) {
            return env_method_entry_unchecked(((struct vm_svar *)obj)->cref_or_me, FALSE);
        }
      default:
        return NULL;
    }
}
    const VALUE *ep = cfp->ep;
    const rb_callable_method_entry_t *me;

    while (!VM_ENV_LOCAL_P(ep)) {
        if ((me = check_method_entry(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) != NULL) return me;
        ep = VM_ENV_PREV_EP(ep);
    }

    return check_method_entry(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
}

    const VALUE *ep = cfp->ep;
    const rb_callable_method_entry_t *me;

    while (!VM_ENV_LOCAL_P_UNCHECKED(ep)) {
        if ((me = env_method_entry_unchecked(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) != NULL) return me;
        ep = VM_ENV_PREV_EP_UNCHECKED(ep);
    }

    return env_method_entry_unchecked(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
    switch (me->def->type) {
      case VM_METHOD_TYPE_ISEQ:
        return me->def->body.iseq.iseqptr;
      default:
        return NULL;
    }

    switch (me->def->type) {
      case VM_METHOD_TYPE_ISEQ:
        return me->def->body.iseq.cref;
      default:
        return NULL;
    }

#if VM_CHECK_MODE == 0
check_cref(VALUE obj, int can_be_svar)
{
    if (obj == Qfalse) return NULL;

#if VM_CHECK_MODE > 0
    if (!RB_TYPE_P(obj, T_IMEMO)) rb_bug("check_cref: unknown type: %s", rb_obj_info(obj));
#endif

    switch (imemo_type(obj)) {
      case imemo_ment:
        return method_entry_cref((const rb_callable_method_entry_t *)obj);
      case imemo_cref:
        return (rb_cref_t *)obj;
      case imemo_svar:
        if (can_be_svar) {
            return check_cref(((struct vm_svar *)obj)->cref_or_me, FALSE);
        }
      default:
#if VM_CHECK_MODE > 0
        rb_bug("check_method_entry: svar should not be there:");
#endif
        return NULL;
    }
}

static inline rb_cref_t *
vm_env_cref(const VALUE *ep)
{
    rb_cref_t *cref;

    while (!VM_ENV_LOCAL_P(ep)) {
        if ((cref = check_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) != NULL) return cref;
        ep = VM_ENV_PREV_EP(ep);
    }

    return check_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
}
is_cref(const VALUE v, int can_be_svar)
{
    if (!RB_TYPE_P(v, T_IMEMO)) return FALSE;

    switch (imemo_type(v)) {
      case imemo_cref:
        return TRUE;
      case imemo_svar:
        if (can_be_svar) return is_cref(((struct vm_svar *)v)->cref_or_me, FALSE);
      default:
        return FALSE;
    }
}

static int
vm_env_cref_by_cref(const VALUE *ep)
{
    while (!VM_ENV_LOCAL_P(ep)) {
        if (is_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) return TRUE;
        ep = VM_ENV_PREV_EP(ep);
    }
    return is_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
}
cref_replace_with_duplicated_cref_each_frame(const VALUE *vptr, int can_be_svar, VALUE parent)
{
    const VALUE v = *vptr;
    rb_cref_t *cref, *new_cref;

    if (RB_TYPE_P(v, T_IMEMO)) {
        switch (imemo_type(v)) {
          case imemo_cref:
            cref = (rb_cref_t *)v;
            new_cref = vm_cref_dup(cref);
            if (parent) {
                RB_OBJ_WRITE(parent, vptr, new_cref);
            }
            else {
                VM_FORCE_WRITE(vptr, (VALUE)new_cref);
            }
            return (rb_cref_t *)new_cref;
          case imemo_svar:
            if (can_be_svar) {
                return cref_replace_with_duplicated_cref_each_frame(&((struct vm_svar *)v)->cref_or_me, FALSE, v);
            }
            /* fall through */
          case imemo_ment:
            rb_bug("cref_replace_with_duplicated_cref_each_frame: unreachable");
          default:
            break;
        }
    }
    return NULL;
}

static rb_cref_t *
vm_cref_replace_with_duplicated_cref(const VALUE *ep)
{
    if (vm_env_cref_by_cref(ep)) {
        rb_cref_t *cref;
        VALUE envval;

        while (!VM_ENV_LOCAL_P(ep)) {
            envval = VM_ENV_ESCAPED_P(ep) ? VM_ENV_ENVVAL(ep) : Qfalse;
            if ((cref = cref_replace_with_duplicated_cref_each_frame(&ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE, envval)) != NULL) {
                return cref;
            }
            ep = VM_ENV_PREV_EP(ep);
        }
        envval = VM_ENV_ESCAPED_P(ep) ? VM_ENV_ENVVAL(ep) : Qfalse;
        return cref_replace_with_duplicated_cref_each_frame(&ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE, envval);
    }
    else {
        rb_bug("vm_cref_dup: unreachable");
    }
}
vm_get_cref(const VALUE *ep)
{
    rb_cref_t *cref = vm_env_cref(ep);

    if (cref != NULL) {
        return cref;
    }
    else {
        rb_bug("vm_get_cref: unreachable");
    }
}

rb_cref_t *
rb_vm_get_cref(const VALUE *ep)
{
    return vm_get_cref(ep);
}

    return vm_get_cref(cfp->ep);
vm_get_const_key_cref(const VALUE *ep)
{
    const rb_cref_t *cref = vm_get_cref(ep);
    const rb_cref_t *key_cref = cref;

    while (cref) {
        if (RCLASS_SINGLETON_P(CREF_CLASS(cref)) ||
            RCLASS_CLONED_P(CREF_CLASS(cref))) {
            return key_cref;
        }
        cref = CREF_NEXT(cref);
    }

    /* does not include singleton class */
    return NULL;
}
    #define ADD_NEW_CREF(new_cref) \
        if (new_cref_tail) { \
            RB_OBJ_WRITE(new_cref_tail, &new_cref_tail->next, new_cref); \
        } \
        else { \
            new_cref_head = new_cref; \
        } \
        new_cref_tail = new_cref;

    while (cref) {
        if (CREF_CLASS(cref) == old_klass) {
            new_cref = vm_cref_new_use_prev(new_klass, METHOD_VISI_UNDEF, FALSE, cref, FALSE);
            ADD_NEW_CREF(new_cref);
            return new_cref_head;
        }
        new_cref = vm_cref_new_use_prev(CREF_CLASS(cref), METHOD_VISI_UNDEF, FALSE, cref, FALSE);
        cref = CREF_NEXT(cref);
        ADD_NEW_CREF(new_cref);
    }

    return new_cref_head;
        prev_cref = vm_env_cref(ep);

            prev_cref = vm_env_cref(cfp->ep);

    return vm_cref_new(klass, METHOD_VISI_PUBLIC, FALSE, prev_cref, pushed_by_eval, singleton);
vm_get_cbase(const VALUE *ep)
{
    const rb_cref_t *cref = vm_get_cref(ep);

    return CREF_CLASS_FOR_DEFINITION(cref);
}

static inline VALUE
vm_get_const_base(const VALUE *ep)
{
    const rb_cref_t *cref = vm_get_cref(ep);

    while (cref) {
        if (!CREF_PUSHED_BY_EVAL(cref)) {
            return CREF_CLASS_FOR_DEFINITION(cref);
        }
        cref = CREF_NEXT(cref);
    }

    return Qundef;
}
vm_check_if_namespace(VALUE klass)
{
    if (!RB_TYPE_P(klass, T_CLASS) && !RB_TYPE_P(klass, T_MODULE)) {
        rb_raise(rb_eTypeError, "%+"PRIsVALUE" is not a class/module", klass);
    }
}

static inline void
vm_ensure_not_refinement_module(VALUE self)
{
    if (RB_TYPE_P(self, T_MODULE) && FL_TEST(self, RMODULE_IS_REFINEMENT)) {
        rb_warn("not defined at the refinement, but at the outer class/module");
    }
}
    if (NIL_P(orig_klass) && allow_nil) {
        /* in current lexical scope */
        const rb_cref_t *root_cref = vm_get_cref(ec->cfp->ep);
        const rb_cref_t *cref;
        VALUE klass = Qnil;

        while (root_cref && CREF_PUSHED_BY_EVAL(root_cref)) {
            root_cref = CREF_NEXT(root_cref);
        }
        cref = root_cref;
        while (cref && CREF_NEXT(cref)) {
            if (CREF_PUSHED_BY_EVAL(cref)) {
                klass = Qnil;
            }
            else {
                klass = CREF_CLASS(cref);
            }
            cref = CREF_NEXT(cref);

            if (!NIL_P(klass)) {
                VALUE av, am = 0;
                rb_const_entry_t *ce;
              search_continue:
                if ((ce = rb_const_lookup(klass, id))) {
                    rb_const_warn_if_deprecated(ce, klass, id);
                    val = ce->value;
                    if (UNDEF_P(val)) {
                        if (am == klass) break;
                        am = klass;
                        if (is_defined) return 1;
                        if (rb_autoloading_value(klass, id, &av, NULL)) return av;
                        rb_autoload_load(klass, id);
                        goto search_continue;
                    }
                    else {
                        if (is_defined) {
                            return 1;
                        }
                        else {
                            if (UNLIKELY(!rb_ractor_main_p())) {
                                if (!rb_ractor_shareable_p(val)) {
                                    rb_raise(rb_eRactorIsolationError,
                                             "can not access non-shareable objects in constant %"PRIsVALUE"::%"PRIsVALUE" by non-main ractor.",
                                             rb_class_path(klass), rb_id2str(id));
                                }
                            }
                            return val;
                        }
                    }
                }
            }
        }

        /* search self */
        if (root_cref && !NIL_P(CREF_CLASS(root_cref))) {
            klass = vm_get_iclass(ec->cfp, CREF_CLASS(root_cref));
        }
    }
    else {
        vm_check_if_namespace(orig_klass);
        if (is_defined) {
            return rb_public_const_defined_from(orig_klass, id);
        }
        else {
            return rb_public_const_get_from(orig_klass, id);
        }
    }

    return vm_get_ev_const(ec, orig_klass, id, allow_nil == Qtrue, 0);
    int allow_nil = TRUE;
    if (segments[0] == idNULL) {
        val = rb_cObject;
        idx++;
        allow_nil = FALSE;
    }
    while (segments[idx]) {
        ID id = segments[idx++];
        val = vm_get_ev_const(ec, val, id, allow_nil, 0);
        allow_nil = FALSE;
    }
        rb_bug("vm_get_cvar_base: no cref");

    while (CREF_NEXT(cref) &&
           (NIL_P(CREF_CLASS(cref)) || RCLASS_SINGLETON_P(CREF_CLASS(cref)) ||
            CREF_PUSHED_BY_EVAL(cref) || CREF_SINGLETON(cref))) {
        cref = CREF_NEXT(cref);
    }
    if (top_level_raise && !CREF_NEXT(cref)) {
        rb_raise(rb_eRuntimeError, "class variable access from toplevel");
    }

    klass = vm_get_iclass(cfp, CREF_CLASS(cref));
ALWAYS_INLINE(static void fill_ivar_cache(const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, int is_attr, attr_index_t index, shape_id_t shape_id));
static inline void
fill_ivar_cache(const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, int is_attr, attr_index_t index, shape_id_t shape_id)
{
    if (is_attr) {
        vm_cc_attr_index_set(cc, index, shape_id);
    }
    else {
        vm_ic_attr_index_set(iseq, ic, index, shape_id);
    }
}
#define ractor_incidental_shareable_p(cond, val) \
    (!(cond) || rb_ractor_shareable_p(val))
#define ractor_object_incidental_shareable_p(obj, val) \
    ractor_incidental_shareable_p(rb_ractor_shareable_p(obj), val)
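/* vm_getivar() below is the shape-based instance variable read path: the
 * inline cache (IVC for getinstancevariable, call cache for attr_reader)
 * stores a (shape_id, attr_index) pair.  When the receiver's current
 * shape_id matches the cached one, the read is a single indexed load from
 * the fields array; otherwise the shape tree is consulted and the cache is
 * refilled. */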
static inline VALUE
vm_getivar(VALUE obj, ID id, const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, int is_attr, VALUE default_value)
{
    VALUE val = Qundef;
    VALUE fields_obj;

    if (SPECIAL_CONST_P(obj)) {
        return default_value;
    }

    switch (BUILTIN_TYPE(obj)) {
      case T_CLASS:
      case T_MODULE:
        if (UNLIKELY(!rb_ractor_main_p())) {
            goto general_path;
        }
        if (default_value == Qundef) { /* defined? */
            goto general_path;
        }
        fields_obj = RCLASS_WRITABLE_FIELDS_OBJ(obj);
        break;
      default:
        fields_obj = rb_obj_fields(obj, id);
        break;
    }

    if (!fields_obj) {
        return default_value;
    }

    shape_id_t shape_id = RBASIC_SHAPE_ID_FOR_READ(fields_obj);
    VALUE *ivar_list = rb_imemo_fields_ptr(fields_obj);

    shape_id_t cached_id;
    attr_index_t index;

    if (is_attr) {
        vm_cc_atomic_shape_and_index(cc, &cached_id, &index);
    }
    else {
        vm_ic_atomic_shape_and_index(ic, &cached_id, &index);
    }

    if (LIKELY(cached_id == shape_id)) {
        if (index == ATTR_INDEX_NOT_SET) {
            return default_value;
        }

        val = ivar_list[index];
#if USE_DEBUG_COUNTER
        RB_DEBUG_COUNTER_INC(ivar_get_ic_hit);

        RB_DEBUG_COUNTER_INC(ivar_get_obj_hit);
#endif
    }
    else { /* cache miss case */
#if USE_DEBUG_COUNTER
        if (is_attr) {
            if (cached_id != INVALID_SHAPE_ID) {
                RB_DEBUG_COUNTER_INC(ivar_get_cc_miss_set);
            }
            else {
                RB_DEBUG_COUNTER_INC(ivar_get_cc_miss_unset);
            }
        }
        else {
            if (cached_id != INVALID_SHAPE_ID) {
                RB_DEBUG_COUNTER_INC(ivar_get_ic_miss_set);
            }
            else {
                RB_DEBUG_COUNTER_INC(ivar_get_ic_miss_unset);
            }
        }
        RB_DEBUG_COUNTER_INC(ivar_get_ic_miss);

        RB_DEBUG_COUNTER_INC(ivar_get_obj_miss);
#endif

        if (UNLIKELY(rb_shape_too_complex_p(shape_id))) {
            st_table *table = rb_imemo_fields_complex_tbl(fields_obj);
            RUBY_ASSERT(table == rb_imemo_fields_complex_tbl(fields_obj));

            if (!st_lookup(table, id, &val)) {
                val = default_value;
            }
        }
        else {
            shape_id_t previous_cached_id = cached_id;
            if (rb_shape_get_iv_index_with_hint(shape_id, id, &index, &cached_id)) {
                /* shape_id is the same as the cached one */
                if (cached_id != previous_cached_id) {
                    fill_ivar_cache(iseq, ic, cc, is_attr, index, cached_id);
                }

                if (index == ATTR_INDEX_NOT_SET) {
                    val = default_value;
                }
                else {
                    val = ivar_list[index];
                }
            }
            else {
                if (is_attr) {
                    vm_cc_attr_index_initialize(cc, shape_id);
                }
                else {
                    vm_ic_attr_index_initialize(ic, shape_id);
                }
                val = default_value;
            }
        }
    }

    if (!UNDEF_P(default_value)) {
        RUBY_ASSERT(!UNDEF_P(val));
    }

    return val;

  general_path:
    RB_DEBUG_COUNTER_INC(ivar_get_ic_miss);
    return rb_attr_get(obj, id);
}
populate_cache(attr_index_t index, shape_id_t next_shape_id, ID id, const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, bool is_attr)
{
    RUBY_ASSERT(!rb_shape_too_complex_p(next_shape_id));

    /* Cache population code */
    if (is_attr) {
        vm_cc_attr_index_set(cc, index, next_shape_id);
    }
    else {
        vm_ic_attr_index_set(iseq, ic, index, next_shape_id);
    }
}

static VALUE
vm_setivar_slowpath(VALUE obj, ID id, VALUE val, const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, bool is_attr)
{
    RB_DEBUG_COUNTER_INC(ivar_set_ic_miss);

    rb_check_frozen(obj);

    attr_index_t index = rb_ivar_set_index(obj, id, val);
    shape_id_t next_shape_id = RBASIC_SHAPE_ID(obj);

    if (!rb_shape_too_complex_p(next_shape_id)) {
        populate_cache(index, next_shape_id, id, iseq, ic, cc, is_attr);
    }

    RB_DEBUG_COUNTER_INC(ivar_set_obj_miss);
    return val;
}

static VALUE
vm_setivar_slowpath_ivar(VALUE obj, ID id, VALUE val, const rb_iseq_t *iseq, IVC ic)
{
    return vm_setivar_slowpath(obj, id, val, iseq, ic, NULL, false);
}

static VALUE
vm_setivar_slowpath_attr(VALUE obj, ID id, VALUE val, const struct rb_callcache *cc)
{
    return vm_setivar_slowpath(obj, id, val, NULL, NULL, cc, true);
}
NOINLINE(static VALUE vm_setivar_class(VALUE obj, ID id, VALUE val, shape_id_t dest_shape_id, attr_index_t index));

static VALUE
vm_setivar_class(VALUE obj, ID id, VALUE val, shape_id_t dest_shape_id, attr_index_t index)
{
    if (UNLIKELY(!rb_ractor_main_p())) {
        return Qundef;
    }

    VALUE fields_obj = RCLASS_WRITABLE_FIELDS_OBJ(obj);
    if (UNLIKELY(!fields_obj)) {
        return Qundef;
    }

    shape_id_t shape_id = RBASIC_SHAPE_ID(fields_obj);

    if (shape_id == dest_shape_id) {
        RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
    }
    else if (dest_shape_id != INVALID_SHAPE_ID) {
        if (RSHAPE_DIRECT_CHILD_P(shape_id, dest_shape_id) && RSHAPE_EDGE_NAME(dest_shape_id) == id && RSHAPE_CAPACITY(shape_id) == RSHAPE_CAPACITY(dest_shape_id)) {
            RUBY_ASSERT(index < RSHAPE_CAPACITY(dest_shape_id));
        }
        else {
            return Qundef;
        }
    }
    else {
        return Qundef;
    }

    RB_OBJ_WRITE(fields_obj, &rb_imemo_fields_ptr(fields_obj)[index], val);

    if (shape_id != dest_shape_id) {
        RBASIC_SET_SHAPE_ID(obj, dest_shape_id);
        RBASIC_SET_SHAPE_ID(fields_obj, dest_shape_id);
    }

    RB_DEBUG_COUNTER_INC(ivar_set_ic_hit);

    return val;
}
NOINLINE(static VALUE vm_setivar_default(VALUE obj, ID id, VALUE val, shape_id_t dest_shape_id, attr_index_t index));

static VALUE
vm_setivar_default(VALUE obj, ID id, VALUE val, shape_id_t dest_shape_id, attr_index_t index)
{
    shape_id_t shape_id = RBASIC_SHAPE_ID(obj);

    if (shape_id == dest_shape_id) {
        RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
    }
    else if (dest_shape_id != INVALID_SHAPE_ID) {
        if (RSHAPE_DIRECT_CHILD_P(shape_id, dest_shape_id) && RSHAPE_EDGE_NAME(dest_shape_id) == id && RSHAPE_CAPACITY(shape_id) == RSHAPE_CAPACITY(dest_shape_id)) {
            RUBY_ASSERT(index < RSHAPE_CAPACITY(dest_shape_id));
        }
        else {
            return Qundef;
        }
    }
    else {
        return Qundef;
    }

    VALUE fields_obj = rb_obj_fields(obj, id);

    RB_OBJ_WRITE(fields_obj, &rb_imemo_fields_ptr(fields_obj)[index], val);

    if (shape_id != dest_shape_id) {
        RBASIC_SET_SHAPE_ID(obj, dest_shape_id);
        RBASIC_SET_SHAPE_ID(fields_obj, dest_shape_id);
    }

    RB_DEBUG_COUNTER_INC(ivar_set_ic_hit);

    return val;
}
vm_setivar(VALUE obj, ID id, VALUE val, shape_id_t dest_shape_id, attr_index_t index)
{
    switch (BUILTIN_TYPE(obj)) {
      case T_OBJECT:
        {
            shape_id_t shape_id = RBASIC_SHAPE_ID(obj);
            RUBY_ASSERT(dest_shape_id == INVALID_SHAPE_ID || !rb_shape_too_complex_p(dest_shape_id));

            if (LIKELY(shape_id == dest_shape_id)) {
                RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
            }
            else if (dest_shape_id != INVALID_SHAPE_ID) {
                if (RSHAPE_DIRECT_CHILD_P(shape_id, dest_shape_id) && RSHAPE_EDGE_NAME(dest_shape_id) == id && RSHAPE_CAPACITY(shape_id) == RSHAPE_CAPACITY(dest_shape_id)) {
                    RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);

                    RBASIC_SET_SHAPE_ID(obj, dest_shape_id);

                    RUBY_ASSERT(index < RSHAPE_CAPACITY(dest_shape_id));
                }
                else {
                    break;
                }
            }
            else {
                break;
            }

            RB_OBJ_WRITE(obj, &ROBJECT_FIELDS(obj)[index], val);

            RB_DEBUG_COUNTER_INC(ivar_set_ic_hit);
            RB_DEBUG_COUNTER_INC(ivar_set_obj_hit);
            return val;
        }
      case T_CLASS:
      case T_MODULE:
        RB_DEBUG_COUNTER_INC(ivar_set_ic_miss_noobject);
      default:
        break;
    }

    return Qundef;
}
    VALUE defined_class = 0;
    VALUE cvar_value = rb_cvar_find(klass, id, &defined_class);

    if (RB_TYPE_P(defined_class, T_ICLASS)) {
        defined_class = RBASIC(defined_class)->klass;
    }

    struct rb_id_table *rb_cvc_tbl = RCLASS_CVC_TBL(defined_class);
    if (!rb_cvc_tbl) {
        rb_bug("the cvc table should be set");
    }

    VALUE ent_data;
    if (!rb_id_table_lookup(rb_cvc_tbl, id, &ent_data)) {
        rb_bug("should have cvar cache entry");
    }

    struct rb_cvar_class_tbl_entry *ent = (void *)ent_data;

    ent->global_cvar_state = GET_GLOBAL_CVAR_STATE();
    cref = vm_get_cref(GET_EP());

    if (ic->entry && ic->entry->global_cvar_state == GET_GLOBAL_CVAR_STATE() && ic->entry->cref == cref && LIKELY(rb_ractor_main_p())) {
        RB_DEBUG_COUNTER_INC(cvar_read_inline_hit);

        VALUE v = rb_ivar_lookup(ic->entry->class_value, id, Qundef);
        RUBY_ASSERT(!UNDEF_P(v));

        return v;
    }

    VALUE klass = vm_get_cvar_base(cref, reg_cfp, 1);

    return update_classvariable_cache(iseq, klass, id, cref, ic);
}

VALUE
rb_vm_getclassvariable(const rb_iseq_t *iseq, const rb_control_frame_t *cfp, ID id, ICVARC ic)
{
    return vm_getclassvariable(iseq, cfp, id, ic);
}

    cref = vm_get_cref(GET_EP());

    if (ic->entry && ic->entry->global_cvar_state == GET_GLOBAL_CVAR_STATE() && ic->entry->cref == cref && LIKELY(rb_ractor_main_p())) {
        RB_DEBUG_COUNTER_INC(cvar_write_inline_hit);

        rb_class_ivar_set(ic->entry->class_value, id, val);
        return;
    }

    VALUE klass = vm_get_cvar_base(cref, reg_cfp, 1);

    update_classvariable_cache(iseq, klass, id, cref, ic);
}

void
rb_vm_setclassvariable(const rb_iseq_t *iseq, const rb_control_frame_t *cfp, ID id, VALUE val, ICVARC ic)
{
    vm_setclassvariable(iseq, cfp, id, val, ic);
}
    return vm_getivar(obj, id, iseq, ic, NULL, FALSE, Qnil);
}

static inline void
vm_setinstancevariable(const rb_iseq_t *iseq, VALUE obj, ID id, VALUE val, IVC ic)
{
    shape_id_t dest_shape_id;
    attr_index_t index;
    vm_ic_atomic_shape_and_index(ic, &dest_shape_id, &index);

    if (UNLIKELY(UNDEF_P(vm_setivar(obj, id, val, dest_shape_id, index)))) {
        switch (BUILTIN_TYPE(obj)) {
          case T_CLASS:
          case T_MODULE:
            if (!UNDEF_P(vm_setivar_class(obj, id, val, dest_shape_id, index))) {
                return;
            }
            break;
          default:
            if (!UNDEF_P(vm_setivar_default(obj, id, val, dest_shape_id, index))) {
                return;
            }
        }
        vm_setivar_slowpath_ivar(obj, id, val, iseq, ic);
    }
}

void
rb_vm_setinstancevariable(const rb_iseq_t *iseq, VALUE obj, ID id, VALUE val, IVC ic)
{
    vm_setinstancevariable(iseq, obj, id, val, ic);
}

VALUE
rb_vm_getinstancevariable(const rb_iseq_t *iseq, VALUE obj, ID id, IVC ic)
{
    return vm_getinstancevariable(iseq, obj, id, ic);
}
    if (FIXNUM_P(err)) {
        ec->tag->state = RUBY_TAG_FATAL;
    }
    else if (SYMBOL_P(err)) {
        ec->tag->state = TAG_THROW;
    }
    else if (THROW_DATA_P(err)) {
        ec->tag->state = THROW_DATA_STATE((struct vm_throw_data *)err);
    }
    else {
        ec->tag->state = TAG_RAISE;
    }
static VALUE
vm_throw_start(const rb_execution_context_t *ec, rb_control_frame_t *const reg_cfp, enum ruby_tag_type state,
               const int flag, const VALUE throwobj)
{
    const rb_control_frame_t *escape_cfp = NULL;
    const rb_control_frame_t * const eocfp = RUBY_VM_END_CONTROL_FRAME(ec); /* end of control frame pointer */

    if (flag != 0) {
        /* do nothing */
    }
    else if (state == TAG_BREAK) {
        int is_orphan = 1;
        const VALUE *ep = GET_EP();
        const rb_iseq_t *base_iseq = GET_ISEQ();
        escape_cfp = reg_cfp;

        while (ISEQ_BODY(base_iseq)->type != ISEQ_TYPE_BLOCK) {
            if (ISEQ_BODY(escape_cfp->iseq)->type == ISEQ_TYPE_CLASS) {
                escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
                ep = escape_cfp->ep;
                base_iseq = escape_cfp->iseq;
            }
            else {
                ep = VM_ENV_PREV_EP(ep);
                base_iseq = ISEQ_BODY(base_iseq)->parent_iseq;
                escape_cfp = rb_vm_search_cf_from_ep(ec, escape_cfp, ep);
                VM_ASSERT(escape_cfp->iseq == base_iseq);
            }
        }

        if (VM_FRAME_LAMBDA_P(escape_cfp)) {
            /* lambda{... break ...} */
            is_orphan = 0;
            state = TAG_RETURN;
        }
        else {
            ep = VM_ENV_PREV_EP(ep);

            while (escape_cfp < eocfp) {
                if (escape_cfp->ep == ep) {
                    const rb_iseq_t *const iseq = escape_cfp->iseq;
                    const VALUE epc = escape_cfp->pc - ISEQ_BODY(iseq)->iseq_encoded;
                    const struct iseq_catch_table *const ct = ISEQ_BODY(iseq)->catch_table;
                    unsigned int i;

                    if (!ct) break;
                    for (i=0; i < ct->size; i++) {
                        const struct iseq_catch_table_entry *const entry =
                            UNALIGNED_MEMBER_PTR(ct, entries[i]);

                        if (entry->type == CATCH_TYPE_BREAK &&
                            entry->iseq == base_iseq &&
                            entry->start < epc && entry->end >= epc) {
                            if (entry->cont == epc) { /* found! */
                                is_orphan = 0;
                            }
                            break;
                        }
                    }
                    break;
                }

                escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
            }
        }

        if (is_orphan) {
            rb_vm_localjump_error("break from proc-closure", throwobj, TAG_BREAK);
        }
    }
    else if (state == TAG_RETRY) {
        const VALUE *ep = VM_ENV_PREV_EP(GET_EP());

        escape_cfp = rb_vm_search_cf_from_ep(ec, reg_cfp, ep);
    }
    else if (state == TAG_RETURN) {
        const VALUE *current_ep = GET_EP();
        const VALUE *target_ep = NULL, *target_lep, *ep = current_ep;
        int in_class_frame = 0;
        int toplevel = 1;
        escape_cfp = reg_cfp;

        while (!VM_ENV_LOCAL_P(ep)) {
            if (VM_ENV_FLAGS(ep, VM_FRAME_FLAG_LAMBDA) && target_ep == NULL) {
                target_ep = ep;
            }
            ep = VM_ENV_PREV_EP(ep);
        }
        target_lep = ep;

        while (escape_cfp < eocfp) {
            const VALUE *lep = VM_CF_LEP(escape_cfp);

            if (!target_lep) {
                target_lep = lep;
            }

            if (lep == target_lep &&
                VM_FRAME_RUBYFRAME_P(escape_cfp) &&
                ISEQ_BODY(escape_cfp->iseq)->type == ISEQ_TYPE_CLASS) {
                in_class_frame = 1;
                target_lep = 0;
            }

            if (lep == target_lep) {
                if (VM_FRAME_LAMBDA_P(escape_cfp)) {
                    toplevel = 0;
                    if (in_class_frame) {
                        /* lambda {class A; ... return ...; end} */
                        goto valid_return;
                    }
                    else {
                        const VALUE *tep = current_ep;

                        while (target_lep != tep) {
                            if (escape_cfp->ep == tep) {
                                /* in lambda */
                                if (tep == target_ep) {
                                    goto valid_return;
                                }
                                else {
                                    goto unexpected_return;
                                }
                            }
                            tep = VM_ENV_PREV_EP(tep);
                        }
                    }
                }
                else if (VM_FRAME_RUBYFRAME_P(escape_cfp)) {
                    switch (ISEQ_BODY(escape_cfp->iseq)->type) {
                      case ISEQ_TYPE_TOP:
                      case ISEQ_TYPE_MAIN:
                        if (toplevel) {
                            if (in_class_frame) goto unexpected_return;
                            if (target_ep == NULL) {
                                goto valid_return;
                            }
                            else {
                                goto unexpected_return;
                            }
                        }
                        break;
                      case ISEQ_TYPE_EVAL: {
                        const rb_iseq_t *is = escape_cfp->iseq;
                        enum rb_iseq_type t = ISEQ_BODY(is)->type;
                        while (t == ISEQ_TYPE_RESCUE || t == ISEQ_TYPE_ENSURE || t == ISEQ_TYPE_EVAL) {
                            if (!(is = ISEQ_BODY(is)->parent_iseq)) break;
                            t = ISEQ_BODY(is)->type;
                        }
                        toplevel = t == ISEQ_TYPE_TOP || t == ISEQ_TYPE_MAIN;
                        break;
                      }
                      case ISEQ_TYPE_CLASS:
                        toplevel = 0;
                        break;
                      default:
                        break;
                    }
                }
            }

            if (escape_cfp->ep == target_lep && ISEQ_BODY(escape_cfp->iseq)->type == ISEQ_TYPE_METHOD) {
                if (target_ep == NULL) {
                    goto valid_return;
                }
                else {
                    goto unexpected_return;
                }
            }

            escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
        }
      unexpected_return:;
        rb_vm_localjump_error("unexpected return", throwobj, TAG_RETURN);

      valid_return:;
        /* do nothing */
    }
    else {
        rb_bug("isns(throw): unsupported throw type");
    }

    ec->tag->state = state;
    return (VALUE)THROW_DATA_NEW(throwobj, escape_cfp, state);
}
         rb_num_t throw_state, VALUE throwobj)
{
    const int state = (int)(throw_state & VM_THROW_STATE_MASK);
    const int flag = (int)(throw_state & VM_THROW_NO_ESCAPE_FLAG);

    if (state != 0) {
        return vm_throw_start(ec, reg_cfp, state, flag, throwobj);
    }
    else {
        return vm_throw_continue(ec, throwobj);
    }
}

    return vm_throw(ec, reg_cfp, throw_state, throwobj);
    int is_splat = flag & 0x01;

    const VALUE obj = ary;

    if (num + is_splat == 0) {
        /* no space left on stack */
    }
    else if (flag & 0x02) {
        /* post: ..., nil ,ary[-1], ..., ary[0..-num] # top */
        rb_num_t i = 0, j;

        if (len < num) {
            for (i = 0; i < num - len; i++) {
                *cfp->sp++ = Qnil;
            }
        }

        for (j = 0; i < num; i++, j++) {

    else {
        /* normal: ary[num..-1], ary[num-2], ary[num-3], ..., ary[0] # top */
        if (num > len) {
            rb_num_t i = 0;
            for (; i < num - len; i++) {
                *cfp->sp++ = Qnil;
            }

            for (rb_num_t j = 0; i < num; i++, j++) {
                *cfp->sp++ = ptr[len - j - 1];
            }
        }
        else {
            for (rb_num_t j = 0; j < num; j++) {
                *cfp->sp++ = ptr[num - j - 1];
            }
        }
    }
    int initial_capa = 2;
    struct rb_class_cc_entries *ccs = ruby_xmalloc(vm_ccs_alloc_size(initial_capa));
#if VM_CHECK_MODE > 0
    ccs->debug_sig = ~(VALUE)ccs;
#endif
    ccs->capa = initial_capa;
    ccs->len = 0;
    ccs->cme = cme;

    rb_managed_id_table_insert(cc_tbl, mid, (VALUE)ccs);
    return ccs;
}

static void
vm_ccs_push(VALUE cc_tbl, ID mid, struct rb_class_cc_entries *ccs, const struct rb_callinfo *ci, const struct rb_callcache *cc)
{
    if (! vm_cc_markable(cc)) {
        return;
    }

    if (UNLIKELY(ccs->len == ccs->capa)) {
        ccs->capa *= 2;
        ccs = ruby_xrealloc(ccs, vm_ccs_alloc_size(ccs->capa));
#if VM_CHECK_MODE > 0
        ccs->debug_sig = ~(VALUE)ccs;
#endif
        rb_managed_id_table_insert(cc_tbl, mid, (VALUE)ccs);
    }
    VM_ASSERT(ccs->len < ccs->capa);

    const int pos = ccs->len++;
    ccs->entries[pos].argc = vm_ci_argc(ci);
    ccs->entries[pos].flag = vm_ci_flag(ci);
    RB_OBJ_WRITE(cc_tbl, &ccs->entries[pos].cc, cc);

    if (RB_DEBUG_COUNTER_SETMAX(ccs_maxlen, ccs->len)) {
        /* for tuning */
    }
}
#if VM_CHECK_MODE > 0
static void
vm_ccs_dump(const struct rb_class_cc_entries *ccs)
{
    ruby_debug_printf("ccs:%p (%d,%d)\n", (void *)ccs, ccs->len, ccs->capa);
    for (int i=0; i<ccs->len; i++) {
        ruby_debug_printf("CCS CI ID:flag:%x argc:%u\n",
                          ccs->entries[i].flag,
                          ccs->entries[i].argc);
        rp(ccs->entries[i].cc);
    }
}
    VM_ASSERT(vm_ccs_p(ccs));
    VM_ASSERT(ccs->len <= ccs->capa);

    for (int i=0; i<ccs->len; i++) {
        const struct rb_callcache *cc = ccs->entries[i].cc;

        VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
        VM_ASSERT(vm_cc_class_check(cc, klass));
        VM_ASSERT(vm_cc_check_cme(cc, ccs->cme));
        VM_ASSERT(!vm_cc_super_p(cc));
        VM_ASSERT(!vm_cc_refinement_p(cc));
    }
    return TRUE;
}
#endif
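/* Call caches are grouped per (class, mid) into rb_class_cc_entries ("ccs")
 * stored in the class's CC table.  vm_evict_cc() below drops a ccs whose
 * method entry has been invalidated; under multi-ractor execution it copies
 * the table and swaps it in atomically instead of mutating it in place. */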
    ASSERT_vm_locking();

    if (rb_multi_ractor_p()) {
        if (RCLASS_WRITABLE_CC_TBL(klass) != cc_tbl) {
            /* the CC table was already replaced; nothing to evict */
            return;
        }

        VALUE ccs_obj;
        rb_managed_id_table_lookup(cc_tbl, mid, &ccs_obj);
        struct rb_class_cc_entries *ccs = (struct rb_class_cc_entries *)ccs_obj;

        if (!ccs || !METHOD_ENTRY_INVALIDATED(ccs->cme)) {
            return;
        }

        VALUE new_table = rb_vm_cc_table_dup(cc_tbl);
        rb_vm_cc_table_delete(new_table, mid);
        RB_OBJ_ATOMIC_WRITE(klass, &RCLASS_WRITABLE_CC_TBL(klass), new_table);
    }
    else {
        rb_vm_cc_table_delete(cc_tbl, mid);
    }
    ASSERT_vm_locking();

    RB_DEBUG_COUNTER_INC(cc_not_found_in_ccs);

    const rb_callable_method_entry_t *cme = rb_callable_method_entry(klass, mid);

    VM_ASSERT(cme == NULL || IMEMO_TYPE_P(cme, imemo_ment));

    if (cme == NULL) {
        /* undef or not found: can't cache the information */
        VM_ASSERT(vm_cc_cme(&vm_empty_cc) == NULL);
        return &vm_empty_cc;
    }

    VALUE cc_tbl = RCLASS_WRITABLE_CC_TBL(klass);
    const VALUE original_cc_table = cc_tbl;
    if (!cc_tbl) {
        cc_tbl = rb_vm_cc_table_create(1);
    }
    else if (rb_multi_ractor_p()) {
        cc_tbl = rb_vm_cc_table_dup(cc_tbl);
    }

    VM_ASSERT(cme == rb_callable_method_entry(klass, mid));

    struct rb_class_cc_entries *ccs = NULL;
    VALUE ccs_obj;
    if (UNLIKELY(rb_managed_id_table_lookup(cc_tbl, mid, &ccs_obj))) {
        ccs = (struct rb_class_cc_entries *)ccs_obj;
    }
    else {
        ccs = vm_ccs_create(klass, cc_tbl, mid, cme);
    }

    cme = rb_check_overloaded_cme(cme, ci);

    const struct rb_callcache *cc = vm_cc_new(klass, cme, vm_call_general, cc_type_normal);
    vm_ccs_push(cc_tbl, mid, ccs, ci, cc);

    VM_ASSERT(vm_cc_cme(cc) != NULL);
    VM_ASSERT(cme->called_id == mid);
    VM_ASSERT(vm_cc_cme(cc)->called_id == mid);

    if (original_cc_table != cc_tbl) {
        RB_OBJ_ATOMIC_WRITE(klass, &RCLASS_WRITABLE_CC_TBL(klass), cc_tbl);
    }

    return cc;
}
    VALUE cc_tbl = RUBY_ATOMIC_VALUE_LOAD(RCLASS_WRITABLE_CC_TBL(klass));
    if (!cc_tbl) {
        return NULL;
    }

    VALUE ccs_obj;
    if (rb_managed_id_table_lookup(cc_tbl, mid, &ccs_obj)) {
        struct rb_class_cc_entries *ccs = (struct rb_class_cc_entries *)ccs_obj;
        const int ccs_len = ccs->len;

        if (UNLIKELY(METHOD_ENTRY_INVALIDATED(ccs->cme))) {
            vm_evict_cc(klass, cc_tbl, mid);
            return NULL;
        }
        else {
            VM_ASSERT(vm_ccs_verify(ccs, mid, klass));

            unsigned int argc = vm_ci_argc(ci);
            unsigned int flag = vm_ci_flag(ci);

            for (int i=0; i<ccs_len; i++) {
                unsigned int ccs_ci_argc = ccs->entries[i].argc;
                unsigned int ccs_ci_flag = ccs->entries[i].flag;
                const struct rb_callcache *ccs_cc = ccs->entries[i].cc;

                VM_ASSERT(IMEMO_TYPE_P(ccs_cc, imemo_callcache));

                if (ccs_ci_argc == argc && ccs_ci_flag == flag) {
                    RB_DEBUG_COUNTER_INC(cc_found_in_ccs);

                    VM_ASSERT(vm_cc_cme(ccs_cc)->called_id == mid);
                    VM_ASSERT(ccs_cc->klass == klass);
                    VM_ASSERT(!METHOD_ENTRY_INVALIDATED(vm_cc_cme(ccs_cc)));

                    return ccs_cc;
                }
            }
        }
    }

    return NULL;
    const ID mid = vm_ci_mid(ci);

    const struct rb_callcache *cc = vm_lookup_cc(klass, ci, mid);
    if (cc) {
        return cc;
    }

    if (rb_multi_ractor_p()) {
        /* re-check in case another ractor populated the table meanwhile */
        cc = vm_lookup_cc(klass, ci, mid);
    }

    if (!cc) {
        cc = vm_populate_cc(klass, ci, mid);
    }

    return cc;
}

    cc = vm_search_cc(klass, ci);

    VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
    VM_ASSERT(cc == vm_cc_empty() || cc->klass == klass);
    VM_ASSERT(cc == vm_cc_empty() || callable_method_entry_p(vm_cc_cme(cc)));
    VM_ASSERT(cc == vm_cc_empty() || !METHOD_ENTRY_INVALIDATED(vm_cc_cme(cc)));
    VM_ASSERT(cc == vm_cc_empty() || vm_cc_cme(cc)->called_id == vm_ci_mid(ci));
#if USE_DEBUG_COUNTER
    const struct rb_callcache *old_cc = cd->cc;
#endif

    const struct rb_callcache *cc = rb_vm_search_method_slowpath(cd->ci, klass);

#if OPT_INLINE_METHOD_CACHE
    cd->cc = cc;

    const struct rb_callcache *empty_cc = &vm_empty_cc;
    if (cd_owner && cc != empty_cc) {
        RB_OBJ_WRITTEN(cd_owner, Qundef, cc);
    }

#if USE_DEBUG_COUNTER
    if (!old_cc || old_cc == empty_cc) {
        /* empty cc */
        RB_DEBUG_COUNTER_INC(mc_inline_miss_empty);
    }
    else if (old_cc == cc) {
        RB_DEBUG_COUNTER_INC(mc_inline_miss_same_cc);
    }
    else if (vm_cc_cme(old_cc) == vm_cc_cme(cc)) {
        RB_DEBUG_COUNTER_INC(mc_inline_miss_same_cme);
    }
    else if (vm_cc_cme(old_cc) && vm_cc_cme(cc) &&
             vm_cc_cme(old_cc)->def == vm_cc_cme(cc)->def) {
        RB_DEBUG_COUNTER_INC(mc_inline_miss_same_def);
    }
    else {
        RB_DEBUG_COUNTER_INC(mc_inline_miss_diff);
    }
#endif

    VM_ASSERT(vm_cc_cme(cc) == NULL ||
              vm_cc_cme(cc)->called_id == vm_ci_mid(cd->ci));
#if OPT_INLINE_METHOD_CACHE
    if (LIKELY(vm_cc_class_check(cc, klass))) {
        if (LIKELY(!METHOD_ENTRY_INVALIDATED(vm_cc_cme(cc)))) {
            VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));
            RB_DEBUG_COUNTER_INC(mc_inline_hit);
            VM_ASSERT(vm_cc_cme(cc) == NULL ||                        /* not found */
                      (vm_ci_flag(cd->ci) & VM_CALL_SUPER) ||          /* search_super w/ define_method */
                      vm_cc_cme(cc)->called_id == vm_ci_mid(cd->ci));  /* cme->called_id == ci->mid */

            return cc;
        }
        RB_DEBUG_COUNTER_INC(mc_inline_miss_invalidated);
    }
    else {
        RB_DEBUG_COUNTER_INC(mc_inline_miss_klass);
    }
#endif

    return vm_search_method_slowpath0(cd_owner, cd, klass);
    VM_ASSERT(klass != Qfalse);

    const struct rb_callcache *cc = vm_search_method_fastpath(cd_owner, cd, klass);
    return vm_cc_cme(cc);

    return vm_search_method(cd_owner, cd, recv);
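/* cfunc_type below: when __transparent_union__ is available, the union lets
 * check_cfunc() compare a method entry's stored function pointer against
 * rb_obj_equal and friends without a cast at every call site; otherwise a
 * plain ANYARGS function-pointer typedef is used and make_cfunc_type() is
 * just a cast. */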
#if __has_attribute(transparent_union)

    VALUE (*f10)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
    VALUE (*f11)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
    VALUE (*f12)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
    VALUE (*f13)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
    VALUE (*f14)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
    VALUE (*f15)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);

# define make_cfunc_type(f) (cfunc_type){.anyargs = (VALUE (*)(ANYARGS))(f)}
#else
# define make_cfunc_type(f) (cfunc_type)(f)
#endif
    VM_ASSERT(IMEMO_TYPE_P(me, imemo_ment));
    VM_ASSERT(callable_method_entry_p(me));

    if (me->def->type != VM_METHOD_TYPE_CFUNC) {
        return false;
    }
    else {
#if __has_attribute(transparent_union)
        return me->def->body.cfunc.func == func.anyargs;
#else
        return me->def->body.cfunc.func == func;
#endif
    }
}

    return me && METHOD_ENTRY_BASIC(me);

    VM_ASSERT(iseq != NULL);

    return check_cfunc(cme, func);

    return check_cfunc(me, func);

    return vm_method_cfunc_is(iseq, cd, recv, func);

#define check_cfunc(me, func) check_cfunc(me, make_cfunc_type(func))
#define vm_method_cfunc_is(iseq, cd, recv, func) vm_method_cfunc_is(iseq, cd, recv, make_cfunc_type(func))
#define EQ_UNREDEFINED_P(t) BASIC_OP_UNREDEFINED_P(BOP_EQ, t##_REDEFINED_OP_FLAG)
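/* opt_equality first tries opt_equality_specialized() below: when both
 * operands are Fixnums/flonums/static symbols and the corresponding #== has
 * not been redefined (EQ_UNREDEFINED_P), equality degenerates to pointer
 * identity; Float and String receive dedicated comparisons.  A Qundef
 * result means "not special, do a real method dispatch". */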
opt_equality_specialized(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) && EQ_UNREDEFINED_P(INTEGER)) {
        goto compare_by_identity;
    }
    else if (FLONUM_2_P(recv, obj) && EQ_UNREDEFINED_P(FLOAT)) {
        goto compare_by_identity;
    }
    else if (STATIC_SYM_P(recv) && STATIC_SYM_P(obj) && EQ_UNREDEFINED_P(SYMBOL)) {
        goto compare_by_identity;
    }
    else if (SPECIAL_CONST_P(recv)) {
        /* fall through to the slow path */
    }
    else if (RBASIC_CLASS(recv) == rb_cFloat && RBASIC_CLASS(obj) == rb_cFloat && EQ_UNREDEFINED_P(FLOAT)) {
        double a = RFLOAT_VALUE(recv);
        double b = RFLOAT_VALUE(obj);

        return RBOOL(a == b);
    }
    else if (RBASIC_CLASS(recv) == rb_cString && EQ_UNREDEFINED_P(STRING)) {
        if (recv == obj) {
            return Qtrue;
        }
        else if (RB_TYPE_P(obj, T_STRING)) {
            return rb_str_eql_internal(obj, recv);
        }
    }
    return Qundef;

  compare_by_identity:
    return RBOOL(recv == obj);
}
    VM_ASSERT(cd_owner != NULL);

    VALUE val = opt_equality_specialized(recv, obj);
    if (!UNDEF_P(val)) return val;

    if (!vm_method_cfunc_is(cd_owner, cd, recv, rb_obj_equal)) {
        return Qundef;
    }
    else {
        return RBOOL(recv == obj);
    }
}

#undef EQ_UNREDEFINED_P
NOINLINE(static VALUE opt_equality_by_mid_slowpath(VALUE recv, VALUE obj, ID mid));

static VALUE
opt_equality_by_mid_slowpath(VALUE recv, VALUE obj, ID mid)
{
    const struct rb_callcache *cc = gccct_method_search(GET_EC(), recv, mid, &VM_CI_ON_STACK(mid, 0, 1, NULL));

    if (cc && check_cfunc(vm_cc_cme(cc), rb_obj_equal)) {
        return RBOOL(recv == obj);
    }
    else {
        return Qundef;
    }
}

static VALUE
opt_equality_by_mid(VALUE recv, VALUE obj, ID mid)
{
    VALUE val = opt_equality_specialized(recv, obj);
    if (!UNDEF_P(val)) {
        return val;
    }
    else {
        return opt_equality_by_mid_slowpath(recv, obj, mid);
    }
}

VALUE
rb_equal_opt(VALUE obj1, VALUE obj2)
{
    return opt_equality_by_mid(obj1, obj2, idEq);
}

VALUE
rb_eql_opt(VALUE obj1, VALUE obj2)
{
    return opt_equality_by_mid(obj1, obj2, idEqlP);
}
      case VM_CHECKMATCH_TYPE_WHEN:
        return pattern;
      case VM_CHECKMATCH_TYPE_RESCUE:
        if (!rb_obj_is_kind_of(pattern, rb_cModule)) {
            rb_raise(rb_eTypeError, "class or module required for rescue clause");
        }
        /* fall through */
      case VM_CHECKMATCH_TYPE_CASE: {
        return rb_vm_call_with_refinements(ec, pattern, idEqq, 1, &target, RB_NO_KEYWORDS);
      }
      default:
        rb_bug("check_match: unreachable");
double_cmp_lt(double a, double b)
{
    CHECK_CMP_NAN(a, b);
    return RBOOL(a < b);
}

static inline VALUE
double_cmp_le(double a, double b)
{
    CHECK_CMP_NAN(a, b);
    return RBOOL(a <= b);
}

static inline VALUE
double_cmp_gt(double a, double b)
{
    CHECK_CMP_NAN(a, b);
    return RBOOL(a > b);
}

static inline VALUE
double_cmp_ge(double a, double b)
{
    CHECK_CMP_NAN(a, b);
    return RBOOL(a >= b);
}
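/* vm_base_ptr() below recovers the start of a Ruby frame's local area from
 * the previous frame's SP plus local_table_size and the VM_ENV_DATA_SIZE
 * header; forwardable (...) frames additionally reserve the caller's CI,
 * and method frames adjust by one slot for self. */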
static inline VALUE *
vm_base_ptr(const rb_control_frame_t *cfp)
{
    const rb_control_frame_t *prev_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);

    if (cfp->iseq && VM_FRAME_RUBYFRAME_P(cfp)) {
        VALUE *bp = prev_cfp->sp + ISEQ_BODY(cfp->iseq)->local_table_size + VM_ENV_DATA_SIZE;

        if (ISEQ_BODY(cfp->iseq)->param.flags.forwardable && VM_ENV_LOCAL_P(cfp->ep)) {
            int lts = ISEQ_BODY(cfp->iseq)->local_table_size;
            int params = ISEQ_BODY(cfp->iseq)->param.size;

            CALL_INFO ci = (CALL_INFO)cfp->ep[-(VM_ENV_DATA_SIZE + (lts - params))]; /* skip EP stuff, CI should be last local */
            bp += vm_ci_argc(ci);
        }

        if (ISEQ_BODY(cfp->iseq)->type == ISEQ_TYPE_METHOD || VM_FRAME_BMETHOD_P(cfp)) {
            /* adjust for self */
            bp += 1;
        }
#if VM_DEBUG_BP_CHECK
        if (bp != cfp->bp_check) {
            ruby_debug_printf("bp_check: %ld, bp: %ld\n",
                              (long)(cfp->bp_check - GET_EC()->vm_stack),
                              (long)(bp - GET_EC()->vm_stack));
            rb_bug("vm_base_ptr: unreachable");
        }
#endif
        return bp;
    }
    else {
        return NULL;
    }
}

VALUE *
rb_vm_base_ptr(const rb_control_frame_t *cfp)
{
    return vm_base_ptr(cfp);
}
static vm_call_handler vm_call_iseq_setup_func(const struct rb_callinfo *ci, const int param_size, const int local_size);

static VALUE
vm_call_iseq_setup_tailcall_0start(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_iseq_setup_tailcall_0start);

    return vm_call_iseq_setup_tailcall(ec, cfp, calling, 0);
}

static VALUE
vm_call_iseq_setup_normal_0start(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_iseq_setup_0start);

    const struct rb_callcache *cc = calling->cc;
    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    int param = ISEQ_BODY(iseq)->param.size;
    int local = ISEQ_BODY(iseq)->local_table_size;
    return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);
}
    return ISEQ_BODY(iseq)->param.flags.has_opt == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_kw == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
           ISEQ_BODY(iseq)->param.flags.accepts_no_kwarg == FALSE &&
           ISEQ_BODY(iseq)->param.flags.forwardable == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_block == FALSE;
}

bool
rb_iseq_only_optparam_p(const rb_iseq_t *iseq)
{
    return ISEQ_BODY(iseq)->param.flags.has_opt == TRUE &&
           ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_kw == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
           ISEQ_BODY(iseq)->param.flags.accepts_no_kwarg == FALSE &&
           ISEQ_BODY(iseq)->param.flags.forwardable == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_block == FALSE;
}

bool
rb_iseq_only_kwparam_p(const rb_iseq_t *iseq)
{
    return ISEQ_BODY(iseq)->param.flags.has_opt == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_kw == TRUE &&
           ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
           ISEQ_BODY(iseq)->param.flags.forwardable == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_block == FALSE;
}
#define ALLOW_HEAP_ARGV (-2)
#define ALLOW_HEAP_ARGV_KEEP_KWSPLAT (-3)
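/* Negative max_args values are sentinels: ALLOW_HEAP_ARGV lets a splat that
 * would exceed VM_ARGC_STACK_MAX be collected into a hidden Array ("heap
 * argv") instead of being spilled onto the VM stack, and
 * ALLOW_HEAP_ARGV_KEEP_KWSPLAT additionally keeps a trailing keyword-splat
 * hash inside that array. */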
    vm_check_canary(GET_EC(), cfp->sp);

    int argc = calling->argc;

    if (UNLIKELY(max_args <= ALLOW_HEAP_ARGV && len + argc > VM_ARGC_STACK_MAX)) {
        /* Avoid SystemStackError when splatting large arrays by storing arguments in
         * a temporary array, instead of trying to keep the arguments on the VM stack.
         */
        VALUE *argv = cfp->sp - argc;

        cfp->sp -= argc - 1;
        cfp->sp[-1] = argv_ary;

        calling->heap_argv = argv_ary;
    }
    else {
        long i;

        if (max_args >= 0 && len + argc > max_args) {
            /* If only a given max_args is allowed, copy up to max args */
            calling->argc += len - (max_args - argc + 1);
            len = max_args - argc + 1;
        }

        calling->heap_argv = 0;

        CHECK_VM_STACK_OVERFLOW(cfp, len);

        for (i = 0; i < len; i++) {
            *cfp->sp++ = ptr[i];
        }
    }
    const VALUE *const passed_keywords = vm_ci_kwarg(ci)->keywords;
    const int kw_len = vm_ci_kwarg(ci)->keyword_len;
    const VALUE h = rb_hash_new_with_size(kw_len);
    VALUE *sp = cfp->sp;
    int i;

    for (i=0; i<kw_len; i++) {
        rb_hash_aset(h, passed_keywords[i], (sp - kw_len)[i]);
    }
    (sp - kw_len)[0] = h;

    cfp->sp -= kw_len - 1;
    calling->argc -= kw_len - 1;
    calling->kw_splat = 1;
vm_caller_setup_keyword_hash(const struct rb_callinfo *ci, VALUE keyword_hash)
{
    if (UNLIKELY(!RB_TYPE_P(keyword_hash, T_HASH))) {
        if (keyword_hash != Qnil) {
            /* Convert a non-hash keyword splat to a new hash */
            keyword_hash = rb_hash_dup(rb_to_hash_type(keyword_hash));
        }
    }
    else if (!IS_ARGS_KW_SPLAT_MUT(ci) && !RHASH_EMPTY_P(keyword_hash)) {
        /* Convert a hash keyword splat to a new hash unless
         * a mutable keyword splat was passed.  Skip allocating a new hash
         * for an empty keyword splat, which is ignored by callers anyway.
         */
        keyword_hash = rb_hash_dup(keyword_hash);
    }
    return keyword_hash;
}
                 const struct rb_callinfo *restrict ci, int max_args)
{
    if (UNLIKELY(IS_ARGS_SPLAT(ci))) {
        if (IS_ARGS_KW_SPLAT(ci)) {
            /* f(*a, **kw) */
            VM_ASSERT(calling->kw_splat == 1);

            VALUE ary = cfp->sp[0];
            VALUE kwh = vm_caller_setup_keyword_hash(ci, cfp->sp[1]);

            /* splat a */
            if (vm_caller_setup_arg_splat(cfp, calling, ary, max_args)) return;

            /* put kw */
            if (UNLIKELY(calling->heap_argv)) {
                ((struct RHash *)kwh)->basic.flags |= RHASH_PASS_AS_KEYWORDS;
                if (max_args != ALLOW_HEAP_ARGV_KEEP_KWSPLAT) {
                    calling->kw_splat = 0;
                }
            }
            else {
                VM_ASSERT(calling->kw_splat == 1);
            }
        }
        else {
            /* f(*a) */
            VM_ASSERT(calling->kw_splat == 0);

            VALUE ary = cfp->sp[0];

            if (vm_caller_setup_arg_splat(cfp, calling, ary, max_args)) {
                VALUE last_hash, argv_ary;
                if (UNLIKELY(argv_ary = calling->heap_argv)) {
                    if (!IS_ARGS_KEYWORD(ci) &&
                        RB_TYPE_P((last_hash = rb_ary_last(0, NULL, argv_ary)), T_HASH) &&
                        (((struct RHash *)last_hash)->basic.flags & RHASH_PASS_AS_KEYWORDS)) {

                        calling->kw_splat = 1;
                    }
                }
                else {
                    if (!IS_ARGS_KEYWORD(ci) &&
                        calling->argc > 0 &&
                        RB_TYPE_P((last_hash = cfp->sp[-1]), T_HASH) &&
                        (((struct RHash *)last_hash)->basic.flags & RHASH_PASS_AS_KEYWORDS)) {

                        cfp->sp[-1] = rb_hash_dup(last_hash);
                        calling->kw_splat = 1;
                    }
                }
            }
        }
    }
    else if (UNLIKELY(IS_ARGS_KW_SPLAT(ci))) {
        /* f(**kw) */
        VM_ASSERT(calling->kw_splat == 1);
        VALUE kwh = vm_caller_setup_keyword_hash(ci, cfp->sp[-1]);

        if (kwh == Qnil || RHASH_EMPTY_P(kwh)) {
            cfp->sp--;
            calling->argc--;
            calling->kw_splat = 0;
        }
        else {
            cfp->sp[-1] = kwh;
        }
    }
    else if (UNLIKELY(IS_ARGS_KEYWORD(ci))) {
        /* f(k1:1, k2:2) */
        VM_ASSERT(calling->kw_splat == 0);

        /* copy kw_argv */
        vm_caller_setup_arg_kw(cfp, calling, ci);
    }
}
#define USE_OPT_HIST 0

#if USE_OPT_HIST
#define OPT_HIST_MAX 64
static int opt_hist[OPT_HIST_MAX+1];

static void
opt_hist_show_results_at_exit(void)
{
    for (int i=0; i<OPT_HIST_MAX; i++) {
        ruby_debug_printf("opt_hist\t%d\t%d\n", i, opt_hist[i]);
    }
}
#endif
    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
    const int opt = calling->argc - lead_num;
    const int opt_num = ISEQ_BODY(iseq)->param.opt_num;
    const int opt_pc = (int)ISEQ_BODY(iseq)->param.opt_table[opt];
    const int param = ISEQ_BODY(iseq)->param.size;
    const int local = ISEQ_BODY(iseq)->local_table_size;
    const int delta = opt_num - opt;

    RB_DEBUG_COUNTER_INC(ccf_iseq_opt);

#if USE_OPT_HIST
    if (opt_pc < OPT_HIST_MAX) {
        opt_hist[opt]++;
    }
    else {
        opt_hist[OPT_HIST_MAX]++;
    }
#endif

    return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), opt_pc, param - delta, local);
}

static VALUE
vm_call_iseq_setup_tailcall_opt_start(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    const struct rb_callcache *cc = calling->cc;
    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
    const int opt = calling->argc - lead_num;
    const int opt_pc = (int)ISEQ_BODY(iseq)->param.opt_table[opt];

    RB_DEBUG_COUNTER_INC(ccf_iseq_opt);

#if USE_OPT_HIST
    if (opt_pc < OPT_HIST_MAX) {
        opt_hist[opt]++;
    }
    else {
        opt_hist[OPT_HIST_MAX]++;
    }
#endif

    return vm_call_iseq_setup_tailcall(ec, cfp, calling, opt_pc);
}
static void args_setup_kw_parameters(rb_execution_context_t *const ec, const rb_iseq_t *const iseq, const rb_callable_method_entry_t *cme,
                                     VALUE *const passed_values, const int passed_keyword_len, const VALUE *const passed_keywords,
                                     VALUE *const locals);

static VALUE
vm_call_iseq_forwardable(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    const struct rb_callcache *cc = calling->cc;
    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    int param_size = ISEQ_BODY(iseq)->param.size;
    int local_size = ISEQ_BODY(iseq)->local_table_size;

    VM_ASSERT(ISEQ_BODY(iseq)->param.flags.forwardable);

    local_size = local_size + vm_ci_argc(calling->cd->ci);
    param_size = param_size + vm_ci_argc(calling->cd->ci);

    cfp->sp[0] = (VALUE)calling->cd->ci;

    return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param_size, local_size);
}
    VM_ASSERT(vm_ci_flag(ci) & VM_CALL_KWARG);
    RB_DEBUG_COUNTER_INC(ccf_iseq_kw1);

    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;
    const struct rb_callinfo_kwarg *kw_arg = vm_ci_kwarg(ci);
    const int ci_kw_len = kw_arg->keyword_len;
    const VALUE * const ci_keywords = kw_arg->keywords;
    VALUE *argv = cfp->sp - calling->argc;
    VALUE * const klocals = argv + kw_param->bits_start - kw_param->num;
    const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
    VALUE * const ci_kws = ALLOCA_N(VALUE, ci_kw_len);
    MEMCPY(ci_kws, argv + lead_num, VALUE, ci_kw_len);
    args_setup_kw_parameters(ec, iseq, vm_cc_cme(cc), ci_kws, ci_kw_len, ci_keywords, klocals);

    int param = ISEQ_BODY(iseq)->param.size;
    int local = ISEQ_BODY(iseq)->local_table_size;
    return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);
}
    const struct rb_callinfo *MAYBE_UNUSED(ci) = calling->cd->ci;
    const struct rb_callcache *cc = calling->cc;

    VM_ASSERT((vm_ci_flag(ci) & VM_CALL_KWARG) == 0);
    RB_DEBUG_COUNTER_INC(ccf_iseq_kw2);

    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;
    VALUE * const argv = cfp->sp - calling->argc;
    VALUE * const klocals = argv + kw_param->bits_start - kw_param->num;

    int i;
    for (i=0; i<kw_param->num; i++) {
        klocals[i] = kw_param->default_values[i];
    }

    int param = ISEQ_BODY(iseq)->param.size;
    int local = ISEQ_BODY(iseq)->local_table_size;
    return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);
}
    cfp->sp -= (calling->argc + 1);
    rb_insn_func_t func_ptr = (rb_insn_func_t)(uintptr_t)bf->func_ptr;
    return builtin_invoker0(ec, calling->recv, NULL, func_ptr);
    set_table *dup_check_table = vm->unused_block_warning_table;
    st_data_t key;
    bool strict_unused_block = rb_warning_category_enabled_p(RB_WARN_CATEGORY_STRICT_UNUSED_BLOCK);

    union {
        VALUE v;
        unsigned char b[SIZEOF_VALUE];
    } k1 = {
        .v = (VALUE)pc,
    }, k2 = {
        .v = (VALUE)cme->def,
    };

    /* relax check */
    if (!strict_unused_block) {
        key = (st_data_t)cme->def->original_id;

        if (set_table_lookup(dup_check_table, key)) {
            return;
        }
    }

    /* strict check: make a unique key from pc and me->def pointer */
    key = 0;
    for (int i=0; i<SIZEOF_VALUE; i++) {
        key |= (st_data_t)(k1.b[i] ^ k2.b[SIZEOF_VALUE-1-i]) << (8 * i);
    }

    if (0) {
        fprintf(stderr, "pc:%p def:%p\n", pc, (void *)cme->def);
        fprintf(stderr, "key:%p\n", (void *)key);
    }

    /* duplication check */
    if (set_insert(dup_check_table, key)) {
        /* already shown */
    }
    else if (rb_warning_category_enabled_p(RB_WARN_CATEGORY_PERFORMANCE)) {
        VALUE name = rb_gen_method_name(cme->defined_class, ISEQ_BODY(iseq)->location.base_label);

        if (!NIL_P(m_loc)) {
            rb_warn("the block passed to '%"PRIsVALUE"' defined at %"PRIsVALUE":%"PRIsVALUE" may be ignored",
                    name, RARRAY_AREF(m_loc, 0), RARRAY_AREF(m_loc, 1));
        }
        else {
            rb_warn("the block may be ignored because '%"PRIsVALUE"' does not use a block", name);
        }
    }
                    const rb_iseq_t *iseq, VALUE *argv, int param_size, int local_size)
{
    const struct rb_callinfo *ci = calling->cd->ci;
    const struct rb_callcache *cc = calling->cc;

    VM_ASSERT((vm_ci_argc(ci), 1));
    VM_ASSERT(vm_cc_cme(cc) != NULL);

    if (UNLIKELY(!ISEQ_BODY(iseq)->param.flags.use_block &&
                 calling->block_handler != VM_BLOCK_HANDLER_NONE &&
                 !(vm_ci_flag(calling->cd->ci) & (VM_CALL_OPT_SEND | VM_CALL_SUPER)))) {
        warn_unused_block(vm_cc_cme(cc), iseq, (void *)ec->cfp->pc);
    }

    if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_KW_SPLAT))) {
        if (LIKELY(rb_simple_iseq_p(iseq))) {
            rb_control_frame_t *cfp = ec->cfp;
            int lead_num = ISEQ_BODY(iseq)->param.lead_num;
            CALLER_SETUP_ARG(cfp, calling, ci, lead_num);

            if (calling->argc != lead_num) {
                argument_arity_error(ec, iseq, vm_cc_cme(cc), calling->argc, lead_num, lead_num);
            }

            VM_ASSERT(ci == calling->cd->ci);
            VM_ASSERT(cc == calling->cc);

            if (vm_call_iseq_optimizable_p(ci, cc)) {
                if ((iseq->body->builtin_attrs & BUILTIN_ATTR_SINGLE_NOARG_LEAF) && ruby_vm_c_events_enabled == 0) {
                    VM_ASSERT(iseq->body->builtin_attrs & BUILTIN_ATTR_LEAF);
                    vm_cc_bf_set(cc, (void *)iseq->body->iseq_encoded[1]);
                    CC_SET_FASTPATH(cc, vm_call_single_noarg_leaf_builtin, true);
                }
                else {
                    CC_SET_FASTPATH(cc, vm_call_iseq_setup_func(ci, param_size, local_size), true);
                }
            }
            return 0;
        }
        else if (rb_iseq_only_optparam_p(iseq)) {
            rb_control_frame_t *cfp = ec->cfp;

            const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
            const int opt_num = ISEQ_BODY(iseq)->param.opt_num;

            CALLER_SETUP_ARG(cfp, calling, ci, lead_num + opt_num);
            const int argc = calling->argc;
            const int opt = argc - lead_num;

            if (opt < 0 || opt > opt_num) {
                argument_arity_error(ec, iseq, vm_cc_cme(cc), argc, lead_num, lead_num + opt_num);
            }

            if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_TAILCALL))) {
                CC_SET_FASTPATH(cc, vm_call_iseq_setup_normal_opt_start,
                                !IS_ARGS_SPLAT(ci) && !IS_ARGS_KEYWORD(ci) &&
                                vm_call_cacheable(ci, cc));
            }
            else {
                CC_SET_FASTPATH(cc, vm_call_iseq_setup_tailcall_opt_start,
                                !IS_ARGS_SPLAT(ci) && !IS_ARGS_KEYWORD(ci) &&
                                vm_call_cacheable(ci, cc));
            }

            /* initialize opt vars for self-references */
            VM_ASSERT((int)ISEQ_BODY(iseq)->param.size == lead_num + opt_num);
            for (int i=argc; i<lead_num + opt_num; i++) {
                argv[i] = Qnil;
            }
            return (int)ISEQ_BODY(iseq)->param.opt_table[opt];
        }
        else if (rb_iseq_only_kwparam_p(iseq) && !IS_ARGS_SPLAT(ci)) {
            const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
            const int argc = calling->argc;
            const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;

            if (vm_ci_flag(ci) & VM_CALL_KWARG) {
                const struct rb_callinfo_kwarg *kw_arg = vm_ci_kwarg(ci);

                if (argc - kw_arg->keyword_len == lead_num) {
                    const int ci_kw_len = kw_arg->keyword_len;
                    const VALUE * const ci_keywords = kw_arg->keywords;
                    VALUE * const ci_kws = ALLOCA_N(VALUE, ci_kw_len);
                    MEMCPY(ci_kws, argv + lead_num, VALUE, ci_kw_len);

                    VALUE * const klocals = argv + kw_param->bits_start - kw_param->num;
                    args_setup_kw_parameters(ec, iseq, vm_cc_cme(cc), ci_kws, ci_kw_len, ci_keywords, klocals);

                    CC_SET_FASTPATH(cc, vm_call_iseq_setup_kwparm_kwarg,
                                    vm_call_cacheable(ci, cc));

                    return 0;
                }
            }
            else if (argc == lead_num) {
                /* no kwarg */
                VALUE * const klocals = argv + kw_param->bits_start - kw_param->num;
                args_setup_kw_parameters(ec, iseq, vm_cc_cme(cc), NULL, 0, NULL, klocals);

                if (klocals[kw_param->num] == INT2FIX(0)) {
                    /* copy from default_values */
                    CC_SET_FASTPATH(cc, vm_call_iseq_setup_kwparm_nokwarg,
                                    vm_call_cacheable(ci, cc));
                }

                return 0;
            }
        }
    }

    /* The callee iseq uses ... (a "forwardable" parameter list) */
    if (ISEQ_BODY(iseq)->param.flags.forwardable) {
        bool can_fastpath = true;

        if ((vm_ci_flag(ci) & VM_CALL_FORWARDING)) {
            struct rb_forwarding_call_data *forward_cd = (struct rb_forwarding_call_data *)calling->cd;
            if (vm_ci_argc(ci) != vm_ci_argc(forward_cd->caller_ci)) {
                ci = vm_ci_new_runtime(
                        vm_ci_mid(ci),
                        vm_ci_flag(ci),
                        vm_ci_argc(ci),
                        vm_ci_kwarg(ci));
            }
            else {
                ci = forward_cd->caller_ci;
            }
            can_fastpath = false;
        }
        /* C functions calling iseqs will stack-allocate a CI,
         * so it needs to be converted to a heap-allocated one */
        if (!vm_ci_markable(ci)) {
            ci = vm_ci_new_runtime(
                    vm_ci_mid(ci),
                    vm_ci_flag(ci),
                    vm_ci_argc(ci),
                    vm_ci_kwarg(ci));
            can_fastpath = false;
        }
        argv[param_size - 1] = (VALUE)ci;
        CC_SET_FASTPATH(cc, vm_call_iseq_forwardable, can_fastpath);
        return 0;
    }

    return setup_parameters_complex(ec, iseq, calling, ci, argv, arg_setup_method);
}
    const VALUE *lep = VM_CF_LEP(cfp);

    if (VM_ENV_FLAGS(lep, VM_ENV_FLAG_ESCAPED)) {

        iseq = rb_vm_search_cf_from_ep(ec, cfp, lep)->iseq;

    int local_size = ISEQ_BODY(iseq)->local_table_size + argc;

    const VALUE *from = lep - (local_size + VM_ENV_DATA_SIZE - 1);
    VALUE *to = cfp->sp - 1;

    CHECK_VM_STACK_OVERFLOW0(cfp, to, RARRAY_LEN(splat));

    CHECK_VM_STACK_OVERFLOW0(cfp, to, argc);

    cfp->sp = to + argc;
    RB_DEBUG_COUNTER_INC(ccf_iseq_setup);

    const struct rb_callcache *cc = calling->cc;
    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    int param_size = ISEQ_BODY(iseq)->param.size;
    int local_size = ISEQ_BODY(iseq)->local_table_size;

    RUBY_ASSERT(!ISEQ_BODY(iseq)->param.flags.forwardable);

    const int opt_pc = vm_callee_setup_arg(ec, calling, iseq, cfp->sp - calling->argc, param_size, local_size);
    return vm_call_iseq_setup_2(ec, cfp, calling, opt_pc, param_size, local_size);
}

    RB_DEBUG_COUNTER_INC(ccf_iseq_setup);

    const struct rb_callcache *cc = calling->cc;
    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    int param_size = ISEQ_BODY(iseq)->param.size;
    int local_size = ISEQ_BODY(iseq)->local_table_size;

    RUBY_ASSERT(ISEQ_BODY(iseq)->param.flags.forwardable);

    /* Setting up local size and param size */
    local_size = local_size + vm_ci_argc(calling->cd->ci);
    param_size = param_size + vm_ci_argc(calling->cd->ci);

    const int opt_pc = vm_callee_setup_arg(ec, calling, iseq, cfp->sp - calling->argc, param_size, local_size);
    return vm_call_iseq_setup_2(ec, cfp, calling, opt_pc, param_size, local_size);
                     int opt_pc, int param_size, int local_size)
{
    const struct rb_callinfo *ci = calling->cd->ci;
    const struct rb_callcache *cc = calling->cc;

    if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_TAILCALL))) {
        return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), opt_pc, param_size, local_size);
    }
    else {
        return vm_call_iseq_setup_tailcall(ec, cfp, calling, opt_pc);
    }
}

static inline VALUE
vm_call_iseq_setup_normal(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling,
                          const rb_callable_method_entry_t *me,
                          int opt_pc, int param_size, int local_size)
{
    const rb_iseq_t *iseq = def_iseq_ptr(me->def);
    VALUE *argv = cfp->sp - calling->argc;
    VALUE *sp = argv + param_size;
    cfp->sp = argv - 1 /* recv */;

    vm_push_frame(ec, iseq, VM_FRAME_MAGIC_METHOD | VM_ENV_FLAG_LOCAL, calling->recv,
                  calling->block_handler, (VALUE)me,
                  ISEQ_BODY(iseq)->iseq_encoded + opt_pc, sp,
                  local_size - param_size,
                  ISEQ_BODY(iseq)->stack_max);
    return Qundef;
}
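/* Tailcall setup below differs from the normal path in that it pops the
 * current frame first (vm_pop_frame), copies recv and the arguments down to
 * the reclaimed stack region, and only then pushes the callee frame, so the
 * control-frame stack does not grow on self-recursive tail calls. */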
    unsigned int i;
    VALUE *argv = cfp->sp - calling->argc;
    const rb_callable_method_entry_t *me = vm_cc_cme(calling->cc);
    const rb_iseq_t *iseq = def_iseq_ptr(me->def);
    VALUE *src_argv = argv;
    VALUE *sp_orig, *sp;
    VALUE finish_flag = VM_FRAME_FINISHED_P(cfp) ? VM_FRAME_FLAG_FINISH : 0;

    if (VM_BH_FROM_CFP_P(calling->block_handler, cfp)) {
        struct rb_captured_block *dst_captured = VM_CFP_TO_CAPTURED_BLOCK(RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp));
        const struct rb_captured_block *src_captured = VM_BH_TO_CAPT_BLOCK(calling->block_handler);
        dst_captured->code.val = src_captured->code.val;
        if (VM_BH_ISEQ_BLOCK_P(calling->block_handler)) {
            calling->block_handler = VM_BH_FROM_ISEQ_BLOCK(dst_captured);
        }
        else {
            calling->block_handler = VM_BH_FROM_IFUNC_BLOCK(dst_captured);
        }
    }

    vm_pop_frame(ec, cfp, cfp->ep);
    cfp = ec->cfp;

    sp_orig = sp = cfp->sp;

    /* push self */
    sp[0] = calling->recv;
    sp++;

    /* copy arguments */
    for (i=0; i < ISEQ_BODY(iseq)->param.size; i++) {
        *sp++ = src_argv[i];
    }

    vm_push_frame(ec, iseq, VM_FRAME_MAGIC_METHOD | VM_ENV_FLAG_LOCAL | finish_flag,
                  calling->recv, calling->block_handler, (VALUE)me,
                  ISEQ_BODY(iseq)->iseq_encoded + opt_pc, sp,
                  ISEQ_BODY(iseq)->local_table_size - ISEQ_BODY(iseq)->param.size,
                  ISEQ_BODY(iseq)->stack_max);

    cfp->sp = sp_orig;

    return Qundef;
ractor_unsafe_check(void)
{
    if (!rb_ractor_main_p()) {
        rb_raise(rb_eRactorUnsafeError, "ractor unsafe method called from not main ractor");
    }
}
3559 ractor_unsafe_check();
3567 ractor_unsafe_check();
3569 return (*f)(argc, argv, recv);
3575 ractor_unsafe_check();
3583 ractor_unsafe_check();
3585 return (*f)(recv, argv[0]);
3591 ractor_unsafe_check();
3593 return (*f)(recv, argv[0], argv[1]);
3599 ractor_unsafe_check();
3601 return (*f)(recv, argv[0], argv[1], argv[2]);
3607 ractor_unsafe_check();
3609 return (*f)(recv, argv[0], argv[1], argv[2], argv[3]);
3615 ractor_unsafe_check();
3616 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3617 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4]);
3623 ractor_unsafe_check();
3624 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3625 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
3631 ractor_unsafe_check();
3632 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3633 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
3639 ractor_unsafe_check();
3640 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3641 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
3647 ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3649 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
3655 ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3657 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
3663 ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3665 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
3671 ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3673 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
3679 ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3681 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
3687 ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3689 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
3695 ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3697 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
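/*
 * Ractor-safe counterparts: identical trampolines minus the
 * ractor_unsafe_check() guard, used for C methods registered as safe to
 * run on any Ractor.
 */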
3711 return (*f)(argc, argv, recv);
3725 return (*f)(recv, argv[0]);
3732 return (*f)(recv, argv[0], argv[1]);
3739 return (*f)(recv, argv[0], argv[1], argv[2]);
3746 return (*f)(recv, argv[0], argv[1], argv[2], argv[3]);
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3753 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4]);
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3760 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3767 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3774 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3781 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3788 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3795 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3802 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3809 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3816 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3823 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
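/* Frame consistency: after a cfunc returns, reg_cfp must sit exactly one
 * frame above ec->cfp; the only tolerated mismatch is a pending stack
 * overflow, whose raised flag is consumed here. CHECK_CFP_CONSISTENCY
 * (below) turns any other mismatch into rb_bug(). */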
    const int ov_flags = RAISED_STACKOVERFLOW;

    if (LIKELY(reg_cfp == ec->cfp + 1)) return TRUE;

    if (rb_ec_raised_p(ec, ov_flags)) {
        rb_ec_raised_reset(ec, ov_flags);
3838#define CHECK_CFP_CONSISTENCY(func) \
3839 (LIKELY(vm_cfp_consistent_p(ec, reg_cfp)) ? (void)0 : \
3840 rb_bug(func ": cfp consistency error (%p, %p)", (void *)reg_cfp, (void *)(ec->cfp+1)))
3846#if VM_DEBUG_VERIFY_METHOD_CACHE
3847 switch (me->def->type) {
3848 case VM_METHOD_TYPE_CFUNC:
3849 case VM_METHOD_TYPE_NOTIMPLEMENTED:
3851# define METHOD_BUG(t) case VM_METHOD_TYPE_##t: rb_bug("wrong method type: " #t)
3853 METHOD_BUG(ATTRSET);
3855 METHOD_BUG(BMETHOD);
3858 METHOD_BUG(OPTIMIZED);
3859 METHOD_BUG(MISSING);
3860 METHOD_BUG(REFINED);
        rb_bug("wrong method type: %d", me->def->type);
3867 return UNALIGNED_MEMBER_PTR(me->def, body.cfunc);
3874 RB_DEBUG_COUNTER_INC(ccf_cfunc_with_frame);
3881 VALUE recv = calling->recv;
3882 VALUE block_handler = calling->block_handler;
3883 VALUE frame_type = VM_FRAME_MAGIC_CFUNC | VM_FRAME_FLAG_CFRAME | VM_ENV_FLAG_LOCAL;
3885 if (UNLIKELY(calling->kw_splat)) {
3886 frame_type |= VM_FRAME_FLAG_CFRAME_KW;
3889 VM_ASSERT(reg_cfp == ec->cfp);
3891 RUBY_DTRACE_CMETHOD_ENTRY_HOOK(ec, me->owner, me->def->original_id);
    vm_push_frame(ec, NULL, frame_type, recv,
                  block_handler, (VALUE)me,
                  0, ec->cfp->sp, 0, 0);
3898 int len = cfunc->argc;
3901 reg_cfp->sp = stack_bottom;
3902 val = (*cfunc->invoker)(recv, argc, argv, cfunc->func);
    CHECK_CFP_CONSISTENCY("vm_call_cfunc");
3906 rb_vm_pop_frame(ec);
3908 VM_ASSERT(ec->cfp->sp == stack_bottom);
    EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_RETURN, recv, me->def->original_id, vm_ci_mid(ci), me->owner, val);
3911 RUBY_DTRACE_CMETHOD_RETURN_HOOK(ec, me->owner, me->def->original_id);
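/* The helper below pushes a bare CFUNC frame without performing a call:
 * the receiver is located beneath the arguments already on the VM stack
 * (recv_idx slots down). Callers, likely JIT-generated code, perform the
 * actual C call themselves and pop the frame afterwards. */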
3921 VM_ASSERT(cme->def->type == VM_METHOD_TYPE_CFUNC);
3923 VALUE *sp = ec->cfp->sp;
3924 VALUE recv = *(sp - recv_idx - 1);
3925 VALUE frame_type = VM_FRAME_MAGIC_CFUNC | VM_FRAME_FLAG_CFRAME | VM_ENV_FLAG_LOCAL;
3926 VALUE block_handler = VM_BLOCK_HANDLER_NONE;
3927#if VM_CHECK_MODE > 0
    *(GET_EC()->cfp->sp) = Qfalse;
#endif
    vm_push_frame(ec, NULL, frame_type, recv, block_handler, (VALUE)cme, 0, ec->cfp->sp, 0, 0);
static bool
rb_splat_or_kwargs_p(const struct rb_callinfo *restrict ci)
{
    return IS_ARGS_SPLAT(ci) || IS_ARGS_KW_OR_KW_SPLAT(ci);
3944 int argc = calling->argc;
3945 VALUE *stack_bottom = reg_cfp->sp - argc - 1;
3946 VALUE *argv = &stack_bottom[1];
3948 return vm_call_cfunc_with_frame_(ec, reg_cfp, calling, argc, argv, stack_bottom);
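/* Generic cfunc call path: arguments are normalized onto the VM stack
 * (splats too large for the stack are kept in a heap argv array), then
 * handed to vm_call_cfunc_with_frame_ above. Once the call shape is
 * known, a specialized handler is installed via CC_SET_FASTPATH. */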
3955 RB_DEBUG_COUNTER_INC(ccf_cfunc_other);
3957 CALLER_SETUP_ARG(reg_cfp, calling, ci, ALLOW_HEAP_ARGV_KEEP_KWSPLAT);
3959 if (UNLIKELY(argv_ary = calling->heap_argv)) {
3960 VM_ASSERT(!IS_ARGS_KEYWORD(ci));
3963 VALUE *stack_bottom = reg_cfp->sp - 2;
3965 VM_ASSERT(calling->argc == 1);
3969 return vm_call_cfunc_with_frame_(ec, reg_cfp, calling, argc, argv, stack_bottom);
3972 CC_SET_FASTPATH(calling->cc, vm_call_cfunc_with_frame, !rb_splat_or_kwargs_p(ci) && !calling->kw_splat && !(vm_ci_flag(ci) & VM_CALL_FORWARDING));
3974 return vm_call_cfunc_with_frame(ec, reg_cfp, calling);
3981 VALUE argv_ary = reg_cfp->sp[-1 - stack_offset];
3984 if (UNLIKELY(argc > VM_ARGC_STACK_MAX)) {
3985 return vm_call_cfunc_other(ec, reg_cfp, calling);
3989 calling->kw_splat = 0;
3991 VALUE *stack_bottom = reg_cfp->sp - 2 - stack_offset;
3992 VALUE *sp = stack_bottom;
3993 CHECK_VM_STACK_OVERFLOW(reg_cfp, argc);
3994 for(i = 0; i < argc; i++) {
3999 return vm_call_cfunc_with_frame_(ec, reg_cfp, calling, argc, stack_bottom+1, stack_bottom);
4005 RB_DEBUG_COUNTER_INC(ccf_cfunc_only_splat);
4006 VALUE argv_ary = reg_cfp->sp[-1];
4010 int argc_offset = 0;
4012 if (UNLIKELY(argc > 0 &&
                 (((struct RHash *)last_hash)->basic.flags & RHASH_PASS_AS_KEYWORDS))) {
4016 return vm_call_cfunc_other(ec, reg_cfp, calling);
4020 return vm_call_cfunc_array_argv(ec, reg_cfp, calling, 0, argc_offset);
4026 RB_DEBUG_COUNTER_INC(ccf_cfunc_only_splat_kw);
4027 VALUE keyword_hash = reg_cfp->sp[-1];
4030 return vm_call_cfunc_array_argv(ec, reg_cfp, calling, 1, 0);
4033 return vm_call_cfunc_other(ec, reg_cfp, calling);
4040 RB_DEBUG_COUNTER_INC(ccf_cfunc);
4042 if (IS_ARGS_SPLAT(ci) && !(vm_ci_flag(ci) & VM_CALL_FORWARDING)) {
4043 if (!IS_ARGS_KW_SPLAT(ci) && vm_ci_argc(ci) == 1) {
4045 CC_SET_FASTPATH(calling->cc, vm_call_cfunc_only_splat, TRUE);
4046 return vm_call_cfunc_only_splat(ec, reg_cfp, calling);
4048 if (IS_ARGS_KW_SPLAT(ci) && vm_ci_argc(ci) == 2) {
4050 CC_SET_FASTPATH(calling->cc, vm_call_cfunc_only_splat_kw, TRUE);
4051 return vm_call_cfunc_only_splat_kw(ec, reg_cfp, calling);
4055 CC_SET_FASTPATH(calling->cc, vm_call_cfunc_other, TRUE);
4056 return vm_call_cfunc_other(ec, reg_cfp, calling);
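/* Attribute accessor fastpaths: attr_reader is answered via vm_getivar
 * and attr_writer via vm_setivar, using the shape id and ivar index
 * cached in the call cache, so no Ruby-level frame is needed. */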
4063 RB_DEBUG_COUNTER_INC(ccf_ivar);
    VALUE ivar = vm_getivar(calling->recv, vm_cc_cme(cc)->def->body.attr.id, NULL, NULL, cc, TRUE, Qnil);
4072 RB_DEBUG_COUNTER_INC(ccf_attrset);
4073 VALUE val = *(cfp->sp - 1);
4076 shape_id_t dest_shape_id;
4077 vm_cc_atomic_shape_and_index(cc, &dest_shape_id, &index);
4078 ID id = vm_cc_cme(cc)->def->body.attr.id;
4079 rb_check_frozen(obj);
    VALUE res = vm_setivar(obj, id, val, dest_shape_id, index);
        res = vm_setivar_class(obj, id, val, dest_shape_id, index);
4089 if (!UNDEF_P(res)) {
        res = vm_setivar_default(obj, id, val, dest_shape_id, index);
4097 if (!UNDEF_P(res)) {
    res = vm_setivar_slowpath_attr(obj, id, val, cc);
4110 return vm_call_attrset_direct(ec, cfp, calling->cc, calling->recv);
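/* bmethod invocation (methods defined with define_method). The backing
 * Proc may not be shareable, so calling a bmethod defined in a different
 * Ractor is rejected before the Proc's block is entered. */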
4120 VALUE procv = cme->def->body.bmethod.proc;
4123 cme->def->body.bmethod.defined_ractor_id != rb_ractor_id(rb_ec_ractor_ptr(ec))) {
        rb_raise(rb_eRuntimeError, "defined with an un-shareable Proc in a different Ractor");
4128 GetProcPtr(procv, proc);
4129 val = vm_invoke_bmethod(ec, proc, calling->recv, CALLING_ARGC(calling), argv, calling->kw_splat, calling->block_handler, vm_cc_cme(cc));
4139 RB_DEBUG_COUNTER_INC(ccf_iseq_bmethod);
4143 VALUE procv = cme->def->body.bmethod.proc;
4146 cme->def->body.bmethod.defined_ractor_id != rb_ractor_id(rb_ec_ractor_ptr(ec))) {
        rb_raise(rb_eRuntimeError, "defined with an un-shareable Proc in a different Ractor");
4151 GetProcPtr(procv, proc);
4152 const struct rb_block *block = &proc->block;
4154 while (vm_block_type(block) == block_type_proc) {
4155 block = vm_proc_block(block->as.proc);
4157 VM_ASSERT(vm_block_type(block) == block_type_iseq);
4160 const rb_iseq_t *iseq = rb_iseq_check(captured->code.iseq);
    VALUE *const argv = cfp->sp - calling->argc;
4162 const int arg_size = ISEQ_BODY(iseq)->param.size;
4165 if (vm_ci_flag(calling->cd->ci) & VM_CALL_ARGS_SIMPLE) {
4166 opt_pc = vm_callee_setup_block_arg(ec, calling, calling->cd->ci, iseq, argv, arg_setup_method);
4169 opt_pc = setup_parameters_complex(ec, iseq, calling, calling->cd->ci, argv, arg_setup_method);
4174 vm_push_frame(ec, iseq,
4175 VM_FRAME_MAGIC_BLOCK | VM_FRAME_FLAG_BMETHOD | VM_FRAME_FLAG_LAMBDA,
4177 VM_GUARDED_PREV_EP(captured->ep),
4179 ISEQ_BODY(iseq)->iseq_encoded + opt_pc,
4181 ISEQ_BODY(iseq)->local_table_size - arg_size,
4182 ISEQ_BODY(iseq)->stack_max);
4190 RB_DEBUG_COUNTER_INC(ccf_noniseq_bmethod);
4194 CALLER_SETUP_ARG(cfp, calling, calling->cd->ci, ALLOW_HEAP_ARGV);
4195 if (UNLIKELY(calling->heap_argv)) {
4200 argc = calling->argc;
4203 cfp->sp += - argc - 1;
4206 return vm_call_bmethod_body(ec, calling, argv);
4212 RB_DEBUG_COUNTER_INC(ccf_bmethod);
4216 VALUE procv = cme->def->body.bmethod.proc;
4218 GetProcPtr(procv, proc);
4219 const struct rb_block *block = &proc->block;
4221 while (vm_block_type(block) == block_type_proc) {
4222 block = vm_proc_block(block->as.proc);
4224 if (vm_block_type(block) == block_type_iseq) {
4225 CC_SET_FASTPATH(cc, vm_call_iseq_bmethod, TRUE);
4226 return vm_call_iseq_bmethod(ec, cfp, calling);
4229 CC_SET_FASTPATH(cc, vm_call_noniseq_bmethod, TRUE);
4230 return vm_call_noniseq_bmethod(ec, cfp, calling);
static VALUE
rb_find_defined_class_by_owner(VALUE current_class, VALUE target_owner)
{
    VALUE klass = current_class;
    while (RTEST(klass)) {
4246 if (owner == target_owner) {
4252 return current_class;
4261 if (orig_me->defined_class == 0) {
4262 VALUE defined_class = rb_find_defined_class_by_owner(me->defined_class, orig_me->owner);
        VM_ASSERT_TYPE(orig_me->owner, T_MODULE);
4264 cme = rb_method_entry_complement_defined_class(orig_me, me->called_id, defined_class);
4266 if (me->def->reference_count == 1) {
4267 RB_OBJ_WRITE(me, &me->def->body.alias.original_me, cme);
4271 rb_method_definition_create(VM_METHOD_TYPE_ALIAS, me->def->original_id);
4279 VM_ASSERT(callable_method_entry_p(cme));
4286 return aliased_callable_method_entry(me);
    calling->cc = &VM_CC_ON_STACK(Qundef, vm_call_general, {{ 0 }},
                                  aliased_callable_method_entry(vm_cc_cme(calling->cc)));
4297 return vm_call_method_each_type(ec, cfp, calling);
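/* method_missing plumbing: ci_missing_reason records why dispatch failed
 * (no entry, private, protected, vcall, super) so the synthesized
 * method_missing call can raise the appropriate NoMethodError flavor. */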
4300static enum method_missing_reason
4303 enum method_missing_reason stat = MISSING_NOENTRY;
4304 if (vm_ci_flag(ci) & VM_CALL_VCALL && !(vm_ci_flag(ci) & VM_CALL_FORWARDING)) stat |= MISSING_VCALL;
4305 if (vm_ci_flag(ci) & VM_CALL_FCALL) stat |= MISSING_FCALL;
4306 if (vm_ci_flag(ci) & VM_CALL_SUPER) stat |= MISSING_SUPER;
4316 ASSUME(calling->argc >= 0);
4318 enum method_missing_reason missing_reason = MISSING_NOENTRY;
4319 int argc = calling->argc;
4320 VALUE recv = calling->recv;
4323 flags |= VM_CALL_OPT_SEND;
4325 if (UNLIKELY(! mid)) {
4326 mid = idMethodMissing;
4327 missing_reason = ci_missing_reason(ci);
4328 ec->method_missing_reason = missing_reason;
4331 if (UNLIKELY(argv_ary = calling->heap_argv)) {
4332 if (rb_method_basic_definition_p(klass, idMethodMissing)) {
4333 rb_ary_unshift(argv_ary, symbol);
4336 int priv = vm_ci_flag(ci) & (VM_CALL_FCALL | VM_CALL_VCALL);
4337 VALUE exc = rb_make_no_method_exception(
4359 CHECK_VM_STACK_OVERFLOW(reg_cfp, 1);
4362 argc = ++calling->argc;
4364 if (rb_method_basic_definition_p(klass, idMethodMissing)) {
4367 int priv = vm_ci_flag(ci) & (VM_CALL_FCALL | VM_CALL_VCALL);
4368 const VALUE *argv = STACK_ADDR_FROM_TOP(argc);
4369 VALUE exc = rb_make_no_method_exception(
4382 .ci = &VM_CI_ON_STACK(mid, flags, argc, vm_ci_kwarg(ci)),
4388 if (!(vm_ci_flag(ci) & VM_CALL_FORWARDING)) {
4389 calling->cd = &new_fcd.cd;
4393 VM_ASSERT((vm_ci_argc(caller_ci), 1));
4394 new_fcd.caller_ci = caller_ci;
4397 calling->cc = &VM_CC_ON_STACK(klass,
4399 { .method_missing_reason = missing_reason },
4400 rb_callable_method_entry_with_refinements(klass, mid, NULL));
4402 if (flags & VM_CALL_FCALL) {
4403 return vm_call_method(ec, reg_cfp, calling);
4407 VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));
4409 if (vm_cc_cme(cc) != NULL) {
4410 switch (METHOD_ENTRY_VISI(vm_cc_cme(cc))) {
4411 case METHOD_VISI_PUBLIC:
4412 return vm_call_method_each_type(ec, reg_cfp, calling);
4413 case METHOD_VISI_PRIVATE:
4414 vm_cc_method_missing_reason_set(cc, MISSING_PRIVATE);
4416 case METHOD_VISI_PROTECTED:
4417 vm_cc_method_missing_reason_set(cc, MISSING_PROTECTED);
4420 VM_UNREACHABLE(vm_call_method);
4422 return vm_call_method_missing(ec, reg_cfp, calling);
4425 return vm_call_method_nome(ec, reg_cfp, calling);
4435 i = calling->argc - 1;
4437 if (calling->argc == 0) {
        rb_raise(rb_eArgError, "no method name given");
4462 return vm_call_symbol(ec, reg_cfp, calling, ci, sym, flags);
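/* Optimized send: pull the method-name Symbol out of the argument list,
 * then re-enter regular dispatch under the resolved method id. The
 * "complex" variant handles splat/kwsplat argument shapes; the simple
 * variant just forwards the caller's flags with FCALL added. */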
4468 RB_DEBUG_COUNTER_INC(ccf_opt_send_complex);
4470 int flags = VM_CALL_FCALL;
4474 CALLER_SETUP_ARG(reg_cfp, calling, ci, ALLOW_HEAP_ARGV);
4475 if (UNLIKELY(argv_ary = calling->heap_argv)) {
4477 flags |= VM_CALL_ARGS_SPLAT;
4478 if (calling->kw_splat) {
4479 VALUE last_hash = rb_ary_last(0, NULL, argv_ary);
            ((struct RHash *)last_hash)->basic.flags |= RHASH_PASS_AS_KEYWORDS;
4481 calling->kw_splat = 0;
4483 return vm_call_symbol(ec, reg_cfp, calling, ci, sym, flags);
4486 if (calling->kw_splat) flags |= VM_CALL_KW_SPLAT;
4487 return vm_call_opt_send0(ec, reg_cfp, calling, flags);
4493 RB_DEBUG_COUNTER_INC(ccf_opt_send_simple);
4494 return vm_call_opt_send0(ec, reg_cfp, calling, vm_ci_flag(calling->cd->ci) | VM_CALL_FCALL);
4500 RB_DEBUG_COUNTER_INC(ccf_opt_send);
4503 int flags = vm_ci_flag(ci);
4505 if (UNLIKELY((flags & VM_CALL_FORWARDING) || (!(flags & VM_CALL_ARGS_SIMPLE) &&
4506 ((calling->argc == 1 && (flags & (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT))) ||
4507 (calling->argc == 2 && (flags & VM_CALL_ARGS_SPLAT) && (flags & VM_CALL_KW_SPLAT)) ||
4508 ((flags & VM_CALL_KWARG) && (vm_ci_kwarg(ci)->keyword_len == calling->argc)))))) {
4509 CC_SET_FASTPATH(calling->cc, vm_call_opt_send_complex, TRUE);
4510 return vm_call_opt_send_complex(ec, reg_cfp, calling);
4513 CC_SET_FASTPATH(calling->cc, vm_call_opt_send_simple, TRUE);
4514 return vm_call_opt_send_simple(ec, reg_cfp, calling);
                             const struct rb_callinfo *orig_ci, enum method_missing_reason reason)
4521 RB_DEBUG_COUNTER_INC(ccf_method_missing);
4523 VALUE *argv = STACK_ADDR_FROM_TOP(calling->argc);
4524 unsigned int argc, flag;
4526 flag = VM_CALL_FCALL | VM_CALL_OPT_SEND | vm_ci_flag(orig_ci);
4527 argc = ++calling->argc;
4530 CHECK_VM_STACK_OVERFLOW(reg_cfp, 1);
4531 vm_check_canary(ec, reg_cfp->sp);
    argv[0] = ID2SYM(vm_ci_mid(orig_ci));
4538 ec->method_missing_reason = reason;
4542 .ci = &VM_CI_ON_STACK(idMethodMissing, flag, argc, vm_ci_kwarg(orig_ci)),
4548 if (!(flag & VM_CALL_FORWARDING)) {
4549 calling->cd = &new_fcd.cd;
4553 VM_ASSERT((vm_ci_argc(caller_ci), 1));
4554 new_fcd.caller_ci = caller_ci;
    calling->cc = &VM_CC_ON_STACK(Qundef, vm_call_general, {{ 0 }},
                                  rb_callable_method_entry_without_refinements(CLASS_OF(calling->recv), idMethodMissing, NULL));
4560 return vm_call_method(ec, reg_cfp, calling);
4566 return vm_call_method_missing_body(ec, reg_cfp, calling, calling->cd->ci, vm_cc_cmethod_missing_reason(calling->cc));
4577 return vm_call_method_nome(ec, cfp, calling);
4579 if (cme->def->type == VM_METHOD_TYPE_REFINED &&
4580 cme->def->body.refined.orig_me) {
4581 cme = refined_method_callable_without_refinement(cme);
    calling->cc = &VM_CC_ON_STACK(Qundef, vm_call_general, {{ 0 }}, cme);
4586 return vm_call_method_each_type(ec, cfp, calling);
static VALUE
find_refinement(VALUE refinements, VALUE klass)
{
    if (NIL_P(refinements)) {
        return Qnil;
    }
    return rb_hash_lookup(refinements, klass);
4604 if (cfp->iseq && ISEQ_BODY(cfp->iseq)->type == ISEQ_TYPE_BLOCK) {
4605 const rb_iseq_t *local_iseq = ISEQ_BODY(cfp->iseq)->local_iseq;
4608 cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
4609 if (RUBY_VM_CONTROL_FRAME_STACK_OVERFLOW_P(ec, cfp)) {
    } while (cfp->iseq != local_iseq);
4624 if (orig_me->defined_class == 0) {
4632 VM_ASSERT(callable_method_entry_p(cme));
4634 if (UNDEFINED_METHOD_ENTRY_P(cme)) {
4644 ID mid = vm_ci_mid(calling->cd->ci);
4645 const rb_cref_t *cref = vm_get_cref(cfp->ep);
4649 for (; cref; cref = CREF_NEXT(cref)) {
4650 const VALUE refinement = find_refinement(CREF_REFINEMENTS(cref), vm_cc_cme(cc)->owner);
        if (NIL_P(refinement)) continue;
4654 rb_callable_method_entry(refinement, mid);
4657 if (vm_cc_call(cc) == vm_call_super_method) {
4660 if (top_me && rb_method_definition_eq(ref_me->def, top_me->def)) {
4665 if (cme->def->type != VM_METHOD_TYPE_REFINED ||
4666 cme->def != ref_me->def) {
4669 if (ref_me->def->type != VM_METHOD_TYPE_REFINED) {
4678 if (vm_cc_cme(cc)->def->body.refined.orig_me) {
4679 return refined_method_callable_without_refinement(vm_cc_cme(cc));
4694 if (calling->cd->cc) {
4695 const struct rb_callcache *cc = calling->cc = vm_cc_new(vm_cc_cme(calling->cc)->defined_class, ref_cme, vm_call_general, cc_type_refinement);
4697 return vm_call_method(ec, cfp, calling);
        struct rb_callcache *ref_cc = &VM_CC_ON_STACK(Qundef, vm_call_general, {{ 0 }}, ref_cme);
        calling->cc = ref_cc;
        return vm_call_method(ec, cfp, calling);
4706 return vm_call_method_nome(ec, cfp, calling);
4712NOINLINE(
static VALUE
4720 int argc = calling->argc;
    if (argc > 0) MEMMOVE(&TOPN(argc), &TOPN(argc-1), VALUE, argc);
    return vm_invoke_block(ec, reg_cfp, calling, ci, false, block_handler);
4732 RB_DEBUG_COUNTER_INC(ccf_opt_call);
4735 VALUE procval = calling->recv;
4736 return vm_invoke_block_opt_call(ec, reg_cfp, calling, ci, VM_BH_FROM_PROC(procval));
4742 RB_DEBUG_COUNTER_INC(ccf_opt_block_call);
4744 VALUE block_handler = VM_ENV_BLOCK_HANDLER(VM_CF_LEP(reg_cfp));
4747 if (BASIC_OP_UNREDEFINED_P(BOP_CALL, PROC_REDEFINED_OP_FLAG)) {
4748 return vm_invoke_block_opt_call(ec, reg_cfp, calling, ci, block_handler);
4751 calling->recv = rb_vm_bh_to_procval(ec, block_handler);
    calling->cc = rb_vm_search_method_slowpath(ci, CLASS_OF(calling->recv));
4753 return vm_call_general(ec, reg_cfp, calling);
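/* Struct accessor fastpaths: optimized Struct member readers and writers
 * carry the member index in the method definition, so aref/aset reduce
 * to a direct RSTRUCT slot load or store (plus a frozen check on set). */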
4760 VALUE recv = calling->recv;
4763 VM_ASSERT(vm_cc_cme(calling->cc)->def->type == VM_METHOD_TYPE_OPTIMIZED);
4764 VM_ASSERT(vm_cc_cme(calling->cc)->def->body.optimized.type == OPTIMIZED_METHOD_TYPE_STRUCT_AREF);
4766 const unsigned int off = vm_cc_cme(calling->cc)->def->body.optimized.index;
    return internal_RSTRUCT_GET(recv, off);
4773 RB_DEBUG_COUNTER_INC(ccf_opt_struct_aref);
4775 VALUE ret = vm_call_opt_struct_aref0(ec, calling);
4783 VALUE recv = calling->recv;
4786 VM_ASSERT(vm_cc_cme(calling->cc)->def->type == VM_METHOD_TYPE_OPTIMIZED);
4787 VM_ASSERT(vm_cc_cme(calling->cc)->def->body.optimized.type == OPTIMIZED_METHOD_TYPE_STRUCT_ASET);
4789 rb_check_frozen(recv);
4791 const unsigned int off = vm_cc_cme(calling->cc)->def->body.optimized.index;
    internal_RSTRUCT_SET(recv, off, val);
4800 RB_DEBUG_COUNTER_INC(ccf_opt_struct_aset);
4802 VALUE ret = vm_call_opt_struct_aset0(ec, calling, *(reg_cfp->sp - 1));
4810#define VM_CALL_METHOD_ATTR(var, func, nohook) \
4811 if (UNLIKELY(ruby_vm_c_events_enabled > 0)) { \
4812 EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_CALL, calling->recv, vm_cc_cme(cc)->def->original_id, \
4813 vm_ci_mid(ci), vm_cc_cme(cc)->owner, Qundef); \
4815 EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_RETURN, calling->recv, vm_cc_cme(cc)->def->original_id, \
4816 vm_ci_mid(ci), vm_cc_cme(cc)->owner, (var)); \
4827 switch (vm_cc_cme(cc)->def->body.optimized.type) {
4828 case OPTIMIZED_METHOD_TYPE_SEND:
4829 CC_SET_FASTPATH(cc, vm_call_opt_send, TRUE);
4830 return vm_call_opt_send(ec, cfp, calling);
4831 case OPTIMIZED_METHOD_TYPE_CALL:
4832 CC_SET_FASTPATH(cc, vm_call_opt_call, TRUE);
4833 return vm_call_opt_call(ec, cfp, calling);
4834 case OPTIMIZED_METHOD_TYPE_BLOCK_CALL:
4835 CC_SET_FASTPATH(cc, vm_call_opt_block_call, TRUE);
4836 return vm_call_opt_block_call(ec, cfp, calling);
4837 case OPTIMIZED_METHOD_TYPE_STRUCT_AREF: {
4838 CALLER_SETUP_ARG(cfp, calling, ci, 0);
4842 VM_CALL_METHOD_ATTR(v,
4843 vm_call_opt_struct_aref(ec, cfp, calling),
4844 set_vm_cc_ivar(cc); \
4845 CC_SET_FASTPATH(cc, vm_call_opt_struct_aref, (vm_ci_flag(ci) & VM_CALL_ARGS_SIMPLE)))
4848 case OPTIMIZED_METHOD_TYPE_STRUCT_ASET: {
4849 CALLER_SETUP_ARG(cfp, calling, ci, 1);
4853 VM_CALL_METHOD_ATTR(v,
4854 vm_call_opt_struct_aset(ec, cfp, calling),
4855 set_vm_cc_ivar(cc); \
4856 CC_SET_FASTPATH(cc, vm_call_opt_struct_aset, (vm_ci_flag(ci) & VM_CALL_ARGS_SIMPLE)))
        rb_bug("vm_call_method: unsupported optimized method type (%d)", vm_cc_cme(cc)->def->body.optimized.type);
4872 VM_ASSERT(! METHOD_ENTRY_INVALIDATED(cme));
4874 switch (cme->def->type) {
4875 case VM_METHOD_TYPE_ISEQ:
4876 if (ISEQ_BODY(def_iseq_ptr(cme->def))->param.flags.forwardable) {
4877 CC_SET_FASTPATH(cc, vm_call_iseq_fwd_setup, TRUE);
4878 return vm_call_iseq_fwd_setup(ec, cfp, calling);
4881 CC_SET_FASTPATH(cc, vm_call_iseq_setup, TRUE);
4882 return vm_call_iseq_setup(ec, cfp, calling);
4885 case VM_METHOD_TYPE_NOTIMPLEMENTED:
4886 case VM_METHOD_TYPE_CFUNC:
4887 CC_SET_FASTPATH(cc, vm_call_cfunc, TRUE);
4888 return vm_call_cfunc(ec, cfp, calling);
4890 case VM_METHOD_TYPE_ATTRSET:
4891 CALLER_SETUP_ARG(cfp, calling, ci, 1);
4895 const unsigned int aset_mask = (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT | VM_CALL_KWARG | VM_CALL_FORWARDING);
4897 if (vm_cc_markable(cc)) {
4898 vm_cc_attr_index_initialize(cc, INVALID_SHAPE_ID);
4899 VM_CALL_METHOD_ATTR(v,
4900 vm_call_attrset_direct(ec, cfp, cc, calling->recv),
4901 CC_SET_FASTPATH(cc, vm_call_attrset, !(vm_ci_flag(ci) & aset_mask)));
4907 VM_CALLCACHE_UNMARKABLE |
4908 VM_CALLCACHE_ON_STACK,
4914 .value = vm_pack_shape_and_index(INVALID_SHAPE_ID, ATTR_INDEX_NOT_SET),
4919 VM_CALL_METHOD_ATTR(v,
4920 vm_call_attrset_direct(ec, cfp, cc, calling->recv),
4921 CC_SET_FASTPATH(cc, vm_call_attrset, !(vm_ci_flag(ci) & aset_mask)));
4925 case VM_METHOD_TYPE_IVAR:
4926 CALLER_SETUP_ARG(cfp, calling, ci, 0);
4928 vm_cc_attr_index_initialize(cc, INVALID_SHAPE_ID);
4929 const unsigned int ivar_mask = (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT | VM_CALL_FORWARDING);
4930 VM_CALL_METHOD_ATTR(v,
4931 vm_call_ivar(ec, cfp, calling),
4932 CC_SET_FASTPATH(cc, vm_call_ivar, !(vm_ci_flag(ci) & ivar_mask)));
4935 case VM_METHOD_TYPE_MISSING:
4936 vm_cc_method_missing_reason_set(cc, 0);
4937 CC_SET_FASTPATH(cc, vm_call_method_missing, TRUE);
4938 return vm_call_method_missing(ec, cfp, calling);
4940 case VM_METHOD_TYPE_BMETHOD:
4941 CC_SET_FASTPATH(cc, vm_call_bmethod, TRUE);
4942 return vm_call_bmethod(ec, cfp, calling);
4944 case VM_METHOD_TYPE_ALIAS:
4945 CC_SET_FASTPATH(cc, vm_call_alias, TRUE);
4946 return vm_call_alias(ec, cfp, calling);
4948 case VM_METHOD_TYPE_OPTIMIZED:
4949 return vm_call_optimized(ec, cfp, calling, ci, cc);
4951 case VM_METHOD_TYPE_UNDEF:
4954 case VM_METHOD_TYPE_ZSUPER:
4955 return vm_call_zsuper(ec, cfp, calling, RCLASS_ORIGIN(vm_cc_cme(cc)->defined_class));
4957 case VM_METHOD_TYPE_REFINED:
4960 return vm_call_refined(ec, cfp, calling);
    rb_bug("vm_call_method: unsupported method type (%d)", vm_cc_cme(cc)->def->type);
4973 const int stat = ci_missing_reason(ci);
4975 if (vm_ci_mid(ci) == idMethodMissing) {
4976 if (UNLIKELY(calling->heap_argv)) {
4981 VALUE *argv = STACK_ADDR_FROM_TOP(calling->argc);
4982 vm_raise_method_missing(ec, calling->argc, argv, calling->recv, stat);
4986 return vm_call_method_missing_body(ec, cfp, calling, ci, stat);
4998 VALUE defined_class = me->defined_class;
4999 VALUE refined_class = RCLASS_REFINED_CLASS(defined_class);
5000 return NIL_P(refined_class) ? defined_class : refined_class;
5009 VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));
5011 if (vm_cc_cme(cc) != NULL) {
5012 switch (METHOD_ENTRY_VISI(vm_cc_cme(cc))) {
5013 case METHOD_VISI_PUBLIC:
5014 return vm_call_method_each_type(ec, cfp, calling);
5016 case METHOD_VISI_PRIVATE:
5017 if (!(vm_ci_flag(ci) & VM_CALL_FCALL)) {
5018 enum method_missing_reason stat = MISSING_PRIVATE;
5019 if (vm_ci_flag(ci) & VM_CALL_VCALL) stat |= MISSING_VCALL;
5021 vm_cc_method_missing_reason_set(cc, stat);
5022 CC_SET_FASTPATH(cc, vm_call_method_missing, TRUE);
5023 return vm_call_method_missing(ec, cfp, calling);
5025 return vm_call_method_each_type(ec, cfp, calling);
5027 case METHOD_VISI_PROTECTED:
5028 if (!(vm_ci_flag(ci) & (VM_CALL_OPT_SEND | VM_CALL_FCALL))) {
5029 VALUE defined_class = vm_defined_class_for_protected_call(vm_cc_cme(cc));
5031 vm_cc_method_missing_reason_set(cc, MISSING_PROTECTED);
5032 return vm_call_method_missing(ec, cfp, calling);
5036 VM_ASSERT(vm_cc_cme(cc) != NULL);
5039 calling->cc = &cc_on_stack;
5040 return vm_call_method_each_type(ec, cfp, calling);
5043 return vm_call_method_each_type(ec, cfp, calling);
    rb_bug("unreachable");
5050 return vm_call_method_nome(ec, cfp, calling);
5057 RB_DEBUG_COUNTER_INC(ccf_general);
5058 return vm_call_method(ec, reg_cfp, calling);
5064 VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
5065 VM_ASSERT(cc != vm_cc_empty());
5067 *(vm_call_handler *)&cc->call_ = vm_call_general;
5073 RB_DEBUG_COUNTER_INC(ccf_super_method);
    if (ec == NULL) rb_bug("unreachable");
5081 VM_ASSERT(vm_cc_call(calling->cc) == vm_call_super_method);
5082 return vm_call_method(ec, reg_cfp, calling);
vm_search_normal_superclass(VALUE klass)
        klass = RBASIC(klass)->klass;
5095 klass = RCLASS_ORIGIN(klass);
NORETURN(static void vm_super_outside(void));
static void
vm_super_outside(void)
empty_cc_for_super(void)
5110 return &vm_empty_cc_for_super;
5116 VALUE current_defined_class;
5123 current_defined_class = vm_defined_class_for_protected_call(me);
5126 reg_cfp->iseq != method_entry_iseqptr(me) &&
5129 RCLASS_INCLUDER(current_defined_class) : current_defined_class;
5133 "self has wrong type to call super in this context: "
5134 "%"PRIsVALUE
" (expected %"PRIsVALUE
")",
5139 if (me->def->type == VM_METHOD_TYPE_BMETHOD && (vm_ci_flag(cd->ci) & VM_CALL_ZSUPER)) {
5141 "implicit argument passing of super from method defined"
5142 " by define_method() is not supported."
5143 " Specify all arguments explicitly.");
5146 ID mid = me->def->original_id;
5148 if (!vm_ci_markable(cd->ci)) {
        VM_FORCE_WRITE((const VALUE *)&cd->ci->mid, (VALUE)mid);
5153 cd->ci = vm_ci_new_runtime(mid,
5156 vm_ci_kwarg(cd->ci));
5163 VALUE klass = vm_search_normal_superclass(me->defined_class);
        cc = vm_cc_new(Qundef, NULL, vm_call_method_missing, cc_type_super);
        cc = vm_search_method_fastpath((VALUE)reg_cfp->iseq, cd, klass);
5175 if (cached_cme == NULL) {
5177 cd->cc = empty_cc_for_super();
5179 else if (cached_cme->called_id != mid) {
5182 cc = vm_cc_new(klass, cme, vm_call_super_method, cc_type_super);
5186 cd->cc = cc = empty_cc_for_super();
5190 switch (cached_cme->def->type) {
5192 case VM_METHOD_TYPE_REFINED:
5194 case VM_METHOD_TYPE_ATTRSET:
5195 case VM_METHOD_TYPE_IVAR:
5196 vm_cc_call_set(cc, vm_call_super_method);
    VM_ASSERT((vm_cc_cme(cc), true));
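/* Block invocation. A block handler is a tagged reference to an iseq
 * block, an ifunc (C function) block, a Proc, or a Symbol; Proc handlers
 * are unwrapped to their underlying block first, inheriting lambda-ness
 * along the way. */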
block_proc_is_lambda(const VALUE procval)
5217 GetProcPtr(procval, proc);
5218 return proc->is_lambda;
                    VALUE self, int argc, const VALUE *argv, int kw_splat, VALUE block_handler,
5231 int is_lambda = FALSE;
5232 VALUE val, arg, blockarg;
5234 const struct vm_ifunc *ifunc = captured->code.ifunc;
5239 else if (argc == 0) {
5246 blockarg = rb_vm_bh_to_procval(ec, block_handler);
5248 frame_flag = VM_FRAME_MAGIC_IFUNC | VM_FRAME_FLAG_CFRAME | (me ? VM_FRAME_FLAG_BMETHOD : 0);
5250 frame_flag |= VM_FRAME_FLAG_CFRAME_KW;
    vm_push_frame(ec, (const rb_iseq_t *)captured->code.ifunc,
5256 VM_GUARDED_PREV_EP(captured->ep),
5258 0, ec->cfp->sp, 0, 0);
    val = (*ifunc->func)(arg, (VALUE)ifunc->data, argc, argv, blockarg);
5260 rb_vm_pop_frame(ec);
5268 return vm_yield_with_cfunc(ec, captured, captured->self, argc, argv, 0, VM_BLOCK_HANDLER_NONE, NULL);
    return rb_sym_proc_call(SYM2ID(symbol), argc, argv, kw_splat, rb_vm_bh_to_procval(ec, block_handler));
5283 CHECK_VM_STACK_OVERFLOW(cfp, ISEQ_BODY(iseq)->param.lead_num);
    for (i=0; i<len && i<ISEQ_BODY(iseq)->param.lead_num; i++) {
vm_callee_setup_block_arg_arg0_check(VALUE *argv)
5295 VALUE ary, arg0 = argv[0];
5300 VM_ASSERT(argv[0] == arg0);
5308 if (rb_simple_iseq_p(iseq)) {
5312 CALLER_SETUP_ARG(cfp, calling, ci, ISEQ_BODY(iseq)->param.lead_num);
5314 if (arg_setup_type == arg_setup_block &&
5315 calling->argc == 1 &&
5316 ISEQ_BODY(iseq)->param.flags.has_lead &&
5317 !ISEQ_BODY(iseq)->param.flags.ambiguous_param0 &&
            !NIL_P(arg0 = vm_callee_setup_block_arg_arg0_check(argv))) {
5319 calling->argc = vm_callee_setup_block_arg_arg0_splat(cfp, iseq, argv, arg0);
5322 if (calling->argc != ISEQ_BODY(iseq)->param.lead_num) {
5323 if (arg_setup_type == arg_setup_block) {
5324 if (calling->argc < ISEQ_BODY(iseq)->param.lead_num) {
5326 CHECK_VM_STACK_OVERFLOW(cfp, ISEQ_BODY(iseq)->param.lead_num);
                for (i=calling->argc; i<ISEQ_BODY(iseq)->param.lead_num; i++) argv[i] = Qnil;
5328 calling->argc = ISEQ_BODY(iseq)->param.lead_num;
5330 else if (calling->argc > ISEQ_BODY(iseq)->param.lead_num) {
5331 calling->argc = ISEQ_BODY(iseq)->param.lead_num;
5335 argument_arity_error(ec, iseq, NULL, calling->argc, ISEQ_BODY(iseq)->param.lead_num, ISEQ_BODY(iseq)->param.lead_num);
5342 return setup_parameters_complex(ec, iseq, calling, ci, argv, arg_setup_type);
5351 calling = &calling_entry;
5352 calling->argc = argc;
5353 calling->block_handler = block_handler;
5354 calling->kw_splat = (flags & VM_CALL_KW_SPLAT) ? 1 : 0;
5356 calling->heap_argv = 0;
5358 struct rb_callinfo dummy_ci = VM_CI_ON_STACK(0, flags, 0, 0);
5360 return vm_callee_setup_block_arg(ec, calling, &dummy_ci, iseq, argv, arg_setup_type);
                     bool is_lambda, VALUE block_handler)
5371 const rb_iseq_t *iseq = rb_iseq_check(captured->code.iseq);
5372 const int arg_size = ISEQ_BODY(iseq)->param.size;
    VALUE *const rsp = GET_SP() - calling->argc;
    VALUE *const argv = rsp;
5375 int opt_pc = vm_callee_setup_block_arg(ec, calling, ci, iseq, argv, is_lambda ? arg_setup_method : arg_setup_block);
5376 int frame_flag = VM_FRAME_MAGIC_BLOCK | (is_lambda ? VM_FRAME_FLAG_LAMBDA : 0);
5380 vm_push_frame(ec, iseq,
5383 VM_GUARDED_PREV_EP(captured->ep), 0,
5384 ISEQ_BODY(iseq)->iseq_encoded + opt_pc,
5386 ISEQ_BODY(iseq)->local_table_size - arg_size, ISEQ_BODY(iseq)->stack_max);
                       MAYBE_UNUSED(bool is_lambda), VALUE block_handler)
5396 VALUE symbol = VM_BH_TO_SYMBOL(block_handler);
5397 int flags = vm_ci_flag(ci);
5399 if (UNLIKELY(!(flags & VM_CALL_ARGS_SIMPLE) &&
5400 ((calling->argc == 0) ||
5401 (calling->argc == 1 && (flags & (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT))) ||
5402 (calling->argc == 2 && (flags & VM_CALL_ARGS_SPLAT) && (flags & VM_CALL_KW_SPLAT)) ||
5403 ((flags & VM_CALL_KWARG) && (vm_ci_kwarg(ci)->keyword_len == calling->argc))))) {
5404 CALLER_SETUP_ARG(reg_cfp, calling, ci, ALLOW_HEAP_ARGV);
5406 if (UNLIKELY(calling->heap_argv)) {
5407#if VM_ARGC_STACK_MAX < 0
            rb_raise(rb_eArgError, "no receiver given");
5415 reg_cfp->sp[-1] = reg_cfp->sp[-2];
5416 reg_cfp->sp[-2] = calling->recv;
5417 flags |= VM_CALL_ARGS_SPLAT;
5420 if (calling->argc < 1) {
            rb_raise(rb_eArgError, "no receiver given");
5423 calling->recv = TOPN(--calling->argc);
5425 if (calling->kw_splat) {
5426 flags |= VM_CALL_KW_SPLAT;
5430 if (calling->argc < 1) {
            rb_raise(rb_eArgError, "no receiver given");
5433 calling->recv = TOPN(--calling->argc);
5436 return vm_call_symbol(ec, reg_cfp, calling, ci, symbol, flags);
                      MAYBE_UNUSED(bool is_lambda), VALUE block_handler)
5447 CALLER_SETUP_ARG(ec->cfp, calling, ci, ALLOW_HEAP_ARGV_KEEP_KWSPLAT);
5448 argc = calling->argc;
    val = vm_yield_with_cfunc(ec, captured, captured->self, CALLING_ARGC(calling), calling->heap_argv ? RARRAY_CONST_PTR(calling->heap_argv) : STACK_ADDR_FROM_TOP(argc), calling->kw_splat, calling->block_handler, NULL);
vm_proc_to_block_handler(VALUE procval)
5457 const struct rb_block *block = vm_proc_block(procval);
5459 switch (vm_block_type(block)) {
5460 case block_type_iseq:
5461 return VM_BH_FROM_ISEQ_BLOCK(&block->as.captured);
5462 case block_type_ifunc:
5463 return VM_BH_FROM_IFUNC_BLOCK(&block->as.captured);
5464 case block_type_symbol:
5465 return VM_BH_FROM_SYMBOL(block->as.symbol);
5466 case block_type_proc:
5467 return VM_BH_FROM_PROC(block->as.proc);
5469 VM_UNREACHABLE(vm_yield_with_proc);
                     bool is_lambda, VALUE block_handler)
5478 while (vm_block_handler_type(block_handler) == block_handler_type_proc) {
5479 VALUE proc = VM_BH_TO_PROC(block_handler);
5480 is_lambda = block_proc_is_lambda(proc);
5481 block_handler = vm_proc_to_block_handler(proc);
5484 return vm_invoke_block(ec, reg_cfp, calling, ci, is_lambda, block_handler);
                bool is_lambda, VALUE block_handler)
{
    VALUE (*func)(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
                  struct rb_calling_info *calling, const struct rb_callinfo *ci,
                  bool is_lambda, VALUE block_handler);
    switch (vm_block_handler_type(block_handler)) {
      case block_handler_type_iseq:   func = vm_invoke_iseq_block;   break;
      case block_handler_type_ifunc:  func = vm_invoke_ifunc_block;  break;
      case block_handler_type_proc:   func = vm_invoke_proc_block;   break;
      case block_handler_type_symbol: func = vm_invoke_symbol_block; break;
      default: rb_bug("vm_invoke_block: unreachable");
    }
5504 return func(ec, reg_cfp, calling, ci, is_lambda, block_handler);
vm_make_proc_with_iseq(const rb_iseq_t *blockiseq)
        rb_bug("vm_make_proc_with_iseq: unreachable");
5518 captured = VM_CFP_TO_CAPTURED_BLOCK(cfp);
5519 captured->code.iseq = blockiseq;
    return rb_vm_make_proc(ec, captured, rb_cProc);
vm_once_exec(VALUE iseq)
vm_once_clear(VALUE data)
5535 is->once.running_thread = NULL;
    args[0] = obj; args[1] = Qfalse;
    if (!UNDEF_P(r) && RTEST(r)) {
    enum defined_type type = (enum defined_type)op_type;
        return rb_gvar_defined(SYM2ID(obj));
5570 case DEFINED_CVAR: {
5571 const rb_cref_t *cref = vm_get_cref(GET_EP());
5572 klass = vm_get_cvar_base(cref, GET_CFP(), 0);
5577 case DEFINED_CONST_FROM: {
        bool allow_nil = type == DEFINED_CONST;
        return vm_get_ev_const(ec, klass, SYM2ID(obj), allow_nil, true);
        return rb_ec_obj_respond_to(ec, v, SYM2ID(obj), TRUE);
5587 case DEFINED_METHOD:{
5592 switch (METHOD_ENTRY_VISI(me)) {
5593 case METHOD_VISI_PRIVATE:
5595 case METHOD_VISI_PROTECTED:
5599 case METHOD_VISI_PUBLIC:
                rb_bug("vm_defined: unreachable: %u", (unsigned int)METHOD_ENTRY_VISI(me));
5607 return check_respond_to_missing(obj, v);
5612 if (GET_BLOCK_HANDLER() != VM_BLOCK_HANDLER_NONE) {
5616 case DEFINED_ZSUPER:
5621 VALUE klass = vm_search_normal_superclass(me->defined_class);
            if (!klass) return false;
5624 ID id = me->def->original_id;
        return RTEST(vm_backref_defined(ec, GET_LEP(), FIX2INT(obj)));
        rb_bug("unimplemented defined? type (VM)");
5643 return vm_defined(ec, reg_cfp, op_type, obj, v);
vm_get_ep(const VALUE *const reg_ep, rb_num_t lv)
5650 const VALUE *ep = reg_ep;
5651 for (i = 0; i < lv; i++) {
5652 ep = GET_PREV_EP(ep);
vm_get_special_object(const VALUE *const reg_ep,
                      enum vm_special_object_type type)
5662 case VM_SPECIAL_OBJECT_VMCORE:
5663 return rb_mRubyVMFrozenCore;
5664 case VM_SPECIAL_OBJECT_CBASE:
5665 return vm_get_cbase(reg_ep);
5666 case VM_SPECIAL_OBJECT_CONST_BASE:
5667 return vm_get_const_base(reg_ep);
        rb_bug("putspecialobject insn: unknown value_type %d", type);
rb_vm_get_special_object(const VALUE *reg_ep, enum vm_special_object_type type)
{
    return vm_get_special_object(reg_ep, type);
5684 const VALUE ary2 = ary2st;
5685 VALUE tmp1 = rb_check_to_array(ary1);
5686 VALUE tmp2 = rb_check_to_array(ary2);
5707 const VALUE ary2 = ary2st;
    if (NIL_P(ary2)) return ary1;
5711 VALUE tmp2 = rb_check_to_array(ary2);
5726 return vm_concat_array(ary1, ary2st);
rb_vm_concat_to_array(VALUE ary1, VALUE ary2st)
5732 return vm_concat_to_array(ary1, ary2st);
5741 VALUE tmp = rb_check_to_array(ary);
    else if (RTEST(flag)) {
5758 return vm_splat_array(flag, ary);
    enum vm_check_match_type type = ((int)flag) & VM_CHECKMATCH_TYPE_MASK;
5766 if (flag & VM_CHECKMATCH_ARRAY) {
5770 for (i = 0; i < n; i++) {
            VALUE c = check_match(ec, v, target, type);
        return check_match(ec, pattern, target, type);
5788 return vm_check_match(ec, target, pattern, flag);
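/* Keyword bookkeeping: kw_bits is a Fixnum bitset stored in the frame
 * environment at (ep - bits). Bit idx is set when the corresponding
 * optional keyword was passed explicitly, letting the checkkeyword
 * instruction skip the default-value branch. */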
vm_check_keyword(lindex_t bits, lindex_t idx, const VALUE *ep)
5794 const VALUE kw_bits = *(ep - bits);
        unsigned int b = (unsigned int)FIX2ULONG(kw_bits);
5798 if ((idx < VM_KW_SPECIFIED_BITS_MAX) && (b & (0x01 << idx)))
5811 if (RUBY_DTRACE_METHOD_ENTRY_ENABLED() ||
5812 RUBY_DTRACE_METHOD_RETURN_ENABLED() ||
5813 RUBY_DTRACE_CMETHOD_ENTRY_ENABLED() ||
5814 RUBY_DTRACE_CMETHOD_RETURN_ENABLED()) {
5818 RUBY_DTRACE_METHOD_ENTRY_HOOK(ec, 0, 0);
5821 RUBY_DTRACE_CMETHOD_ENTRY_HOOK(ec, 0, 0);
5824 RUBY_DTRACE_METHOD_RETURN_HOOK(ec, 0, 0);
5827 RUBY_DTRACE_CMETHOD_RETURN_HOOK(ec, 0, 0);
vm_const_get_under(ID id, rb_num_t flags, VALUE cbase)
5839 else if (VM_DEFINECLASS_SCOPED_P(flags)) {
        return rb_public_const_get_at(cbase, id);
vm_check_if_class(ID id, rb_num_t flags, VALUE super, VALUE klass)
5853 else if (VM_DEFINECLASS_HAS_SUPERCLASS_P(flags)) {
5858 "superclass mismatch for class %"PRIsVALUE
"",
vm_check_if_module(ID id, VALUE mod)
vm_declare_class(ID id, rb_num_t flags, VALUE cbase, VALUE super)
5893 VALUE s = VM_DEFINECLASS_HAS_SUPERCLASS_P(flags) ? super : rb_cObject;
vm_declare_module(ID id, VALUE cbase)
NORETURN(static void unmatched_redefinition(const char *type, VALUE cbase, ID id, VALUE old));
    VALUE name = rb_id2str(id);
    VALUE message = rb_sprintf("%"PRIsVALUE" is not a %s",
                               name, type);
    VALUE location = rb_const_source_location_at(cbase, id);
    if (!NIL_P(location)) {
        rb_str_catf(message, "\n%"PRIsVALUE":%"PRIsVALUE":"
                    " previous definition of %"PRIsVALUE" was here",
static VALUE
vm_define_class(ID id, rb_num_t flags, VALUE cbase, VALUE super)
{
    VALUE klass;

    if (VM_DEFINECLASS_HAS_SUPERCLASS_P(flags) && !RB_TYPE_P(super, T_CLASS)) {
        rb_raise(rb_eTypeError,
                 "superclass must be an instance of Class (given an instance of %"PRIsVALUE")",
                 rb_obj_class(super));
    }

    vm_check_if_namespace(cbase);

    if ((klass = vm_const_get_under(id, flags, cbase)) != 0) {
        if (!vm_check_if_class(id, flags, super, klass))
            unmatched_redefinition("class", cbase, id, klass);
        return klass;
    }
    else {
        return vm_declare_class(id, flags, cbase, super);
    }
static VALUE
vm_define_module(ID id, rb_num_t flags, VALUE cbase)
{
    VALUE mod;

    vm_check_if_namespace(cbase);
    if ((mod = vm_const_get_under(id, flags, cbase)) != 0) {
        if (!vm_check_if_module(id, mod))
            unmatched_redefinition("module", cbase, id, mod);
        return mod;
    }
    else {
        return vm_declare_module(id, cbase);
    }
static VALUE
vm_find_or_create_class_by_id(ID id, rb_num_t flags, VALUE cbase, VALUE super)
{
    rb_vm_defineclass_type_t type = VM_DEFINECLASS_TYPE(flags);

    switch (type) {
      case VM_DEFINECLASS_TYPE_CLASS:
        return vm_define_class(id, flags, cbase, super);

      case VM_DEFINECLASS_TYPE_SINGLETON_CLASS:
        return rb_singleton_class(cbase);

      case VM_DEFINECLASS_TYPE_MODULE:
        return vm_define_module(id, flags, cbase);

      default:
        rb_bug("unknown defineclass type: %d", (int)type);
    }
5991static rb_method_visibility_t
5996 if (!vm_env_cref_by_cref(cfp->ep)) {
5997 return METHOD_VISI_PUBLIC;
6000 return CREF_SCOPE_VISI(vm_ec_cref(ec))->method_visi;
6009 if (!vm_env_cref_by_cref(cfp->ep)) {
6013 return CREF_SCOPE_VISI(vm_ec_cref(ec))->module_func;
6021 rb_method_visibility_t visi;
6026 visi = METHOD_VISI_PUBLIC;
6029 klass = CREF_CLASS_FOR_DEFINITION(cref);
6030 visi = vm_scope_visibility_get(ec);
    rb_add_method_iseq(klass, id, (const rb_iseq_t *)iseqval, cref, visi);
    RCLASS_SET_MAX_IV_COUNT(klass, rb_estimate_iv_count(klass, (const rb_iseq_t *)iseqval));
6043 if (!is_singleton && vm_scope_module_func_check(ec)) {
        rb_add_method_iseq(klass, id, (const rb_iseq_t *)iseqval, cref, METHOD_VISI_PUBLIC);
rb_vm_untag_block_handler(VALUE block_handler)
{
    if (VM_BLOCK_HANDLER_NONE == block_handler) return Qnil;
6058 switch (vm_block_handler_type(block_handler)) {
6059 case block_handler_type_iseq:
6060 case block_handler_type_ifunc: {
6062 return captured->code.val;
6064 case block_handler_type_proc:
6065 case block_handler_type_symbol:
6066 return block_handler;
        rb_bug("rb_vm_untag_block_handler: unreachable");
6075 return rb_vm_untag_block_handler(VM_CF_BLOCK_HANDLER(reg_cfp));
6084 VALUE block_handler = VM_CF_BLOCK_HANDLER(GET_CFP());
6086 if (block_handler == VM_BLOCK_HANDLER_NONE) {
        rb_vm_localjump_error("no block given (yield)", Qnil, 0);
    return vm_invoke_block(ec, GET_CFP(), calling, ci, false, block_handler);
6094enum method_explorer_type {
6096 mexp_search_invokeblock,
6105 VALUE block_handler,
6106 enum method_explorer_type method_explorer
6111 int argc = vm_ci_argc(ci);
6112 VALUE recv = TOPN(argc);
6114 .block_handler = block_handler,
6115 .kw_splat = IS_ARGS_KW_SPLAT(ci) > 0,
6121 switch (method_explorer) {
6122 case mexp_search_method:
        calling.cc = cc = vm_search_method_fastpath((VALUE)reg_cfp->iseq, cd, CLASS_OF(recv));
6124 val = vm_cc_call(cc)(ec, GET_CFP(), &calling);
6126 case mexp_search_super:
6127 calling.cc = cc = vm_search_super_method(reg_cfp, cd, recv);
6128 val = vm_cc_call(cc)(ec, GET_CFP(), &calling);
6130 case mexp_search_invokeblock:
6131 val = vm_invokeblock_i(ec, GET_CFP(), &calling);
    VALUE bh = vm_caller_setup_arg_block(ec, GET_CFP(), cd->ci, blockiseq, false);
6142 VALUE val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_method);
    VALUE bh = vm_caller_setup_fwd_args(GET_EC(), GET_CFP(), cd, blockiseq, false, &adjusted_cd, &adjusted_ci);
6157 VALUE val = vm_sendish(ec, GET_CFP(), &adjusted_cd.cd, bh, mexp_search_method);
6159 if (cd->cc != adjusted_cd.cd.cc && vm_cc_markable(adjusted_cd.cd.cc)) {
6171 VALUE bh = VM_BLOCK_HANDLER_NONE;
6172 VALUE val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_method);
    VALUE bh = vm_caller_setup_arg_block(ec, GET_CFP(), cd->ci, blockiseq, true);
6183 VALUE val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_super);
    VALUE bh = vm_caller_setup_fwd_args(GET_EC(), GET_CFP(), cd, blockiseq, true, &adjusted_cd, &adjusted_ci);
6198 VALUE val = vm_sendish(ec, GET_CFP(), &adjusted_cd.cd, bh, mexp_search_super);
6200 if (cd->cc != adjusted_cd.cd.cc && vm_cc_markable(adjusted_cd.cd.cc)) {
6212 VALUE bh = VM_BLOCK_HANDLER_NONE;
6213 VALUE val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_invokeblock);
6241 if (check_method_basic_definition(cme)) {
6250 if (check_cfunc(cme, rb_mod_to_s)) {
6256 val = rb_mod_to_s(recv);
6262 if (check_cfunc(cme, rb_nil_to_s)) {
6263 return rb_nil_to_s(recv);
6267 if (check_cfunc(cme, rb_true_to_s)) {
6268 return rb_true_to_s(recv);
6272 if (check_cfunc(cme, rb_false_to_s)) {
6273 return rb_false_to_s(recv);
6277 if (check_cfunc(cme, rb_int_to_s)) {
6278 return rb_fix_to_s(recv);
6290 return vm_objtostring(iseq, recv, cd);
vm_opt_ary_freeze(VALUE ary, int bop, ID id)
6296 if (BASIC_OP_UNREDEFINED_P(bop, ARRAY_REDEFINED_OP_FLAG)) {
vm_opt_hash_freeze(VALUE hash, int bop, ID id)
6307 if (BASIC_OP_UNREDEFINED_P(bop, HASH_REDEFINED_OP_FLAG)) {
vm_opt_str_freeze(VALUE str, int bop, ID id)
6318 if (BASIC_OP_UNREDEFINED_P(bop, STRING_REDEFINED_OP_FLAG)) {
6332 if (BASIC_OP_UNREDEFINED_P(BOP_INCLUDE_P, ARRAY_REDEFINED_OP_FLAG)) {
6336 VALUE args[1] = {target};
    RUBY_DTRACE_CREATE_HOOK(ARRAY, RARRAY_LEN(ary));
    return rb_vm_call_with_refinements(ec, dupary, idIncludeP, 1, args, RB_NO_KEYWORDS);
6349 return vm_opt_duparray_include_p(ec, ary, target);
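/* newarray specializations: for expressions like [a, b, c].max, when the
 * relevant Array method (max/min/hash/include?/pack) is unredefined, the
 * VM operates directly on the operand-stack slice -- or wraps it in a
 * stack-allocated fake RArray -- instead of allocating a real Array. */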
6355 if (BASIC_OP_UNREDEFINED_P(BOP_MAX, ARRAY_REDEFINED_OP_FLAG)) {
6356 if (array_len == 0) {
6360 VALUE result = *ptr;
6361 rb_snum_t i = array_len - 1;
6363 const VALUE v = *++ptr;
6364 if (OPTIMIZED_CMP(v, result) > 0) {
6379 return vm_opt_newarray_max(ec, array_len, ptr);
6385 if (BASIC_OP_UNREDEFINED_P(BOP_MIN, ARRAY_REDEFINED_OP_FLAG)) {
6386 if (array_len == 0) {
6390 VALUE result = *ptr;
6391 rb_snum_t i = array_len - 1;
6393 const VALUE v = *++ptr;
6394 if (OPTIMIZED_CMP(v, result) < 0) {
6409 return vm_opt_newarray_min(ec, array_len, ptr);
6416 if (BASIC_OP_UNREDEFINED_P(BOP_HASH, ARRAY_REDEFINED_OP_FLAG)) {
6417 return rb_ary_hash_values(array_len, ptr);
6427 return vm_opt_newarray_hash(ec, array_len, ptr);
6436 if (BASIC_OP_UNREDEFINED_P(BOP_INCLUDE_P, ARRAY_REDEFINED_OP_FLAG)) {
6437 struct RArray fake_ary = {RBASIC_INIT};
        VALUE ary = rb_setup_fake_ary(&fake_ary, ptr, array_len);
6442 VALUE args[1] = {target};
    return vm_opt_newarray_include_p(ec, array_len, ptr, target);
6456 if (BASIC_OP_UNREDEFINED_P(BOP_PACK, ARRAY_REDEFINED_OP_FLAG)) {
6457 struct RArray fake_ary = {RBASIC_INIT};
        VALUE ary = rb_setup_fake_ary(&fake_ary, ptr, array_len);
        return rb_ec_pack_ary(ec, ary, fmt, (UNDEF_P(buffer) ? Qnil : buffer));
6469 if (!UNDEF_P(buffer)) {
6470 args[1] = rb_hash_new_with_size(1);
        rb_hash_aset(args[1], ID2SYM(idBuffer), buffer);
    return rb_vm_call_with_refinements(ec, rb_ary_new4(array_len, ptr), idPack, argc, args, kw_splat);
    return vm_opt_newarray_pack_buffer(ec, array_len, ptr, fmt, buffer);
    return vm_opt_newarray_pack_buffer(ec, array_len, ptr, fmt, Qundef);
vm_track_constant_cache(ID id, void *ic)
6498 struct rb_id_table *const_cache = vm->constant_cache;
6499 VALUE lookup_result;
    if (rb_id_table_lookup(const_cache, id, &lookup_result)) {
6506 ics = set_init_numtable();
        rb_id_table_insert(const_cache, id, (VALUE)ics);
6522 vm->inserting_constant_cache_id = id;
6524 set_insert(ics, (st_data_t)ic);
    vm->inserting_constant_cache_id = (ID)0;
    for (int i = 0; segments[i]; i++) {
        ID id = segments[i];
        if (id == idNULL) continue;
        vm_track_constant_cache(id, ic);
    }
6545 if ((flags & IMEMO_CONST_CACHE_SHAREABLE) || rb_ractor_main_p()) {
6546 VM_ASSERT(ractor_incidental_shareable_p(flags & IMEMO_CONST_CACHE_SHAREABLE, value));
6548 return (ic_cref == NULL ||
6549 ic_cref == vm_get_cref(reg_ep));
6557 VM_ASSERT(IMEMO_TYPE_P(ice, imemo_constcache));
6558 return vm_inlined_ic_hit_p(ice->flags, ice->value, ice->ic_cref, reg_ep);
rb_vm_ic_hit_p(IC ic, const VALUE *reg_ep)
6565 return ic->entry && vm_ic_hit_p(ic->entry, reg_ep);
6571 if (ruby_vm_const_missing_count > 0) {
6572 ruby_vm_const_missing_count = 0;
6579 ice->ic_cref = vm_get_const_key_cref(reg_ep);
6583 ice->flags |= IMEMO_CONST_CACHE_SHAREABLE;
6587 unsigned pos = (unsigned)(pc - ISEQ_BODY(iseq)->iseq_encoded);
6588 rb_yjit_constant_ic_update(iseq, ic, pos);
6598 if (ice && vm_ic_hit_p(ice, GET_EP())) {
6601 VM_ASSERT(val == vm_get_ev_const_chain(ec, segments));
6604 ruby_vm_constant_cache_misses++;
6605 val = vm_get_ev_const_chain(ec, segments);
6606 vm_ic_track_const_chain(GET_CFP(), ic, segments);
6609 vm_ic_update(GET_ISEQ(), ic, val, GET_EP(), GET_PC() - 2);
6621 if (is->once.running_thread == RUNNING_THREAD_ONCE_DONE) {
6622 return is->once.value;
6624 else if (is->once.running_thread == NULL) {
6626 is->once.running_thread = th;
6631 RB_OBJ_SET_SHAREABLE(val);
6637 is->once.running_thread = RUNNING_THREAD_ONCE_DONE;
6640 else if (is->once.running_thread == th) {
        return vm_once_exec((VALUE)iseq);
6646 RUBY_VM_CHECK_INTS(ec);
vm_case_dispatch(CDHASH hash, OFFSET else_offset, VALUE key)
6655 switch (OBJ_BUILTIN_TYPE(key)) {
6661 if (BASIC_OP_UNREDEFINED_P(BOP_EQQ,
6662 SYMBOL_REDEFINED_OP_FLAG |
6663 INTEGER_REDEFINED_OP_FLAG |
6664 FLOAT_REDEFINED_OP_FLAG |
6665 NIL_REDEFINED_OP_FLAG |
6666 TRUE_REDEFINED_OP_FLAG |
6667 FALSE_REDEFINED_OP_FLAG |
6668 STRING_REDEFINED_OP_FLAG)) {
6672 if (!isinf(kval) && modf(kval, &kval) == 0.0) {
6676 if (rb_hash_stlike_lookup(hash, key, &val)) {
6696 const ptrdiff_t nsp = VM_SP_CNT(ec, cfp->sp);
6697 const ptrdiff_t nbp = VM_SP_CNT(ec, bp);
    static const char stack_consistency_error[] =
        "Stack consistency error (sp: %"PRIdPTRDIFF", bp: %"PRIdPTRDIFF")";
6700#if defined RUBY_DEVEL
6701 VALUE mesg = rb_sprintf(stack_consistency_error, nsp, nbp);
6706 rb_bug(stack_consistency_error, nsp, nbp);
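/* Operator fastpaths: each vm_opt_* helper below inlines the common
 * Fixnum/Flonum/Float (and where relevant String/Array/Hash) receiver
 * cases, guarded by BASIC_OP_UNREDEFINED_P so redefining the operator
 * disables the shortcut. Returning Qundef means "fall back to a normal
 * method call". */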
6713 if (FIXNUM_2_P(recv, obj) &&
6714 BASIC_OP_UNREDEFINED_P(BOP_PLUS, INTEGER_REDEFINED_OP_FLAG)) {
6715 return rb_fix_plus_fix(recv, obj);
6717 else if (FLONUM_2_P(recv, obj) &&
6718 BASIC_OP_UNREDEFINED_P(BOP_PLUS, FLOAT_REDEFINED_OP_FLAG)) {
6726 BASIC_OP_UNREDEFINED_P(BOP_PLUS, FLOAT_REDEFINED_OP_FLAG)) {
6731 BASIC_OP_UNREDEFINED_P(BOP_PLUS, STRING_REDEFINED_OP_FLAG)) {
6732 return rb_str_opt_plus(recv, obj);
6736 BASIC_OP_UNREDEFINED_P(BOP_PLUS, ARRAY_REDEFINED_OP_FLAG)) {
6747 if (FIXNUM_2_P(recv, obj) &&
6748 BASIC_OP_UNREDEFINED_P(BOP_MINUS, INTEGER_REDEFINED_OP_FLAG)) {
6749 return rb_fix_minus_fix(recv, obj);
6751 else if (FLONUM_2_P(recv, obj) &&
6752 BASIC_OP_UNREDEFINED_P(BOP_MINUS, FLOAT_REDEFINED_OP_FLAG)) {
6760 BASIC_OP_UNREDEFINED_P(BOP_MINUS, FLOAT_REDEFINED_OP_FLAG)) {
6771 if (FIXNUM_2_P(recv, obj) &&
6772 BASIC_OP_UNREDEFINED_P(BOP_MULT, INTEGER_REDEFINED_OP_FLAG)) {
6773 return rb_fix_mul_fix(recv, obj);
6775 else if (FLONUM_2_P(recv, obj) &&
6776 BASIC_OP_UNREDEFINED_P(BOP_MULT, FLOAT_REDEFINED_OP_FLAG)) {
6784 BASIC_OP_UNREDEFINED_P(BOP_MULT, FLOAT_REDEFINED_OP_FLAG)) {
6795 if (FIXNUM_2_P(recv, obj) &&
6796 BASIC_OP_UNREDEFINED_P(BOP_DIV, INTEGER_REDEFINED_OP_FLAG)) {
        return (FIX2LONG(obj) == 0) ? Qundef : rb_fix_div_fix(recv, obj);
6799 else if (FLONUM_2_P(recv, obj) &&
6800 BASIC_OP_UNREDEFINED_P(BOP_DIV, FLOAT_REDEFINED_OP_FLAG)) {
6801 return rb_flo_div_flo(recv, obj);
6808 BASIC_OP_UNREDEFINED_P(BOP_DIV, FLOAT_REDEFINED_OP_FLAG)) {
6809 return rb_flo_div_flo(recv, obj);
6819 if (FIXNUM_2_P(recv, obj) &&
6820 BASIC_OP_UNREDEFINED_P(BOP_MOD, INTEGER_REDEFINED_OP_FLAG)) {
        return (FIX2LONG(obj) == 0) ? Qundef : rb_fix_mod_fix(recv, obj);
6823 else if (FLONUM_2_P(recv, obj) &&
6824 BASIC_OP_UNREDEFINED_P(BOP_MOD, FLOAT_REDEFINED_OP_FLAG)) {
6832 BASIC_OP_UNREDEFINED_P(BOP_MOD, FLOAT_REDEFINED_OP_FLAG)) {
6843 if (vm_method_cfunc_is(iseq, cd, recv, rb_obj_not_equal)) {
6844 VALUE val = opt_equality(iseq, recv, obj, cd_eq);
6846 if (!UNDEF_P(val)) {
            return RBOOL(!RTEST(val));
6857 if (FIXNUM_2_P(recv, obj) &&
6858 BASIC_OP_UNREDEFINED_P(BOP_LT, INTEGER_REDEFINED_OP_FLAG)) {
6861 else if (FLONUM_2_P(recv, obj) &&
6862 BASIC_OP_UNREDEFINED_P(BOP_LT, FLOAT_REDEFINED_OP_FLAG)) {
6870 BASIC_OP_UNREDEFINED_P(BOP_LT, FLOAT_REDEFINED_OP_FLAG)) {
6881 if (FIXNUM_2_P(recv, obj) &&
6882 BASIC_OP_UNREDEFINED_P(BOP_LE, INTEGER_REDEFINED_OP_FLAG)) {
6885 else if (FLONUM_2_P(recv, obj) &&
6886 BASIC_OP_UNREDEFINED_P(BOP_LE, FLOAT_REDEFINED_OP_FLAG)) {
6894 BASIC_OP_UNREDEFINED_P(BOP_LE, FLOAT_REDEFINED_OP_FLAG)) {
6905 if (FIXNUM_2_P(recv, obj) &&
6906 BASIC_OP_UNREDEFINED_P(BOP_GT, INTEGER_REDEFINED_OP_FLAG)) {
6909 else if (FLONUM_2_P(recv, obj) &&
6910 BASIC_OP_UNREDEFINED_P(BOP_GT, FLOAT_REDEFINED_OP_FLAG)) {
6918 BASIC_OP_UNREDEFINED_P(BOP_GT, FLOAT_REDEFINED_OP_FLAG)) {
6929 if (FIXNUM_2_P(recv, obj) &&
6930 BASIC_OP_UNREDEFINED_P(BOP_GE, INTEGER_REDEFINED_OP_FLAG)) {
6933 else if (FLONUM_2_P(recv, obj) &&
6934 BASIC_OP_UNREDEFINED_P(BOP_GE, FLOAT_REDEFINED_OP_FLAG)) {
6942 BASIC_OP_UNREDEFINED_P(BOP_GE, FLOAT_REDEFINED_OP_FLAG)) {
6958 BASIC_OP_UNREDEFINED_P(BOP_LTLT, STRING_REDEFINED_OP_FLAG)) {
6967 BASIC_OP_UNREDEFINED_P(BOP_LTLT, ARRAY_REDEFINED_OP_FLAG)) {
6985 BASIC_OP_UNREDEFINED_P(BOP_AND, INTEGER_REDEFINED_OP_FLAG)) {
6996 if (FIXNUM_2_P(recv, obj) &&
6997 BASIC_OP_UNREDEFINED_P(BOP_OR, INTEGER_REDEFINED_OP_FLAG)) {
7009 if (FIXNUM_2_P(recv, obj) &&
7010 BASIC_OP_UNREDEFINED_P(BOP_AREF, INTEGER_REDEFINED_OP_FLAG)) {
7011 return rb_fix_aref(recv, obj);
7016 BASIC_OP_UNREDEFINED_P(BOP_AREF, ARRAY_REDEFINED_OP_FLAG)) {
            return rb_ary_entry_internal(recv, FIX2LONG(obj));
7021 return rb_ary_aref1(recv, obj);
7025 BASIC_OP_UNREDEFINED_P(BOP_AREF, HASH_REDEFINED_OP_FLAG)) {
7026 return rb_hash_aref(recv, obj);
7040 BASIC_OP_UNREDEFINED_P(BOP_ASET, ARRAY_REDEFINED_OP_FLAG) &&
7046 BASIC_OP_UNREDEFINED_P(BOP_ASET, HASH_REDEFINED_OP_FLAG)) {
7047 rb_hash_aset(recv, obj, set);
vm_opt_length(VALUE recv, int bop)
7062 BASIC_OP_UNREDEFINED_P(bop, STRING_REDEFINED_OP_FLAG)) {
7063 if (bop == BOP_EMPTY_P) {
7064 return LONG2NUM(RSTRING_LEN(recv));
7071 BASIC_OP_UNREDEFINED_P(bop, ARRAY_REDEFINED_OP_FLAG)) {
7075 BASIC_OP_UNREDEFINED_P(bop, HASH_REDEFINED_OP_FLAG)) {
7084vm_opt_empty_p(VALUE recv)
7086 switch (vm_opt_length(recv, BOP_EMPTY_P)) {
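/* Note the division of labor: for BOP_EMPTY_P, vm_opt_length() returns a
 * raw length (RSTRING_LEN byte length for strings, which is zero exactly
 * when the string is empty, so the costlier character-count
 * rb_str_length() is skipped), and this switch maps INT2FIX(0) to Qtrue,
 * any other length to Qfalse, and Qundef to the fallback path. */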
7099 BASIC_OP_UNREDEFINED_P(BOP_NIL_P, NIL_REDEFINED_OP_FLAG)) {
7102 else if (vm_method_cfunc_is(iseq, cd, recv, rb_false)) {
7118 case RSHIFT(~0UL, 1):
7121 return rb_uint2big(1UL << (SIZEOF_LONG * CHAR_BIT - 2));
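/* This case arm catches the one Fixnum whose successor overflows:
 * RSHIFT(~0UL, 1) is the tagged representation of FIXNUM_MAX, so succ
 * must promote to a Bignum, and rb_uint2big() builds 2**(bits-2), i.e.
 * 1UL << (SIZEOF_LONG * CHAR_BIT - 2) on this build. */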
7139vm_opt_succ(VALUE recv)
7142 BASIC_OP_UNREDEFINED_P(BOP_SUCC, INTEGER_REDEFINED_OP_FLAG)) {
7143 return fix_succ(recv);
7149 BASIC_OP_UNREDEFINED_P(BOP_SUCC, STRING_REDEFINED_OP_FLAG)) {
7160 if (vm_method_cfunc_is(iseq, cd, recv, rb_obj_not)) {
7161 return RBOOL(!RTEST(recv));
7176 BASIC_OP_UNREDEFINED_P(BOP_MATCH, STRING_REDEFINED_OP_FLAG)) {
7180 BASIC_OP_UNREDEFINED_P(BOP_MATCH, REGEXP_REDEFINED_OP_FLAG)) {
7198 VALUE self = GET_SELF();
7200 VM_ASSERT(rb_popcount64((uint64_t)event) == 1);
7202 if (local_hooks) local_hooks->running++;
7204 if (event & global_hooks->events) {
7207 vm_dtrace(event, ec);
7208 rb_exec_event_hook_orig(ec, global_hooks, event, self, 0, 0, 0, val, 0);
7212 if (local_hooks) local_hooks->running--;
7213 if (local_hooks != NULL) {
7214 if (event & local_hooks->events) {
7217 rb_exec_event_hook_orig(ec, local_hooks, event, self, 0, 0, 0, val, 0);
7223#define VM_TRACE_HOOK(target_event, val) do { \
7224 if ((pc_events & (target_event)) & enabled_flags) { \
7225 vm_trace_hook(ec, reg_cfp, pc, pc_events, (target_event), global_hooks, local_hooks, (val)); \
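/* VM_TRACE_HOOK fires one event class per expansion: pc_events is the
 * whole set attached to this PC, each use masks it against a single
 * target_event plus the enabled flags, and vm_trace expands the macro
 * once per event so the firing order is fixed by source order rather
 * than by iterating an unordered bit set. */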
7232 VM_ASSERT(VM_FRAME_RUBYFRAME_P(cfp));
7233 VM_ASSERT(ISEQ_BODY(cfp->iseq)->type == ISEQ_TYPE_RESCUE);
7234 return cfp->ep[VM_ENV_INDEX_LAST_LVAR];
7240 const VALUE *pc = reg_cfp->pc;
7242 rb_event_flag_t enabled_flags = r->pub.hooks.events & ISEQ_TRACE_EVENTS;
7245 if (enabled_flags == 0 && rb_ractor_targeted_hooks_cnt(r) == 0) {
7250 size_t pos = pc - ISEQ_BODY(iseq)->iseq_encoded;
7252 unsigned int local_hooks_cnt = iseq->aux.exec.local_hooks_cnt;
7254 if (RB_UNLIKELY(local_hooks_cnt > 0)) {
7256 if (st_lookup(rb_ractor_targeted_hooks(r), (st_data_t)iseq, &val)) {
7260 rb_event_flag_t iseq_local_events = local_hooks != NULL ? local_hooks->events : 0;
7264 const bool bmethod_frame = VM_FRAME_BMETHOD_P(reg_cfp);
7265 enabled_flags |= iseq_local_events;
7267 VM_ASSERT((iseq_local_events & ~ISEQ_TRACE_EVENTS) == 0);
7269 if (bmethod_frame) {
7271 VM_ASSERT(me->def->type == VM_METHOD_TYPE_BMETHOD);
7272 unsigned int bmethod_hooks_cnt = me->def->body.bmethod.local_hooks_cnt;
7273 if (RB_UNLIKELY(bmethod_hooks_cnt > 0)) {
7275 if (st_lookup(rb_ractor_targeted_hooks(r), (st_data_t)me->def, &val)) {
7278 if (bmethod_local_hooks) {
7279 bmethod_local_events = bmethod_local_hooks->events;
7284 if ((pc_events & enabled_flags) == 0 && !bmethod_frame) {
7288 rb_iseq_trace_set(iseq, vm_event_flags & ISEQ_TRACE_EVENTS);
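/* No enabled event can fire at this PC and this is not a bmethod frame,
 * so the iseq's instrumentation is resynchronized with the global
 * vm_event_flags: rb_iseq_trace_set() swaps trace_* instruction variants
 * in or out, letting later executions skip vm_trace() entirely. */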
7296 else if (ec->trace_arg != NULL) {
7304 rb_event_flag_t bmethod_events = ractor_events | bmethod_local_events;
7307 ruby_debug_printf("vm_trace>>%4d (%4x) - %s:%d %s\n",
7310 RSTRING_PTR(rb_iseq_path(iseq)),
7311 (int)rb_iseq_line_no(iseq, pos),
7312 RSTRING_PTR(rb_iseq_label(iseq)));
7314 VM_ASSERT(reg_cfp->pc == pc);
7315 VM_ASSERT(pc_events != 0);
7325 VM_TRACE_HOOK(RUBY_EVENT_COVERAGE_LINE, Qundef);
7326 VM_TRACE_HOOK(RUBY_EVENT_COVERAGE_BRANCH, Qundef);
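/* Coverage events carry no meaningful return value, so Qundef is passed
 * in the val slot. */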
7337#if VM_CHECK_MODE > 0
7338NORETURN( NOINLINE( COLDFUNC
7339void rb_vm_canary_is_found_dead(enum ruby_vminsn_type i, VALUE c)));
7342Init_vm_stack_canary(void)
7345 int n = ruby_fill_random_bytes(&vm_stack_canary, sizeof vm_stack_canary, false);
7346 vm_stack_canary |= 0x01;
7348 vm_stack_canary_was_born = true;
7353rb_vm_canary_is_found_dead(enum ruby_vminsn_type i, VALUE c)
7357 const char *insn = rb_insns_name(i);
7361 rb_bug("dead canary found at %s: %s", insn, str);
7365void Init_vm_stack_canary(void) { }
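/* A rough sketch of the canary protocol, assuming the SETUP_CANARY /
 * CHECK_CANARY macros used around leaf instructions: a random VALUE with
 * the low bit forced on (the |= 0x01 above keeps it Fixnum-tagged, so
 * the GC never chases it as a pointer) is written to the stack slot just
 * above sp before a leaf insn runs; if the slot has changed afterwards,
 * the insn wrote past its declared stack usage and
 * rb_vm_canary_is_found_dead() reports it via rb_bug(). */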
7397 return (*(rb_invoke_funcptr0_t)funcptr)(ec, self);
7404 return (*(rb_invoke_funcptr1_t)funcptr)(ec, self, argv[0]);
7411 return (*(rb_invoke_funcptr2_t)funcptr)(ec, self, argv[0], argv[1]);
7418 return (*(rb_invoke_funcptr3_t)funcptr)(ec, self, argv[0], argv[1], argv[2]);
7425 return (*(rb_invoke_funcptr4_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3]);
7432 return (*(rb_invoke_funcptr5_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4]);
7439 return (*(rb_invoke_funcptr6_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
7446 return (*(rb_invoke_funcptr7_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
7453 return (*(rb_invoke_funcptr8_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
7459 typedef VALUE (*rb_invoke_funcptr9_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9);
7460 return (*(rb_invoke_funcptr9_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
7466 typedef VALUE (*rb_invoke_funcptr10_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10);
7467 return (*(rb_invoke_funcptr10_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
7473 typedef VALUE (*rb_invoke_funcptr11_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11);
7474 return (*(rb_invoke_funcptr11_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
7480 typedef VALUE (*rb_invoke_funcptr12_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11, VALUE v12);
7481 return (*(rb_invoke_funcptr12_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
7487 typedef VALUE (*rb_invoke_funcptr13_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11, VALUE v12, VALUE v13);
7488 return (*(rb_invoke_funcptr13_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
7494 typedef VALUE (*rb_invoke_funcptr14_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11, VALUE v12, VALUE v13, VALUE v14);
7495 return (*(rb_invoke_funcptr14_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
7501 typedef VALUE (*rb_invoke_funcptr15_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11, VALUE v12, VALUE v13, VALUE v14, VALUE v15);
7502 return (*(rb_invoke_funcptr15_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
7507static builtin_invoker
7508lookup_builtin_invoker(int argc)
7510 static const builtin_invoker invokers[] = {
7529 return invokers[argc];
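/* C offers no portable way to call a function pointer with a
 * runtime-determined argument count, so one trampoline is generated per
 * arity (0..15 above) and selected by argc.  A hypothetical two-argument
 * builtin is thus reached as
 *
 *   invokers[2](ec, self, argv, funcptr)
 *     -> (*(rb_invoke_funcptr2_t)funcptr)(ec, self, argv[0], argv[1]);
 */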
7535 const bool canary_p = ISEQ_BODY(reg_cfp->iseq)->builtin_attrs & BUILTIN_ATTR_LEAF;
7536 SETUP_CANARY(canary_p);
7537 rb_insn_func_t func_ptr = (rb_insn_func_t)(uintptr_t)bf->func_ptr;
7538 VALUE ret = (*lookup_builtin_invoker(bf->argc))(ec, reg_cfp->self, argv, func_ptr);
7539 CHECK_CANARY(canary_p, BIN(invokebuiltin));
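/* The canary is only armed for BUILTIN_ATTR_LEAF builtins: a leaf
 * function promises not to push VM frames or re-enter Ruby, which is
 * what makes the watched stack slot stable enough to verify.  canary_p
 * gates both SETUP_CANARY and CHECK_CANARY. */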
7546 return invoke_bf(ec, cfp, bf, argv);
7553 fputs("vm_invoke_builtin_delegate: passing -> ", stderr);
7554 for (int i=0; i<bf->argc; i++) {
7555 ruby_debug_printf(":%s ", rb_id2name(ISEQ_BODY(cfp->iseq)->local_table[i+start_index]));
7557 ruby_debug_printf("\n" "%s %s(%d):%p\n", RUBY_FUNCTION_NAME_STRING, bf->name, bf->argc,
7558 (void *)(uintptr_t)bf->func_ptr);
7561 if (bf->argc == 0) {
7562 return invoke_bf(ec, cfp, bf, NULL);
7565 const VALUE *argv = cfp->ep - ISEQ_BODY(cfp->iseq)->local_table_size - VM_ENV_DATA_SIZE + 1 + start_index;
7566 return invoke_bf(ec, cfp, bf, argv);
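/* argv is recovered from the frame layout rather than passed explicitly:
 * locals live below ep with VM_ENV_DATA_SIZE env slots on top, so
 * ep - local_table_size - VM_ENV_DATA_SIZE + 1 addresses local 0, and
 * + start_index skips leading locals that are not forwarded.  For
 * illustration, with 3 locals, VM_ENV_DATA_SIZE == 3 and start_index 1,
 * argv would be ep - 4, i.e. the slot holding local 1. */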
7576 return cfp->ep[index];