11#include "ruby/internal/config.h"
15#ifdef HAVE_STDATOMIC_H
16 #include <stdatomic.h>
20#include "debug_counter.h"
22#include "internal/class.h"
23#include "internal/compar.h"
24#include "internal/hash.h"
25#include "internal/numeric.h"
26#include "internal/proc.h"
27#include "internal/random.h"
28#include "internal/variable.h"
29#include "internal/set_table.h"
30#include "internal/struct.h"
35#include "insns_info.inc"
41 int argc,
const VALUE *argv,
int priv);
51ruby_vm_special_exception_copy(
VALUE exc)
54 rb_obj_copy_ivar(e, exc);
62 VALUE mesg = rb_ec_vm_ptr(ec)->special_exceptions[ruby_error_sysstack];
63 ec->raised_flag = RAISED_STACKOVERFLOW;
65 VALUE at = rb_ec_backtrace_object(ec);
66 mesg = ruby_vm_special_exception_copy(mesg);
71 EC_JUMP_TAG(ec, TAG_RAISE);
74NORETURN(
static void vm_stackoverflow(
void));
79 ec_stack_overflow(GET_EC(), TRUE);
86 rb_bug(
"system stack overflow during GC. Faulty native extension?");
88 if (crit >= rb_stack_overflow_fatal) {
89 ec->raised_flag = RAISED_STACKOVERFLOW;
90 ec->errinfo = rb_ec_vm_ptr(ec)->special_exceptions[ruby_error_stackfatal];
91 EC_JUMP_TAG(ec, TAG_RAISE);
93 ec_stack_overflow(ec, crit < rb_stack_overflow_signal);
100callable_class_p(
VALUE klass)
102#if VM_CHECK_MODE >= 2
103 if (!klass)
return FALSE;
131 VM_ASSERT(IMEMO_TYPE_P((
VALUE)cme, imemo_ment),
"imemo_type:%s", rb_imemo_name(imemo_type((
VALUE)cme)));
133 if (callable_class_p(cme->defined_class)) {
143vm_check_frame_detail(
VALUE type,
int req_block,
int req_me,
int req_cref,
VALUE specval,
VALUE cref_or_me,
int is_cframe,
const rb_iseq_t *iseq)
145 unsigned int magic = (
unsigned int)(
type & VM_FRAME_MAGIC_MASK);
146 enum imemo_type cref_or_me_type = imemo_env;
149 cref_or_me_type = imemo_type(cref_or_me);
151 if (
type & VM_FRAME_FLAG_BMETHOD) {
155 if (req_block && (
type & VM_ENV_FLAG_LOCAL) == 0) {
156 rb_bug(
"vm_push_frame: specval (%p) should be a block_ptr on %x frame", (
void *)specval, magic);
158 if (!req_block && (
type & VM_ENV_FLAG_LOCAL) != 0) {
159 rb_bug(
"vm_push_frame: specval (%p) should not be a block_ptr on %x frame", (
void *)specval, magic);
163 if (cref_or_me_type != imemo_ment) {
164 rb_bug(
"vm_push_frame: (%s) should be method entry on %x frame", rb_obj_info(cref_or_me), magic);
168 if (req_cref && cref_or_me_type != imemo_cref) {
169 rb_bug(
"vm_push_frame: (%s) should be CREF on %x frame", rb_obj_info(cref_or_me), magic);
172 if (cref_or_me !=
Qfalse && cref_or_me_type != imemo_cref) {
173 if (((
type & VM_FRAME_FLAG_LAMBDA) || magic == VM_FRAME_MAGIC_IFUNC || magic == VM_FRAME_MAGIC_DUMMY) && (cref_or_me_type == imemo_ment)) {
177 rb_bug(
"vm_push_frame: (%s) should be false or cref on %x frame", rb_obj_info(cref_or_me), magic);
183 if (cref_or_me_type == imemo_ment) {
186 if (!callable_method_entry_p(me)) {
187 rb_bug(
"vm_push_frame: ment (%s) should be callable on %x frame.", rb_obj_info(cref_or_me), magic);
191 if ((
type & VM_FRAME_MAGIC_MASK) == VM_FRAME_MAGIC_DUMMY) {
192 VM_ASSERT(iseq == NULL ||
194 RUBY_VM_NORMAL_ISEQ_P(iseq)
198 VM_ASSERT(is_cframe == !RUBY_VM_NORMAL_ISEQ_P(iseq));
208 VALUE given_magic =
type & VM_FRAME_MAGIC_MASK;
211#define CHECK(magic, req_block, req_me, req_cref, is_cframe) \
213 vm_check_frame_detail(type, req_block, req_me, req_cref, \
214 specval, cref_or_me, is_cframe, iseq); \
216 switch (given_magic) {
218 CHECK(VM_FRAME_MAGIC_METHOD, TRUE, TRUE, FALSE, FALSE);
219 CHECK(VM_FRAME_MAGIC_CLASS, TRUE, FALSE, TRUE, FALSE);
220 CHECK(VM_FRAME_MAGIC_TOP, TRUE, FALSE, TRUE, FALSE);
221 CHECK(VM_FRAME_MAGIC_CFUNC, TRUE, TRUE, FALSE, TRUE);
222 CHECK(VM_FRAME_MAGIC_BLOCK, FALSE, FALSE, FALSE, FALSE);
223 CHECK(VM_FRAME_MAGIC_IFUNC, FALSE, FALSE, FALSE, TRUE);
224 CHECK(VM_FRAME_MAGIC_EVAL, FALSE, FALSE, FALSE, FALSE);
225 CHECK(VM_FRAME_MAGIC_RESCUE, FALSE, FALSE, FALSE, FALSE);
226 CHECK(VM_FRAME_MAGIC_DUMMY, TRUE, FALSE, FALSE, FALSE);
228 rb_bug(
"vm_push_frame: unknown type (%x)", (
unsigned int)given_magic);
/* Canary value placed on the VM stack in checked builds; rb_vm_check_canary()
 * compares against it to detect instructions that clobber the stack slot
 * just above sp. */
static VALUE vm_stack_canary;
/* Set once the canary value has been initialized; rb_vm_check_canary()
 * returns early while this is still false. */
static bool vm_stack_canary_was_born = false;
241 unsigned int pos = 0;
242 while (pos < ISEQ_BODY(iseq)->iseq_size) {
243 int opcode = rb_vm_insn_addr2opcode((
void *)ISEQ_BODY(iseq)->iseq_encoded[pos]);
244 unsigned int next_pos = pos + insn_len(opcode);
245 if (ISEQ_BODY(iseq)->iseq_encoded + next_pos == pc) {
250 rb_bug(
"failed to find the previous insn");
259 if (! LIKELY(vm_stack_canary_was_born)) {
262 else if ((
VALUE *)reg_cfp == ec->vm_stack + ec->vm_stack_size) {
266 else if (! (iseq = GET_ISEQ())) {
269 else if (LIKELY(sp[0] != vm_stack_canary)) {
278 const VALUE *orig = rb_iseq_original_iseq(iseq);
279 const VALUE iseqw = rb_iseqw_new(iseq);
281 const char *stri = rb_str_to_cstr(inspection);
282 const VALUE disasm = rb_iseq_disasm(iseq);
283 const char *strd = rb_str_to_cstr(disasm);
284 const ptrdiff_t pos = previous_insn_index(iseq, GET_PC());
285 const enum ruby_vminsn_type insn = (
enum ruby_vminsn_type)orig[pos];
286 const char *name = insn_name(insn);
292 "We are killing the stack canary set by %s, "
293 "at %s@pc=%"PRIdPTR
"\n"
294 "watch out the C stack trace.\n"
296 name, stri, pos, strd);
297 rb_bug(
"see above.");
299#define vm_check_canary(ec, sp) rb_vm_check_canary(ec, sp)
302#define vm_check_canary(ec, sp)
303#define vm_check_frame(a, b, c, d)
308vm_push_frame_debug_counter_inc(
315 RB_DEBUG_COUNTER_INC(frame_push);
317 if (RUBY_VM_END_CONTROL_FRAME(ec) != prev_cfp) {
318 const bool curr = VM_FRAME_RUBYFRAME_P(reg_cfp);
319 const bool prev = VM_FRAME_RUBYFRAME_P(prev_cfp);
322 RB_DEBUG_COUNTER_INC(frame_R2R);
325 RB_DEBUG_COUNTER_INC(frame_R2C);
330 RB_DEBUG_COUNTER_INC(frame_C2R);
333 RB_DEBUG_COUNTER_INC(frame_C2C);
338 switch (
type & VM_FRAME_MAGIC_MASK) {
339 case VM_FRAME_MAGIC_METHOD: RB_DEBUG_COUNTER_INC(frame_push_method);
return;
340 case VM_FRAME_MAGIC_BLOCK: RB_DEBUG_COUNTER_INC(frame_push_block);
return;
341 case VM_FRAME_MAGIC_CLASS: RB_DEBUG_COUNTER_INC(frame_push_class);
return;
342 case VM_FRAME_MAGIC_TOP: RB_DEBUG_COUNTER_INC(frame_push_top);
return;
343 case VM_FRAME_MAGIC_CFUNC: RB_DEBUG_COUNTER_INC(frame_push_cfunc);
return;
344 case VM_FRAME_MAGIC_IFUNC: RB_DEBUG_COUNTER_INC(frame_push_ifunc);
return;
345 case VM_FRAME_MAGIC_EVAL: RB_DEBUG_COUNTER_INC(frame_push_eval);
return;
346 case VM_FRAME_MAGIC_RESCUE: RB_DEBUG_COUNTER_INC(frame_push_rescue);
return;
347 case VM_FRAME_MAGIC_DUMMY: RB_DEBUG_COUNTER_INC(frame_push_dummy);
return;
350 rb_bug(
"unreachable");
353#define vm_push_frame_debug_counter_inc(ec, cfp, t)
358rb_vm_stack_canary(
void)
361 return vm_stack_canary;
/* This file hard-codes the environment-pointer layout: the me/cref slot,
 * the special value, and the flags live at fixed negative offsets from EP
 * (read via ep[VM_ENV_DATA_INDEX_*] throughout). Pin those offsets here so
 * a header change fails loudly at compile time. */
STATIC_ASSERT(VM_ENV_DATA_INDEX_ME_CREF, VM_ENV_DATA_INDEX_ME_CREF == -2);
STATIC_ASSERT(VM_ENV_DATA_INDEX_SPECVAL, VM_ENV_DATA_INDEX_SPECVAL == -1);
STATIC_ASSERT(VM_ENV_DATA_INDEX_FLAGS, VM_ENV_DATA_INDEX_FLAGS == -0);
385 vm_check_frame(
type, specval, cref_or_me, iseq);
386 VM_ASSERT(local_size >= 0);
389 CHECK_VM_STACK_OVERFLOW0(cfp, sp, local_size + stack_max);
390 vm_check_canary(ec, sp);
395 for (
int i=0; i < local_size; i++) {
422 #if defined HAVE_DECL_ATOMIC_SIGNAL_FENCE && HAVE_DECL_ATOMIC_SIGNAL_FENCE
423 atomic_signal_fence(memory_order_seq_cst);
431 vm_push_frame_debug_counter_inc(ec, cfp,
type);
439 if (VMDEBUG == 2) SDR();
441 ec->cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
448 VALUE flags = ep[VM_ENV_DATA_INDEX_FLAGS];
450 if (VMDEBUG == 2) SDR();
452 RUBY_VM_CHECK_INTS(ec);
453 ec->cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
455 return flags & VM_FRAME_FLAG_FINISH;
461 vm_pop_frame(ec, ec->cfp, ec->cfp->ep);
469 rb_iseq_t *dmy_iseq = rb_iseq_alloc_with_dummy_path(fname);
473 VM_FRAME_MAGIC_DUMMY | VM_ENV_FLAG_LOCAL | VM_FRAME_FLAG_FINISH,
475 VM_BLOCK_HANDLER_NONE,
482 return (
VALUE)dmy_iseq;
487rb_arity_error_new(
int argc,
int min,
int max)
489 VALUE err_mess = rb_sprintf(
"wrong number of arguments (given %d, expected %d", argc, min);
497 rb_str_catf(err_mess,
"..%d", max);
504rb_error_arity(
int argc,
int min,
int max)
511NOINLINE(
static void vm_env_write_slowpath(
const VALUE *ep,
int index,
VALUE v));
514vm_env_write_slowpath(
const VALUE *ep,
int index,
VALUE v)
517 rb_gc_writebarrier_remember(VM_ENV_ENVVAL(ep));
518 VM_FORCE_WRITE(&ep[index], v);
519 VM_ENV_FLAGS_UNSET(ep, VM_ENV_FLAG_WB_REQUIRED);
520 RB_DEBUG_COUNTER_INC(lvar_set_slowpath);
525vm_env_write(
const VALUE *ep,
int index,
VALUE v)
527 VALUE flags = ep[VM_ENV_DATA_INDEX_FLAGS];
528 if (LIKELY((flags & VM_ENV_FLAG_WB_REQUIRED) == 0)) {
529 VM_STACK_ENV_WRITE(ep, index, v);
532 vm_env_write_slowpath(ep, index, v);
537rb_vm_env_write(
const VALUE *ep,
int index,
VALUE v)
539 vm_env_write(ep, index, v);
545 if (block_handler == VM_BLOCK_HANDLER_NONE) {
549 switch (vm_block_handler_type(block_handler)) {
550 case block_handler_type_iseq:
551 case block_handler_type_ifunc:
552 return rb_vm_make_proc(ec, VM_BH_TO_CAPT_BLOCK(block_handler),
rb_cProc);
553 case block_handler_type_symbol:
554 return rb_sym_to_proc(VM_BH_TO_SYMBOL(block_handler));
555 case block_handler_type_proc:
556 return VM_BH_TO_PROC(block_handler);
558 VM_UNREACHABLE(rb_vm_bh_to_procval);
567vm_svar_valid_p(
VALUE svar)
570 switch (imemo_type(svar)) {
579 rb_bug(
"vm_svar_valid_p: unknown type: %s", rb_obj_info(svar));
589 if (lep && (ec == NULL || ec->root_lep != lep)) {
590 svar = lep[VM_ENV_DATA_INDEX_ME_CREF];
593 svar = ec->root_svar;
596 VM_ASSERT(svar ==
Qfalse || vm_svar_valid_p(svar));
604 VM_ASSERT(vm_svar_valid_p((
VALUE)svar));
606 if (lep && (ec == NULL || ec->root_lep != lep)) {
607 vm_env_write(lep, VM_ENV_DATA_INDEX_ME_CREF, (
VALUE)svar);
610 RB_OBJ_WRITE(rb_ec_thread_ptr(ec)->self, &ec->root_svar, svar);
617 const struct vm_svar *svar = lep_svar(ec, lep);
622 case VM_SVAR_LASTLINE:
623 return svar->lastline;
624 case VM_SVAR_BACKREF:
625 return svar->backref;
627 const VALUE ary = svar->others;
642 struct vm_svar *svar = IMEMO_NEW(
struct vm_svar, imemo_svar, obj);
653 struct vm_svar *svar = lep_svar(ec, lep);
656 lep_svar_write(ec, lep, svar = svar_new((
VALUE)svar));
660 case VM_SVAR_LASTLINE:
663 case VM_SVAR_BACKREF:
667 VALUE ary = svar->others;
683 val = lep_svar_get(ec, lep, key);
686 VALUE backref = lep_svar_get(ec, lep, VM_SVAR_BACKREF);
703 rb_bug(
"unexpected back-ref");
716 VALUE backref = lep_svar_get(ec, lep, VM_SVAR_BACKREF);
726 return rb_reg_last_defined(backref);
728 rb_bug(
"unexpected back-ref");
732 nth = (int)(
type >> 1);
739check_method_entry(
VALUE obj,
int can_be_svar)
741 if (obj ==
Qfalse)
return NULL;
744 if (!
RB_TYPE_P(obj,
T_IMEMO)) rb_bug(
"check_method_entry: unknown type: %s", rb_obj_info(obj));
747 switch (imemo_type(obj)) {
758 rb_bug(
"check_method_entry: svar should not be there:");
765env_method_entry_unchecked(
VALUE obj,
int can_be_svar)
767 if (obj ==
Qfalse)
return NULL;
769 switch (imemo_type(obj)) {
786 const VALUE *ep = cfp->ep;
789 while (!VM_ENV_LOCAL_P(ep)) {
790 if ((me = check_method_entry(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) != NULL)
return me;
791 ep = VM_ENV_PREV_EP(ep);
794 return check_method_entry(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
800 const VALUE *ep = cfp->ep;
803 while (!VM_ENV_LOCAL_P_UNCHECKED(ep)) {
804 if ((me = env_method_entry_unchecked(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) != NULL)
return me;
805 ep = VM_ENV_PREV_EP_UNCHECKED(ep);
808 return env_method_entry_unchecked(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
814 switch (me->def->type) {
815 case VM_METHOD_TYPE_ISEQ:
816 return me->def->body.iseq.
iseqptr;
825 switch (me->def->type) {
826 case VM_METHOD_TYPE_ISEQ:
827 return me->def->body.iseq.
cref;
833#if VM_CHECK_MODE == 0
837check_cref(
VALUE obj,
int can_be_svar)
839 if (obj ==
Qfalse)
return NULL;
842 if (!
RB_TYPE_P(obj,
T_IMEMO)) rb_bug(
"check_cref: unknown type: %s", rb_obj_info(obj));
845 switch (imemo_type(obj)) {
856 rb_bug(
"check_method_entry: svar should not be there:");
863vm_env_cref(
const VALUE *ep)
867 while (!VM_ENV_LOCAL_P(ep)) {
868 if ((cref = check_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) != NULL)
return cref;
869 ep = VM_ENV_PREV_EP(ep);
872 return check_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
876is_cref(
const VALUE v,
int can_be_svar)
879 switch (imemo_type(v)) {
892vm_env_cref_by_cref(
const VALUE *ep)
894 while (!VM_ENV_LOCAL_P(ep)) {
895 if (is_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE))
return TRUE;
896 ep = VM_ENV_PREV_EP(ep);
898 return is_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
902cref_replace_with_duplicated_cref_each_frame(
const VALUE *vptr,
int can_be_svar,
VALUE parent)
904 const VALUE v = *vptr;
908 switch (imemo_type(v)) {
911 new_cref = vm_cref_dup(cref);
916 VM_FORCE_WRITE(vptr, (
VALUE)new_cref);
921 return cref_replace_with_duplicated_cref_each_frame(&((
struct vm_svar *)v)->
cref_or_me, FALSE, v);
925 rb_bug(
"cref_replace_with_duplicated_cref_each_frame: unreachable");
934vm_cref_replace_with_duplicated_cref(
const VALUE *ep)
936 if (vm_env_cref_by_cref(ep)) {
940 while (!VM_ENV_LOCAL_P(ep)) {
941 envval = VM_ENV_ESCAPED_P(ep) ? VM_ENV_ENVVAL(ep) :
Qfalse;
942 if ((cref = cref_replace_with_duplicated_cref_each_frame(&ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE, envval)) != NULL) {
945 ep = VM_ENV_PREV_EP(ep);
947 envval = VM_ENV_ESCAPED_P(ep) ? VM_ENV_ENVVAL(ep) :
Qfalse;
948 return cref_replace_with_duplicated_cref_each_frame(&ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE, envval);
951 rb_bug(
"vm_cref_dup: unreachable");
956vm_get_cref(
const VALUE *ep)
964 rb_bug(
"vm_get_cref: unreachable");
969rb_vm_get_cref(
const VALUE *ep)
971 return vm_get_cref(ep);
982 return vm_get_cref(cfp->ep);
986vm_get_const_key_cref(
const VALUE *ep)
992 if (CREF_DYNAMIC(cref) ||
993 RCLASS_CLONED_P(CREF_CLASS(cref))) {
996 cref = CREF_NEXT(cref);
1009 #define ADD_NEW_CREF(new_cref) \
1010 if (new_cref_tail) { \
1011 RB_OBJ_WRITE(new_cref_tail, &new_cref_tail->next, new_cref); \
1014 new_cref_head = new_cref; \
1016 new_cref_tail = new_cref;
1020 if (CREF_CLASS(cref) == old_klass) {
1021 new_cref = vm_cref_new_use_prev(new_klass, METHOD_VISI_UNDEF, FALSE, cref, FALSE);
1022 ADD_NEW_CREF(new_cref);
1023 return new_cref_head;
1025 new_cref = vm_cref_new_use_prev(CREF_CLASS(cref), METHOD_VISI_UNDEF, FALSE, cref, FALSE);
1026 cref = CREF_NEXT(cref);
1027 ADD_NEW_CREF(new_cref);
1033 return new_cref_head;
1042 prev_cref = vm_env_cref(ep);
1048 prev_cref = vm_env_cref(cfp->ep);
1052 return vm_cref_new(klass, METHOD_VISI_PUBLIC, FALSE, prev_cref, pushed_by_eval, singleton);
1056vm_get_cbase(
const VALUE *ep)
1058 const rb_cref_t *cref = vm_get_cref(ep);
1060 return CREF_CLASS_FOR_DEFINITION(cref);
1064vm_get_const_base(
const VALUE *ep)
1066 const rb_cref_t *cref = vm_get_cref(ep);
1069 if (!CREF_PUSHED_BY_EVAL(cref)) {
1070 return CREF_CLASS_FOR_DEFINITION(cref);
1072 cref = CREF_NEXT(cref);
1079vm_check_if_namespace(
VALUE klass)
1082 rb_raise(
rb_eTypeError,
"%+"PRIsVALUE
" is not a class/module", klass);
1087vm_ensure_not_refinement_module(
VALUE self)
1090 rb_warn(
"not defined at the refinement, but at the outer class/module");
1106 if (
NIL_P(orig_klass) && allow_nil) {
1108 const rb_cref_t *root_cref = vm_get_cref(ec->cfp->ep);
1112 while (root_cref && CREF_PUSHED_BY_EVAL(root_cref)) {
1113 root_cref = CREF_NEXT(root_cref);
1116 while (cref && CREF_NEXT(cref)) {
1117 if (CREF_PUSHED_BY_EVAL(cref)) {
1121 klass = CREF_CLASS(cref);
1123 cref = CREF_NEXT(cref);
1125 if (!
NIL_P(klass)) {
1129 if ((ce = rb_const_lookup(klass,
id))) {
1130 rb_const_warn_if_deprecated(ce, klass,
id);
1133 if (am == klass)
break;
1135 if (is_defined)
return 1;
1136 if (rb_autoloading_value(klass,
id, &av, NULL))
return av;
1138 goto search_continue;
1145 if (UNLIKELY(!rb_ractor_main_p())) {
1147 rb_raise(rb_eRactorIsolationError,
1148 "can not access non-shareable objects in constant %"PRIsVALUE
"::%"PRIsVALUE
" by non-main ractor.",
rb_class_path(klass), rb_id2str(
id));
1159 if (root_cref && !
NIL_P(CREF_CLASS(root_cref))) {
1160 klass = vm_get_iclass(ec->cfp, CREF_CLASS(root_cref));
1174 vm_check_if_namespace(orig_klass);
1176 return rb_public_const_defined_from(orig_klass,
id);
1179 return rb_public_const_get_from(orig_klass,
id);
1187 return vm_get_ev_const(ec, orig_klass,
id, allow_nil ==
Qtrue, 0);
1195 int allow_nil = TRUE;
1196 if (segments[0] == idNULL) {
1201 while (segments[idx]) {
1202 ID id = segments[idx++];
1203 val = vm_get_ev_const(ec, val,
id, allow_nil, 0);
1216 rb_bug(
"vm_get_cvar_base: no cref");
1219 while (CREF_NEXT(cref) &&
1220 (
NIL_P(CREF_CLASS(cref)) || RCLASS_SINGLETON_P(CREF_CLASS(cref)) ||
1221 CREF_PUSHED_BY_EVAL(cref) || CREF_SINGLETON(cref))) {
1222 cref = CREF_NEXT(cref);
1224 if (top_level_raise && !CREF_NEXT(cref)) {
1228 klass = vm_get_iclass(cfp, CREF_CLASS(cref));
1236ALWAYS_INLINE(
static void fill_ivar_cache(
const rb_iseq_t *iseq,
IVC ic,
const struct rb_callcache *cc,
int is_attr, attr_index_t index, shape_id_t shape_id));
1238fill_ivar_cache(
const rb_iseq_t *iseq,
IVC ic,
const struct rb_callcache *cc,
int is_attr, attr_index_t index, shape_id_t shape_id)
1241 vm_cc_attr_index_set(cc, index, shape_id);
1244 vm_ic_attr_index_set(iseq, ic, index, shape_id);
/* True when `cond` is false, or when `val` is Ractor-shareable.
 * I.e. shareability of `val` only matters under `cond`. */
#define ractor_incidental_shareable_p(cond, val) \
    (!(cond) || rb_ractor_shareable_p(val))
/* Variant keyed on an object: if `obj` is shareable, `val` must be too;
 * if `obj` is not shareable, the check passes unconditionally. */
#define ractor_object_incidental_shareable_p(obj, val) \
    ractor_incidental_shareable_p(rb_ractor_shareable_p(obj), val)
1260 return default_value;
1270 if (UNLIKELY(!rb_ractor_main_p())) {
1278 if (default_value ==
Qundef) {
1286 fields_obj = RCLASS_WRITABLE_FIELDS_OBJ(obj);
1290 fields_obj = rb_obj_fields(obj,
id);
1294 return default_value;
1299 shape_id_t shape_id = RBASIC_SHAPE_ID_FOR_READ(fields_obj);
1300 VALUE *ivar_list = rb_imemo_fields_ptr(fields_obj);
1302 shape_id_t cached_id;
1306 vm_cc_atomic_shape_and_index(cc, &cached_id, &index);
1309 vm_ic_atomic_shape_and_index(ic, &cached_id, &index);
1312 if (LIKELY(cached_id == shape_id)) {
1315 if (index == ATTR_INDEX_NOT_SET) {
1316 return default_value;
1319 val = ivar_list[index];
1320#if USE_DEBUG_COUNTER
1321 RB_DEBUG_COUNTER_INC(ivar_get_ic_hit);
1324 RB_DEBUG_COUNTER_INC(ivar_get_obj_hit);
1330#if USE_DEBUG_COUNTER
1332 if (cached_id != INVALID_SHAPE_ID) {
1333 RB_DEBUG_COUNTER_INC(ivar_get_cc_miss_set);
1336 RB_DEBUG_COUNTER_INC(ivar_get_cc_miss_unset);
1340 if (cached_id != INVALID_SHAPE_ID) {
1341 RB_DEBUG_COUNTER_INC(ivar_get_ic_miss_set);
1344 RB_DEBUG_COUNTER_INC(ivar_get_ic_miss_unset);
1347 RB_DEBUG_COUNTER_INC(ivar_get_ic_miss);
1350 RB_DEBUG_COUNTER_INC(ivar_get_obj_miss);
1354 if (UNLIKELY(rb_shape_too_complex_p(shape_id))) {
1358 RUBY_ASSERT(table == rb_imemo_fields_complex_tbl(fields_obj));
1360 if (!st_lookup(table,
id, &val)) {
1361 val = default_value;
1365 shape_id_t previous_cached_id = cached_id;
1366 if (rb_shape_get_iv_index_with_hint(shape_id,
id, &index, &cached_id)) {
1369 if (cached_id != previous_cached_id) {
1370 fill_ivar_cache(iseq, ic, cc, is_attr, index, cached_id);
1373 if (index == ATTR_INDEX_NOT_SET) {
1374 val = default_value;
1378 val = ivar_list[index];
1384 vm_cc_attr_index_initialize(cc, shape_id);
1387 vm_ic_attr_index_initialize(ic, shape_id);
1390 val = default_value;
1395 if (!UNDEF_P(default_value)) {
1403 RB_DEBUG_COUNTER_INC(ivar_get_ic_miss);
1406 return rb_attr_get(obj,
id);
1414populate_cache(attr_index_t index, shape_id_t next_shape_id,
ID id,
const rb_iseq_t *iseq,
IVC ic,
const struct rb_callcache *cc,
bool is_attr)
1416 RUBY_ASSERT(!rb_shape_too_complex_p(next_shape_id));
1420 vm_cc_attr_index_set(cc, index, next_shape_id);
1423 vm_ic_attr_index_set(iseq, ic, index, next_shape_id);
1435 RB_DEBUG_COUNTER_INC(ivar_set_ic_miss);
1437 rb_check_frozen(obj);
1439 attr_index_t index = rb_ivar_set_index(obj,
id, val);
1440 shape_id_t next_shape_id = RBASIC_SHAPE_ID(obj);
1442 if (!rb_shape_too_complex_p(next_shape_id)) {
1443 populate_cache(index, next_shape_id,
id, iseq, ic, cc, is_attr);
1446 RB_DEBUG_COUNTER_INC(ivar_set_obj_miss);
1456 return vm_setivar_slowpath(obj,
id, val, iseq, ic, NULL,
false);
1462 return vm_setivar_slowpath(obj,
id, val, NULL, NULL, cc,
true);
1465NOINLINE(
static VALUE vm_setivar_class(
VALUE obj,
ID id,
VALUE val, shape_id_t dest_shape_id, attr_index_t index));
1467vm_setivar_class(
VALUE obj,
ID id,
VALUE val, shape_id_t dest_shape_id, attr_index_t index)
1469 if (UNLIKELY(!rb_ractor_main_p())) {
1473 VALUE fields_obj = RCLASS_WRITABLE_FIELDS_OBJ(obj);
1474 if (UNLIKELY(!fields_obj)) {
1478 shape_id_t shape_id = RBASIC_SHAPE_ID(fields_obj);
1481 if (shape_id == dest_shape_id) {
1482 RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
1484 else if (dest_shape_id != INVALID_SHAPE_ID) {
1485 if (RSHAPE_DIRECT_CHILD_P(shape_id, dest_shape_id) && RSHAPE_EDGE_NAME(dest_shape_id) ==
id && RSHAPE_CAPACITY(shape_id) == RSHAPE_CAPACITY(dest_shape_id)) {
1486 RUBY_ASSERT(index < RSHAPE_CAPACITY(dest_shape_id));
1496 RB_OBJ_WRITE(fields_obj, &rb_imemo_fields_ptr(fields_obj)[index], val);
1498 if (shape_id != dest_shape_id) {
1499 RBASIC_SET_SHAPE_ID(obj, dest_shape_id);
1500 RBASIC_SET_SHAPE_ID(fields_obj, dest_shape_id);
1503 RB_DEBUG_COUNTER_INC(ivar_set_ic_hit);
1508NOINLINE(
static VALUE vm_setivar_default(
VALUE obj,
ID id,
VALUE val, shape_id_t dest_shape_id, attr_index_t index));
1510vm_setivar_default(
VALUE obj,
ID id,
VALUE val, shape_id_t dest_shape_id, attr_index_t index)
1512 shape_id_t shape_id = RBASIC_SHAPE_ID(obj);
1515 if (shape_id == dest_shape_id) {
1516 RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
1518 else if (dest_shape_id != INVALID_SHAPE_ID) {
1519 if (RSHAPE_DIRECT_CHILD_P(shape_id, dest_shape_id) && RSHAPE_EDGE_NAME(dest_shape_id) ==
id && RSHAPE_CAPACITY(shape_id) == RSHAPE_CAPACITY(dest_shape_id)) {
1520 RUBY_ASSERT(index < RSHAPE_CAPACITY(dest_shape_id));
1530 VALUE fields_obj = rb_obj_fields(obj,
id);
1532 RB_OBJ_WRITE(fields_obj, &rb_imemo_fields_ptr(fields_obj)[index], val);
1534 if (shape_id != dest_shape_id) {
1535 RBASIC_SET_SHAPE_ID(obj, dest_shape_id);
1536 RBASIC_SET_SHAPE_ID(fields_obj, dest_shape_id);
1539 RB_DEBUG_COUNTER_INC(ivar_set_ic_hit);
1545vm_setivar(
VALUE obj,
ID id,
VALUE val, shape_id_t dest_shape_id, attr_index_t index)
1553 shape_id_t shape_id = RBASIC_SHAPE_ID(obj);
1554 RUBY_ASSERT(dest_shape_id == INVALID_SHAPE_ID || !rb_shape_too_complex_p(dest_shape_id));
1556 if (LIKELY(shape_id == dest_shape_id)) {
1557 RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
1560 else if (dest_shape_id != INVALID_SHAPE_ID) {
1561 if (RSHAPE_DIRECT_CHILD_P(shape_id, dest_shape_id) && RSHAPE_EDGE_NAME(dest_shape_id) ==
id && RSHAPE_CAPACITY(shape_id) == RSHAPE_CAPACITY(dest_shape_id)) {
1562 RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
1564 RBASIC_SET_SHAPE_ID(obj, dest_shape_id);
1566 RUBY_ASSERT(index < RSHAPE_CAPACITY(dest_shape_id));
1581 RB_DEBUG_COUNTER_INC(ivar_set_ic_hit);
1582 RB_DEBUG_COUNTER_INC(ivar_set_obj_hit);
1588 RB_DEBUG_COUNTER_INC(ivar_set_ic_miss_noobject);
1600 VALUE defined_class = 0;
1604 defined_class =
RBASIC(defined_class)->klass;
1607 struct rb_id_table *rb_cvc_tbl = RCLASS_CVC_TBL(defined_class);
1609 rb_bug(
"the cvc table should be set");
1613 if (!rb_id_table_lookup(rb_cvc_tbl,
id, &ent_data)) {
1614 rb_bug(
"should have cvar cache entry");
1619 ent->global_cvar_state = GET_GLOBAL_CVAR_STATE();
1635 cref = vm_get_cref(GET_EP());
1637 if (ic->entry && ic->entry->global_cvar_state == GET_GLOBAL_CVAR_STATE() && ic->entry->cref == cref && LIKELY(rb_ractor_main_p())) {
1638 RB_DEBUG_COUNTER_INC(cvar_read_inline_hit);
1640 VALUE v = rb_ivar_lookup(ic->entry->class_value,
id,
Qundef);
1646 VALUE klass = vm_get_cvar_base(cref, reg_cfp, 1);
1648 return update_classvariable_cache(iseq, klass,
id, cref, ic);
1654 return vm_getclassvariable(iseq, cfp,
id, ic);
1661 cref = vm_get_cref(GET_EP());
1663 if (ic->entry && ic->entry->global_cvar_state == GET_GLOBAL_CVAR_STATE() && ic->entry->cref == cref && LIKELY(rb_ractor_main_p())) {
1664 RB_DEBUG_COUNTER_INC(cvar_write_inline_hit);
1666 rb_class_ivar_set(ic->entry->class_value,
id, val);
1670 VALUE klass = vm_get_cvar_base(cref, reg_cfp, 1);
1674 update_classvariable_cache(iseq, klass,
id, cref, ic);
1680 vm_setclassvariable(iseq, cfp,
id, val, ic);
1687 return vm_getivar(obj,
id, iseq, ic, NULL, FALSE,
Qnil);
1698 shape_id_t dest_shape_id;
1700 vm_ic_atomic_shape_and_index(ic, &dest_shape_id, &index);
1702 if (UNLIKELY(UNDEF_P(vm_setivar(obj,
id, val, dest_shape_id, index)))) {
1708 if (!UNDEF_P(vm_setivar_class(obj,
id, val, dest_shape_id, index))) {
1713 if (!UNDEF_P(vm_setivar_default(obj,
id, val, dest_shape_id, index))) {
1717 vm_setivar_slowpath_ivar(obj,
id, val, iseq, ic);
1724 vm_setinstancevariable(iseq, obj,
id, val, ic);
1730 return vm_getinstancevariable(iseq, obj,
id, ic);
1739 ec->tag->state = RUBY_TAG_FATAL;
1742 ec->tag->state = TAG_THROW;
1744 else if (THROW_DATA_P(err)) {
1745 ec->tag->state = THROW_DATA_STATE((
struct vm_throw_data *)err);
1748 ec->tag->state = TAG_RAISE;
1755 const int flag,
const VALUE throwobj)
1763 else if (state == TAG_BREAK) {
1765 const VALUE *ep = GET_EP();
1766 const rb_iseq_t *base_iseq = GET_ISEQ();
1767 escape_cfp = reg_cfp;
1769 while (ISEQ_BODY(base_iseq)->
type != ISEQ_TYPE_BLOCK) {
1770 if (ISEQ_BODY(CFP_ISEQ(escape_cfp))->
type == ISEQ_TYPE_CLASS) {
1771 escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
1772 ep = escape_cfp->ep;
1773 base_iseq = CFP_ISEQ(escape_cfp);
1776 ep = VM_ENV_PREV_EP(ep);
1777 base_iseq = ISEQ_BODY(base_iseq)->parent_iseq;
1778 escape_cfp = rb_vm_search_cf_from_ep(ec, escape_cfp, ep);
1779 VM_ASSERT(CFP_ISEQ(escape_cfp) == base_iseq);
1783 if (VM_FRAME_LAMBDA_P(escape_cfp)) {
1789 ep = VM_ENV_PREV_EP(ep);
1791 while (escape_cfp < eocfp) {
1792 if (escape_cfp->ep == ep) {
1793 const rb_iseq_t *
const iseq = CFP_ISEQ(escape_cfp);
1794 const VALUE epc = CFP_PC(escape_cfp) - ISEQ_BODY(iseq)->iseq_encoded;
1799 for (i=0; i < ct->size; i++) {
1801 UNALIGNED_MEMBER_PTR(ct, entries[i]);
1803 if (entry->type == CATCH_TYPE_BREAK &&
1804 entry->iseq == base_iseq &&
1805 entry->start < epc && entry->end >= epc) {
1806 if (entry->cont == epc) {
1815 escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
1820 rb_vm_localjump_error(
"break from proc-closure", throwobj, TAG_BREAK);
1823 else if (state == TAG_RETRY) {
1824 const VALUE *ep = VM_ENV_PREV_EP(GET_EP());
1826 escape_cfp = rb_vm_search_cf_from_ep(ec, reg_cfp, ep);
1828 else if (state == TAG_RETURN) {
1829 const VALUE *current_ep = GET_EP();
1830 const VALUE *target_ep = NULL, *target_lep, *ep = current_ep;
1831 int in_class_frame = 0;
1833 escape_cfp = reg_cfp;
1836 while (!VM_ENV_LOCAL_P(ep)) {
1837 if (VM_ENV_FLAGS(ep, VM_FRAME_FLAG_LAMBDA) && target_ep == NULL) {
1840 ep = VM_ENV_PREV_EP(ep);
1844 while (escape_cfp < eocfp) {
1845 const VALUE *lep = VM_CF_LEP(escape_cfp);
1851 if (lep == target_lep &&
1852 VM_FRAME_RUBYFRAME_P(escape_cfp) &&
1853 ISEQ_BODY(CFP_ISEQ(escape_cfp))->
type == ISEQ_TYPE_CLASS) {
1858 if (lep == target_lep) {
1859 if (VM_FRAME_LAMBDA_P(escape_cfp)) {
1861 if (in_class_frame) {
1866 const VALUE *tep = current_ep;
1868 while (target_lep != tep) {
1869 if (escape_cfp->ep == tep) {
1871 if (tep == target_ep) {
1875 goto unexpected_return;
1878 tep = VM_ENV_PREV_EP(tep);
1882 else if (VM_FRAME_RUBYFRAME_P(escape_cfp)) {
1883 switch (ISEQ_BODY(CFP_ISEQ(escape_cfp))->
type) {
1885 case ISEQ_TYPE_MAIN:
1887 if (in_class_frame)
goto unexpected_return;
1888 if (target_ep == NULL) {
1892 goto unexpected_return;
1896 case ISEQ_TYPE_EVAL: {
1897 const rb_iseq_t *is = CFP_ISEQ(escape_cfp);
1898 enum rb_iseq_type t = ISEQ_BODY(is)->type;
1899 while (t == ISEQ_TYPE_RESCUE || t == ISEQ_TYPE_ENSURE || t == ISEQ_TYPE_EVAL) {
1900 if (!(is = ISEQ_BODY(is)->parent_iseq))
break;
1901 t = ISEQ_BODY(is)->type;
1903 toplevel = t == ISEQ_TYPE_TOP || t == ISEQ_TYPE_MAIN;
1906 case ISEQ_TYPE_CLASS:
1915 if (escape_cfp->ep == target_lep && ISEQ_BODY(CFP_ISEQ(escape_cfp))->
type == ISEQ_TYPE_METHOD) {
1916 if (target_ep == NULL) {
1920 goto unexpected_return;
1924 escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
1927 rb_vm_localjump_error(
"unexpected return", throwobj, TAG_RETURN);
1933 rb_bug(
"isns(throw): unsupported throw type");
1936 ec->tag->state = state;
1937 return (
VALUE)THROW_DATA_NEW(throwobj, escape_cfp, state);
1942 rb_num_t throw_state,
VALUE throwobj)
1944 const int state = (int)(throw_state & VM_THROW_STATE_MASK);
1945 const int flag = (int)(throw_state & VM_THROW_NO_ESCAPE_FLAG);
1948 return vm_throw_start(ec, reg_cfp, state, flag, throwobj);
1951 return vm_throw_continue(ec, throwobj);
1958 return vm_throw(ec, reg_cfp, throw_state, throwobj);
1964 int is_splat = flag & 0x01;
1967 const VALUE obj = ary;
1979 if (num + is_splat == 0) {
1982 else if (flag & 0x02) {
1987 for (i = 0; i < num -
len; i++) {
1992 for (j = 0; i < num; i++, j++) {
2014 for (; i < num -
len; i++) {
2018 for (rb_num_t j = 0; i < num; i++, j++) {
2019 *cfp->sp++ = ptr[
len - j - 1];
2023 for (rb_num_t j = 0; j < num; j++) {
2024 *cfp->sp++ = ptr[num - j - 1];
2039 int initial_capa = 2;
2041#if VM_CHECK_MODE > 0
2042 ccs->debug_sig = ~(
VALUE)ccs;
2044 ccs->capa = initial_capa;
2049 rb_managed_id_table_insert(cc_tbl, mid, (
VALUE)ccs);
2057 if (! vm_cc_markable(cc)) {
2061 if (UNLIKELY(ccs->len == ccs->capa)) {
2064 ccs = ruby_xrealloc(ccs, vm_ccs_alloc_size(ccs->capa));
2065#if VM_CHECK_MODE > 0
2066 ccs->debug_sig = ~(
VALUE)ccs;
2069 rb_managed_id_table_insert(cc_tbl, mid, (
VALUE)ccs);
2071 VM_ASSERT(ccs->len < ccs->capa);
2073 const int pos = ccs->len++;
2074 ccs->entries[pos].argc = vm_ci_argc(ci);
2075 ccs->entries[pos].flag = vm_ci_flag(ci);
2078 if (RB_DEBUG_COUNTER_SETMAX(ccs_maxlen, ccs->len)) {
2084#if VM_CHECK_MODE > 0
2088 ruby_debug_printf(
"ccs:%p (%d,%d)\n", (
void *)ccs, ccs->len, ccs->capa);
2089 for (
int i=0; i<ccs->len; i++) {
2090 ruby_debug_printf(
"CCS CI ID:flag:%x argc:%u\n",
2091 ccs->entries[i].flag,
2092 ccs->entries[i].argc);
2093 rp(ccs->entries[i].cc);
2100 VM_ASSERT(vm_ccs_p(ccs));
2101 VM_ASSERT(ccs->len <= ccs->capa);
2103 for (
int i=0; i<ccs->len; i++) {
2106 VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
2107 VM_ASSERT(vm_cc_class_check(cc, klass));
2108 VM_ASSERT(vm_cc_check_cme(cc, ccs->cme));
2109 VM_ASSERT(!vm_cc_super_p(cc));
2110 VM_ASSERT(!vm_cc_refinement_p(cc));
2121 ASSERT_vm_locking();
2123 if (rb_multi_ractor_p()) {
2124 if (RCLASS_WRITABLE_CC_TBL(klass) != cc_tbl) {
2131 rb_managed_id_table_lookup(cc_tbl, mid, &ccs_obj);
2134 if (!ccs || !METHOD_ENTRY_INVALIDATED(ccs->cme)) {
2139 VALUE new_table = rb_vm_cc_table_dup(cc_tbl);
2140 rb_vm_cc_table_delete(new_table, mid);
2141 RB_OBJ_ATOMIC_WRITE(klass, &RCLASS_WRITABLE_CC_TBL(klass), new_table);
2144 rb_vm_cc_table_delete(cc_tbl, mid);
2151 ASSERT_vm_locking();
2153 RB_DEBUG_COUNTER_INC(cc_not_found_in_ccs);
2157 VM_ASSERT(cme == NULL || IMEMO_TYPE_P(cme, imemo_ment));
2161 VM_ASSERT(vm_cc_cme(&vm_empty_cc) == NULL);
2162 return &vm_empty_cc;
2165 VALUE cc_tbl = RCLASS_WRITABLE_CC_TBL(klass);
2166 const VALUE original_cc_table = cc_tbl;
2169 cc_tbl = rb_vm_cc_table_create(1);
2171 else if (rb_multi_ractor_p()) {
2172 cc_tbl = rb_vm_cc_table_dup(cc_tbl);
2175 VM_ASSERT(cme == rb_callable_method_entry(klass, mid));
2184 if (UNLIKELY(rb_managed_id_table_lookup(cc_tbl, mid, &ccs_obj))) {
2189 ccs = vm_ccs_create(klass, cc_tbl, mid, cme);
2193 cme = rb_check_overloaded_cme(cme, ci);
2195 const struct rb_callcache *cc = vm_cc_new(klass, cme, vm_call_general, cc_type_normal);
2196 vm_ccs_push(cc_tbl, mid, ccs, ci, cc);
2198 VM_ASSERT(vm_cc_cme(cc) != NULL);
2199 VM_ASSERT(cme->called_id == mid);
2200 VM_ASSERT(vm_cc_cme(cc)->called_id == mid);
2202 if (original_cc_table != cc_tbl) {
2203 RB_OBJ_ATOMIC_WRITE(klass, &RCLASS_WRITABLE_CC_TBL(klass), cc_tbl);
/* Fragment of vm_lookup_cc: probe klass's CC table for a cached
 * callcache matching (mid, argc, flags); evicts the entry set when its
 * method entry has been invalidated.  NOTE(review): interior lines are
 * missing from this extraction. */
2215 cc_tbl = RUBY_ATOMIC_VALUE_LOAD(RCLASS_WRITABLE_CC_TBL(klass));
2223 if (rb_managed_id_table_lookup(cc_tbl, mid, &ccs_obj)) {
2225 const int ccs_len = ccs->len;
/* Cached cme no longer valid: evict the whole ccs for this mid. */
2227 if (UNLIKELY(METHOD_ENTRY_INVALIDATED(ccs->cme))) {
2229 vm_evict_cc(klass, cc_tbl, mid);
2234 VM_ASSERT(vm_ccs_verify(ccs, mid, klass));
/* A ccs keys one entry per caller shape: match on argc + ci flags. */
2239 unsigned int argc = vm_ci_argc(ci);
2240 unsigned int flag = vm_ci_flag(ci);
2242 for (
int i=0; i<ccs_len; i++) {
2243 unsigned int ccs_ci_argc = ccs->entries[i].argc;
2244 unsigned int ccs_ci_flag = ccs->entries[i].flag;
2245 const struct rb_callcache *ccs_cc = ccs->entries[i].cc;
2247 VM_ASSERT(IMEMO_TYPE_P(ccs_cc, imemo_callcache));
2249 if (ccs_ci_argc == argc && ccs_ci_flag == flag) {
2250 RB_DEBUG_COUNTER_INC(cc_found_in_ccs);
2252 VM_ASSERT(vm_cc_cme(ccs_cc)->called_id == mid);
2253 VM_ASSERT(ccs_cc->klass == klass);
2254 VM_ASSERT(!METHOD_ENTRY_INVALIDATED(vm_cc_cme(ccs_cc)));
/* Fragment of vm_search_cc: look up a callcache for (klass, ci); on a
 * miss, populate a new one.  NOTE(review): locking lines are elided in
 * this extraction. */
2270 const ID mid = vm_ci_mid(ci);
2272 const struct rb_callcache *cc = vm_lookup_cc(klass, ci, mid);
/* Under multi-ractor another ractor may have populated the entry in
 * the meantime, so retry the lookup before creating a new cc. */
2278 if (rb_multi_ractor_p()) {
2281 cc = vm_lookup_cc(klass, ci, mid);
2285 cc = vm_populate_cc(klass, ci, mid);
/* Fragment of rb_vm_search_method_slowpath: delegate to vm_search_cc
 * and sanity-check the resulting callcache (checks compiled in only
 * under VM_CHECK_MODE). */
2299 cc = vm_search_cc(klass, ci);
2302 VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
2303 VM_ASSERT(cc == vm_cc_empty() || cc->klass == klass);
2304 VM_ASSERT(cc == vm_cc_empty() || callable_method_entry_p(vm_cc_cme(cc)));
2305 VM_ASSERT(cc == vm_cc_empty() || !METHOD_ENTRY_INVALIDATED(vm_cc_cme(cc)));
2306 VM_ASSERT(cc == vm_cc_empty() || vm_cc_cme(cc)->called_id == vm_ci_mid(ci));
/* Fragment of vm_search_method_slowpath0: refill the per-call-site
 * inline cache and, under USE_DEBUG_COUNTER, classify why the previous
 * inline-cache entry missed. */
2314#if USE_DEBUG_COUNTER
2318 const struct rb_callcache *cc = rb_vm_search_method_slowpath(cd->ci, klass);
2320#if OPT_INLINE_METHOD_CACHE
/* Write back only when there is a cd_owner (presumably the owning
 * iseq, for write-barrier purposes — TODO confirm) and the result is a
 * real cache entry, not the empty sentinel. */
2324 if (cd_owner && cc != empty_cc) {
2328#if USE_DEBUG_COUNTER
/* Debug-only accounting: which kind of inline-cache miss was this? */
2329 if (!old_cc || old_cc == empty_cc) {
2331 RB_DEBUG_COUNTER_INC(mc_inline_miss_empty);
2333 else if (old_cc == cc) {
2334 RB_DEBUG_COUNTER_INC(mc_inline_miss_same_cc);
2336 else if (vm_cc_cme(old_cc) == vm_cc_cme(cc)) {
2337 RB_DEBUG_COUNTER_INC(mc_inline_miss_same_cme);
2339 else if (vm_cc_cme(old_cc) && vm_cc_cme(cc) &&
2340 vm_cc_cme(old_cc)->def == vm_cc_cme(cc)->def) {
2341 RB_DEBUG_COUNTER_INC(mc_inline_miss_same_def);
2344 RB_DEBUG_COUNTER_INC(mc_inline_miss_diff);
2349 VM_ASSERT(vm_cc_cme(cc) == NULL ||
2350 vm_cc_cme(cc)->called_id == vm_ci_mid(cd->ci));
/* Fragment of vm_search_method_fastpath: inline-cache hit test.  A hit
 * requires the cached class to match the receiver's class and the
 * cached method entry to still be valid; otherwise fall through to the
 * slow path. */
2361#if OPT_INLINE_METHOD_CACHE
2362 if (LIKELY(vm_cc_class_check(cc, klass))) {
2363 if (LIKELY(!METHOD_ENTRY_INVALIDATED(vm_cc_cme(cc)))) {
2364 VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));
2365 RB_DEBUG_COUNTER_INC(mc_inline_hit);
2366 VM_ASSERT(vm_cc_cme(cc) == NULL ||
2367 (vm_ci_flag(cd->ci) & VM_CALL_SUPER) ||
2368 vm_cc_cme(cc)->called_id == vm_ci_mid(cd->ci));
2372 RB_DEBUG_COUNTER_INC(mc_inline_miss_invalidated);
2375 RB_DEBUG_COUNTER_INC(mc_inline_miss_klass);
/* Miss: recompute via the slow path, passing the frame's iseq as the
 * cd owner. */
2379 return vm_search_method_slowpath0((
VALUE)CFP_ISEQ(reg_cfp), cd, klass);
/* Fragments of the method-search wrappers: resolve a call site's
 * callcache and return its callable method entry (NULL when the method
 * is missing).  NOTE(review): two separate wrapper functions are
 * interleaved here; their signatures are elided in this extraction. */
2386 VM_ASSERT(klass !=
Qfalse);
2389 const struct rb_callcache *cc = vm_search_method_fastpath(reg_cfp, cd, klass);
2390 return vm_cc_cme(cc);
/* Variant that always takes the slow path with an explicit cd owner. */
2399 const struct rb_callcache *cc = vm_search_method_slowpath0(cd_owner, cd, klass);
2400 return vm_cc_cme(cc);
2403#if __has_attribute(transparent_union)
2416 VALUE (*f10)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE);
2417 VALUE (*f11)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE);
2418 VALUE (*f12)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE);
2419 VALUE (*f13)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE);
2420 VALUE (*f14)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE);
2421 VALUE (*f15)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE);
/* make_cfunc_type(f): wrap a raw C function pointer for comparison via
 * cfunc_type.  With transparent_union support the wrapper stores it
 * through the .anyargs member; otherwise (the #else branch) a direct
 * cast suffices. */
2424# define make_cfunc_type(f) (cfunc_type){.anyargs = (VALUE (*)(ANYARGS))(f)}
2427# define make_cfunc_type(f) (cfunc_type)(f)
/* Fragment of check_cfunc: true iff `me` is a CFUNC-type method whose
 * underlying C function pointer equals `func`.  The last line belongs
 * to a separate basic-method predicate (me exists and is flagged
 * METHOD_ENTRY_BASIC). */
2437 VM_ASSERT(IMEMO_TYPE_P(me, imemo_ment));
2438 VM_ASSERT(callable_method_entry_p(me));
/* Not a C-implemented method: cannot match a C function pointer. */
2440 if (me->def->type != VM_METHOD_TYPE_CFUNC) {
2444#if __has_attribute(transparent_union)
2445 return me->def->body.cfunc.func == func.anyargs;
2447 return me->def->body.cfunc.func == func;
2456 return me && METHOD_ENTRY_BASIC(me);
/* Fragments of vm_method_cfunc_is and related helpers: resolve the
 * method a call site (or receiver) would invoke and test whether it is
 * a given C function.  The macros at the end re-route bare function
 * pointers through make_cfunc_type so call sites need no casts. */
2462 VM_ASSERT(reg_cfp != NULL);
2464 return check_cfunc(cme, func);
2470 return check_cfunc(me, func);
/* Slow-path variant: search with the iseq as explicit cd owner. */
2479 const struct rb_callcache *cc = vm_search_method_slowpath0((
VALUE)iseq, cd, klass);
2481 return check_cfunc(cme, func);
2484#define check_cfunc(me, func) check_cfunc(me, make_cfunc_type(func))
2485#define vm_method_cfunc_is(reg_cfp, cd, recv, func) vm_method_cfunc_is(reg_cfp, cd, recv, make_cfunc_type(func))
/* EQ_UNREDEFINED_P(t): has the basic operator == for type t NOT been
 * redefined by user code? */
2487#define EQ_UNREDEFINED_P(t) BASIC_OP_UNREDEFINED_P(BOP_EQ, t##_REDEFINED_OP_FLAG)
/* Fragment of opt_equality_specialized: fast `==` for common operand
 * pairs (Fixnum, Flonum, and — per the elided branches — Float and
 * String) when == is unredefined; presumably returns Qundef when no
 * fast path applies (not visible here — TODO confirm). */
2519opt_equality_specialized(
VALUE recv,
VALUE obj)
2521 if (FIXNUM_2_P(recv, obj) && EQ_UNREDEFINED_P(INTEGER)) {
2522 goto compare_by_identity;
2524 else if (FLONUM_2_P(recv, obj) && EQ_UNREDEFINED_P(FLOAT)) {
2525 goto compare_by_identity;
2528 goto compare_by_identity;
/* Float path: compare unboxed doubles a and b (set on elided lines). */
2537 return RBOOL(a == b);
/* String path: byte-wise content equality. */
2544 return rb_str_eql_internal(obj, recv);
/* Immediate values compare equal iff their VALUE bits are identical. */
2549 compare_by_identity:
2550 return RBOOL(recv == obj);
/* Fragment of opt_equality: try the specialized fast paths first; when
 * none applied, treating == as identity is only valid if the resolved
 * method really is the default rb_obj_equal. */
2556 VM_ASSERT(reg_cfp != NULL);
2558 VALUE val = opt_equality_specialized(recv, obj);
2559 if (!UNDEF_P(val))
return val;
/* Not Object#== — the caller must fall back to a full dispatch. */
2561 if (!vm_method_cfunc_is(reg_cfp, cd, recv, rb_obj_equal)) {
2565 return RBOOL(recv == obj);
2569#undef EQ_UNREDEFINED_P
/* opt_equality_by_mid_slowpath: resolve `mid` on recv via the global
 * call cache; when the resolved method is the default rb_obj_equal,
 * equality reduces to identity. */
2572NOINLINE(
static VALUE opt_equality_by_mid_slowpath(
VALUE recv,
VALUE obj,
ID mid));
2575opt_equality_by_mid_slowpath(
VALUE recv,
VALUE obj,
ID mid)
2577 const struct rb_callcache *cc = gccct_method_search(GET_EC(), recv, mid, &VM_CI_ON_STACK(mid, 0, 1, NULL));
2579 if (cc && check_cfunc(vm_cc_cme(cc), rb_obj_equal)) {
2580 return RBOOL(recv == obj);
/* Fragment of opt_equality_by_mid: specialized fast path first, then
 * the slow path above. */
2590 VALUE val = opt_equality_specialized(recv, obj);
2591 if (!UNDEF_P(val)) {
2595 return opt_equality_by_mid_slowpath(recv, obj, mid);
/* rb_equal_opt / rb_eql_opt: optimized entry points for == and eql?. */
2602 return opt_equality_by_mid(obj1, obj2, idEq);
2608 return opt_equality_by_mid(obj1, obj2, idEqlP);
/* Fragment of check_match: dispatch on the VM_CHECKMATCH_TYPE_* kind.
 * A rescue-clause pattern must be a class or module (TypeError
 * otherwise); case/when matching invokes #=== with refinements
 * honored.  The default arm is unreachable by construction. */
2618 case VM_CHECKMATCH_TYPE_WHEN:
2620 case VM_CHECKMATCH_TYPE_RESCUE:
2622 rb_raise(
rb_eTypeError,
"class or module required for rescue clause");
2625 case VM_CHECKMATCH_TYPE_CASE: {
2626 return rb_vm_call_with_refinements(ec, pattern, idEqq, 1, &target,
RB_NO_KEYWORDS);
2629 rb_bug(
"check_match: unreachable");
/* double_cmp_{lt,le,gt,ge}: tiny helpers returning Qtrue/Qfalse for
 * the four double comparisons; used by the optimized comparison
 * instruction paths. */
2635double_cmp_lt(
double a,
double b)
2637 return RBOOL(a < b);
2641double_cmp_le(
double a,
double b)
2643 return RBOOL(a <= b);
2647double_cmp_gt(
double a,
double b)
2649 return RBOOL(a > b);
2653double_cmp_ge(
double a,
double b)
2655 return RBOOL(a >= b);
/* Fragment of vm_base_ptr: compute the base pointer (start of the
 * locals area) of a Ruby-level control frame from the previous frame's
 * sp plus the callee's local table size and VM_ENV_DATA_SIZE.
 * Forwardable iseqs additionally reserve the caller's argument count.
 * The final line is the public wrapper rb_vm_base_ptr delegating here.
 * NOTE(review): the non-ruby-frame return path is elided. */
2659static inline VALUE *
2664 if (CFP_ISEQ(cfp) && VM_FRAME_RUBYFRAME_P(cfp)) {
2665 VALUE *bp = prev_cfp->sp + ISEQ_BODY(CFP_ISEQ(cfp))->local_table_size + VM_ENV_DATA_SIZE;
/* Forwardable methods (`...`) also carry the caller's argument list. */
2667 if (ISEQ_BODY(CFP_ISEQ(cfp))->param.flags.forwardable && VM_ENV_LOCAL_P(cfp->ep)) {
2668 int lts = ISEQ_BODY(CFP_ISEQ(cfp))->local_table_size;
2669 int params = ISEQ_BODY(CFP_ISEQ(cfp))->param.size;
2672 bp += vm_ci_argc(ci);
2675 if (ISEQ_BODY(CFP_ISEQ(cfp))->
type == ISEQ_TYPE_METHOD || VM_FRAME_BMETHOD_P(cfp)) {
2679#if VM_DEBUG_BP_CHECK
/* Debug build: cross-check against the recorded bp and abort on skew. */
2680 if (bp != cfp->bp_check) {
2681 ruby_debug_printf(
"bp_check: %ld, bp: %ld\n",
2682 (
long)(cfp->bp_check - GET_EC()->vm_stack),
2683 (
long)(bp - GET_EC()->vm_stack));
2684 rb_bug(
"vm_base_ptr: unreachable");
2697 return vm_base_ptr(cfp);
/* vm_call_iseq_setup_{tailcall,normal}_0start: call handlers for iseq
 * calls whose optional-argument pc offset is 0.  The normal variant
 * reads param/local sizes from the callee iseq before delegating to
 * the generic setup routine. */
2712static vm_call_handler vm_call_iseq_setup_func(
const struct rb_callinfo *ci,
const int param_size,
const int local_size);
2717 RB_DEBUG_COUNTER_INC(ccf_iseq_setup_tailcall_0start);
2719 return vm_call_iseq_setup_tailcall(ec, cfp, calling, 0);
2725 RB_DEBUG_COUNTER_INC(ccf_iseq_setup_0start);
2728 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2729 int param = ISEQ_BODY(iseq)->param.size;
2730 int local = ISEQ_BODY(iseq)->local_table_size;
2731 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);
/* Parameter-shape predicates used to select specialized call
 * fastpaths.  The first fragment (rb_simple_iseq_p) is true when none
 * of the optional/rest/post/keyword/block-related flags are set;
 * rb_iseq_only_optparam_p allows exactly optional positionals;
 * rb_iseq_only_kwparam_p allows exactly keyword parameters. */
2737 return ISEQ_BODY(iseq)->param.flags.has_opt == FALSE &&
2738 ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
2739 ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
2740 ISEQ_BODY(iseq)->param.flags.has_kw == FALSE &&
2741 ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
2742 ISEQ_BODY(iseq)->param.flags.accepts_no_kwarg == FALSE &&
2743 ISEQ_BODY(iseq)->param.flags.forwardable == FALSE &&
2744 ISEQ_BODY(iseq)->param.flags.has_block == FALSE &&
2745 ISEQ_BODY(iseq)->param.flags.accepts_no_block == FALSE;
/* True iff the iseq takes optional positional parameters and nothing
 * else beyond leads. */
2749rb_iseq_only_optparam_p(
const rb_iseq_t *iseq)
2751 return ISEQ_BODY(iseq)->param.flags.has_opt == TRUE &&
2752 ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
2753 ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
2754 ISEQ_BODY(iseq)->param.flags.has_kw == FALSE &&
2755 ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
2756 ISEQ_BODY(iseq)->param.flags.accepts_no_kwarg == FALSE &&
2757 ISEQ_BODY(iseq)->param.flags.forwardable == FALSE &&
2758 ISEQ_BODY(iseq)->param.flags.has_block == FALSE &&
2759 ISEQ_BODY(iseq)->param.flags.accepts_no_block == FALSE;
/* True iff the iseq takes keyword parameters and nothing else beyond
 * leads. */
2763rb_iseq_only_kwparam_p(
const rb_iseq_t *iseq)
2765 return ISEQ_BODY(iseq)->param.flags.has_opt == FALSE &&
2766 ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
2767 ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
2768 ISEQ_BODY(iseq)->param.flags.has_kw == TRUE &&
2769 ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
2770 ISEQ_BODY(iseq)->param.flags.forwardable == FALSE &&
2771 ISEQ_BODY(iseq)->param.flags.has_block == FALSE &&
2772 ISEQ_BODY(iseq)->param.flags.accepts_no_block == FALSE;
/* ALLOW_HEAP_ARGV{,_KEEP_KWSPLAT}: sentinel max_args values telling
 * the splat expander it may spill arguments into a heap array instead
 * of the VM stack. */
2775#define ALLOW_HEAP_ARGV (-2)
2776#define ALLOW_HEAP_ARGV_KEEP_KWSPLAT (-3)
/* Fragment of vm_caller_setup_arg_splat: expand a splatted array onto
 * the VM stack, or into calling->heap_argv when expansion would exceed
 * VM_ARGC_STACK_MAX and the caller allows heap argv. */
2781 vm_check_canary(GET_EC(), cfp->sp);
2787 int argc = calling->argc;
2789 if (UNLIKELY(max_args <= ALLOW_HEAP_ARGV && len + argc > VM_ARGC_STACK_MAX)) {
2793 VALUE *argv = cfp->sp - argc;
/* Replace the on-stack args with a single heap-array reference. */
2797 cfp->sp -= argc - 1;
2798 cfp->sp[-1] = argv_ary;
2800 calling->heap_argv = argv_ary;
/* Callee caps the argument count: truncate the expansion, leaving the
 * surplus accounted in calling->argc. */
2806 if (max_args >= 0 &&
len + argc > max_args) {
2814 calling->argc +=
len - (max_args - argc + 1);
2815 len = max_args - argc + 1;
2824 calling->heap_argv = 0;
2826 CHECK_VM_STACK_OVERFLOW(cfp,
len);
/* Copy the array elements onto the VM stack. */
2828 for (i = 0; i <
len; i++) {
2829 *cfp->sp++ = ptr[i];
/* Fragment of vm_caller_setup_arg_kw: fold the trailing kw_len stack
 * values together with the call site's keyword symbols into a single
 * Hash, leave that Hash as the last argument, and mark kw_splat. */
2841 const VALUE *
const passed_keywords = vm_ci_kwarg(ci)->keywords;
2842 const int kw_len = vm_ci_kwarg(ci)->keyword_len;
2843 const VALUE h = rb_hash_new_with_size(kw_len);
2844 VALUE *sp = cfp->sp;
/* Pair keyword symbol i with the i-th of the kw_len topmost values. */
2847 for (i=0; i<kw_len; i++) {
2848 rb_hash_aset(h, passed_keywords[i], (sp - kw_len)[i]);
/* kw_len values collapsed into one hash: shrink sp and argc by
 * kw_len-1. */
2852 cfp->sp -= kw_len - 1;
2853 calling->argc -= kw_len - 1;
2854 calling->kw_splat = 1;
/* Fragment of vm_caller_setup_keyword_hash: normalize a **kwargs value
 * to a Hash the callee may safely consume.  NOTE(review): interior
 * lines are elided; per the visible lines, non-nil values are coerced
 * via rb_to_hash_type and dup'ed, and a caller-owned non-empty hash is
 * dup'ed before any mutation. */
2858vm_caller_setup_keyword_hash(
const struct rb_callinfo *ci,
VALUE keyword_hash)
2861 if (keyword_hash !=
Qnil) {
2863 keyword_hash = rb_hash_dup(rb_to_hash_type(keyword_hash));
/* Not marked mutable at the call site and non-empty: copy first. */
2866 else if (!IS_ARGS_KW_SPLAT_MUT(ci) && !
RHASH_EMPTY_P(keyword_hash)) {
2872 keyword_hash = rb_hash_dup(keyword_hash);
2874 return keyword_hash;
/* Fragment of the CALLER_SETUP_ARG worker: normalize a call site's
 * arguments (splat, keyword-splat, literal keywords) on the VM stack
 * before callee-side setup.  RHASH_PASS_AS_KEYWORDS flags a trailing
 * hash inside a heap argv as keyword arguments.  NOTE(review): many
 * interior lines are elided; branch pairings are inferred from the
 * visible structure and should be confirmed against the full source. */
2880 const struct rb_callinfo *restrict ci,
int max_args)
2882 if (UNLIKELY(IS_ARGS_SPLAT(ci))) {
/* Splat combined with **kwargs: sp[0] is the array, sp[1] the kw
 * hash. */
2883 if (IS_ARGS_KW_SPLAT(ci)) {
2885 VM_ASSERT(calling->kw_splat == 1);
2889 VALUE ary = cfp->sp[0];
2890 VALUE kwh = vm_caller_setup_keyword_hash(ci, cfp->sp[1]);
2893 if (vm_caller_setup_arg_splat(cfp, calling, ary, max_args))
return;
/* Args spilled to heap: tag the kw hash so it survives inside the
 * heap argv as keywords. */
2897 if (UNLIKELY(calling->heap_argv)) {
2899 ((
struct RHash *)kwh)->basic.flags |= RHASH_PASS_AS_KEYWORDS;
2900 if (max_args != ALLOW_HEAP_ARGV_KEEP_KWSPLAT) {
2901 calling->kw_splat = 0;
2909 VM_ASSERT(calling->kw_splat == 1);
2913 calling->kw_splat = 0;
/* Plain splat without keywords. */
2918 VM_ASSERT(calling->kw_splat == 0);
2922 VALUE ary = cfp->sp[0];
2924 if (vm_caller_setup_arg_splat(cfp, calling, ary, max_args)) {
2929 VALUE last_hash, argv_ary;
/* A trailing tagged hash inside the heap argv re-enables kw_splat. */
2930 if (UNLIKELY(argv_ary = calling->heap_argv)) {
2931 if (!IS_ARGS_KEYWORD(ci) &&
2934 (((
struct RHash *)last_hash)->basic.flags & RHASH_PASS_AS_KEYWORDS)) {
2939 calling->kw_splat = 1;
2945 if (!IS_ARGS_KEYWORD(ci) &&
2946 calling->argc > 0 &&
2948 (((
struct RHash *)last_hash)->basic.flags & RHASH_PASS_AS_KEYWORDS)) {
/* On-stack case: replace the trailing hash with a dup and mark it. */
2955 cfp->sp[-1] = rb_hash_dup(last_hash);
2956 calling->kw_splat = 1;
/* **kwargs without a positional splat: normalize sp[-1] in place. */
2962 else if (UNLIKELY(IS_ARGS_KW_SPLAT(ci))) {
2964 VM_ASSERT(calling->kw_splat == 1);
2965 VALUE kwh = vm_caller_setup_keyword_hash(ci, cfp->sp[-1]);
2970 calling->kw_splat = 0;
/* Literal keyword arguments: collapse them into one hash. */
2976 else if (UNLIKELY(IS_ARGS_KEYWORD(ci))) {
2978 VM_ASSERT(calling->kw_splat == 0);
2984 vm_caller_setup_arg_kw(cfp, calling, ci);
2988#define USE_OPT_HIST 0
2991#define OPT_HIST_MAX 64
2992static int opt_hist[OPT_HIST_MAX+1];
2996opt_hist_show_results_at_exit(
void)
2998 for (
int i=0; i<OPT_HIST_MAX; i++) {
2999 ruby_debug_printf(
"opt_hist\t%d\t%d\n", i, opt_hist[i]);
3009 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
3010 const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
3011 const int opt = calling->argc - lead_num;
3012 const int opt_num = ISEQ_BODY(iseq)->param.opt_num;
3013 const int opt_pc = (int)ISEQ_BODY(iseq)->param.opt_table[opt];
3014 const int param = ISEQ_BODY(iseq)->param.size;
3015 const int local = ISEQ_BODY(iseq)->local_table_size;
3016 const int delta = opt_num - opt;
3018 RB_DEBUG_COUNTER_INC(ccf_iseq_opt);
3021 if (opt_pc < OPT_HIST_MAX) {
3025 opt_hist[OPT_HIST_MAX]++;
3029 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), opt_pc, param - delta, local);
3037 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
3038 const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
3039 const int opt = calling->argc - lead_num;
3040 const int opt_pc = (int)ISEQ_BODY(iseq)->param.opt_table[opt];
3042 RB_DEBUG_COUNTER_INC(ccf_iseq_opt);
3045 if (opt_pc < OPT_HIST_MAX) {
3049 opt_hist[OPT_HIST_MAX]++;
3053 return vm_call_iseq_setup_tailcall(ec, cfp, calling, opt_pc);
3058 VALUE *
const passed_values,
const int passed_keyword_len,
const VALUE *
const passed_keywords,
3059 VALUE *
const locals);
3066 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
3067 int param_size = ISEQ_BODY(iseq)->param.size;
3068 int local_size = ISEQ_BODY(iseq)->local_table_size;
3071 VM_ASSERT(ISEQ_BODY(iseq)->param.flags.forwardable);
3073 local_size = local_size + vm_ci_argc(calling->cd->ci);
3074 param_size = param_size + vm_ci_argc(calling->cd->ci);
3076 cfp->sp[0] = (
VALUE)calling->cd->ci;
3078 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param_size, local_size);
3088 VM_ASSERT(vm_ci_flag(ci) & VM_CALL_KWARG);
3089 RB_DEBUG_COUNTER_INC(ccf_iseq_kw1);
3091 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
3092 const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;
3094 const int ci_kw_len = kw_arg->keyword_len;
3095 const VALUE *
const ci_keywords = kw_arg->keywords;
3096 VALUE *argv = cfp->sp - calling->argc;
3097 VALUE *
const klocals = argv + kw_param->bits_start - kw_param->num;
3098 const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
3100 MEMCPY(ci_kws, argv + lead_num,
VALUE, ci_kw_len);
3101 args_setup_kw_parameters(ec, iseq, vm_cc_cme(cc), ci_kws, ci_kw_len, ci_keywords, klocals);
3103 int param = ISEQ_BODY(iseq)->param.size;
3104 int local = ISEQ_BODY(iseq)->local_table_size;
3105 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);
3112 const struct rb_callinfo *MAYBE_UNUSED(ci) = calling->cd->ci;
3115 VM_ASSERT((vm_ci_flag(ci) & VM_CALL_KWARG) == 0);
3116 RB_DEBUG_COUNTER_INC(ccf_iseq_kw2);
3118 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
3119 const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;
3120 VALUE *
const argv = cfp->sp - calling->argc;
3121 VALUE *
const klocals = argv + kw_param->bits_start - kw_param->num;
3124 for (i=0; i<kw_param->num; i++) {
3125 klocals[i] = kw_param->default_values[i];
3132 int param = ISEQ_BODY(iseq)->param.size;
3133 int local = ISEQ_BODY(iseq)->local_table_size;
3134 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);
3144 cfp->sp -= (calling->argc + 1);
3145 rb_insn_func_t func_ptr = (rb_insn_func_t)(uintptr_t)bf->func_ptr;
3146 return builtin_invoker0(ec, calling->recv, NULL, func_ptr);
3155 set_table *dup_check_table = &vm->unused_block_warning_table;
3165 .v = (
VALUE)cme->def,
3169 if (!strict_unused_block) {
3170 key = (st_data_t)cme->def->original_id;
3172 if (set_table_lookup(dup_check_table, key)) {
3182 key |= (st_data_t)(k1.b[i] ^ k2.b[
SIZEOF_VALUE-1-i]) << (8 * i);
3187 fprintf(stderr,
"pc:%p def:%p\n", pc, (
void *)cme->def);
3188 fprintf(stderr,
"key:%p\n", (
void *)key);
3192 if (set_insert(dup_check_table, key)) {
3197 VALUE name = rb_gen_method_name(cme->defined_class, ISEQ_BODY(iseq)->location.base_label);
3199 if (!
NIL_P(m_loc)) {
3200 rb_warn(
"the block passed to '%"PRIsVALUE
"' defined at %"PRIsVALUE
":%"PRIsVALUE
" may be ignored",
3204 rb_warn(
"the block may be ignored because '%"PRIsVALUE
"' does not use a block", name);
3211 const rb_iseq_t *iseq,
VALUE *argv,
int param_size,
int local_size)
3216 VM_ASSERT((vm_ci_argc(ci), 1));
3217 VM_ASSERT(vm_cc_cme(cc) != NULL);
3219 if (UNLIKELY(!ISEQ_BODY(iseq)->param.flags.use_block &&
3220 calling->block_handler != VM_BLOCK_HANDLER_NONE &&
3221 !(vm_ci_flag(calling->cd->ci) & (VM_CALL_OPT_SEND | VM_CALL_SUPER)))) {
3222 warn_unused_block(vm_cc_cme(cc), iseq, (
void *)CFP_PC(ec->cfp));
3225 if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_KW_SPLAT))) {
3226 if (LIKELY(rb_simple_iseq_p(iseq))) {
3228 int lead_num = ISEQ_BODY(iseq)->param.lead_num;
3229 CALLER_SETUP_ARG(cfp, calling, ci, lead_num);
3231 if (calling->argc != lead_num) {
3232 argument_arity_error(ec, iseq, vm_cc_cme(cc), calling->argc, lead_num, lead_num);
3236 VM_ASSERT(cc == calling->cc);
3238 if (vm_call_iseq_optimizable_p(ci, cc)) {
3239 if ((iseq->body->builtin_attrs & BUILTIN_ATTR_SINGLE_NOARG_LEAF) && ruby_vm_c_events_enabled == 0) {
3240 VM_ASSERT(iseq->body->builtin_attrs & BUILTIN_ATTR_LEAF);
3241 vm_cc_bf_set(cc, (
void *)iseq->body->iseq_encoded[1]);
3242 CC_SET_FASTPATH(cc, vm_call_single_noarg_leaf_builtin,
true);
3245 CC_SET_FASTPATH(cc, vm_call_iseq_setup_func(ci, param_size, local_size),
true);
3250 else if (rb_iseq_only_optparam_p(iseq)) {
3253 const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
3254 const int opt_num = ISEQ_BODY(iseq)->param.opt_num;
3256 CALLER_SETUP_ARG(cfp, calling, ci, lead_num + opt_num);
3257 const int argc = calling->argc;
3258 const int opt = argc - lead_num;
3260 if (opt < 0 || opt > opt_num) {
3261 argument_arity_error(ec, iseq, vm_cc_cme(cc), argc, lead_num, lead_num + opt_num);
3264 if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_TAILCALL))) {
3265 CC_SET_FASTPATH(cc, vm_call_iseq_setup_normal_opt_start,
3266 !IS_ARGS_SPLAT(ci) && !IS_ARGS_KEYWORD(ci) &&
3267 vm_call_cacheable(ci, cc));
3270 CC_SET_FASTPATH(cc, vm_call_iseq_setup_tailcall_opt_start,
3271 !IS_ARGS_SPLAT(ci) && !IS_ARGS_KEYWORD(ci) &&
3272 vm_call_cacheable(ci, cc));
3276 VM_ASSERT((
int)ISEQ_BODY(iseq)->param.size == lead_num + opt_num);
3277 for (
int i=argc; i<lead_num + opt_num; i++) {
3280 return (
int)ISEQ_BODY(iseq)->param.opt_table[opt];
3282 else if (rb_iseq_only_kwparam_p(iseq) && !IS_ARGS_SPLAT(ci)) {
3283 const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
3284 const int argc = calling->argc;
3285 const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;
3287 if (vm_ci_flag(ci) & VM_CALL_KWARG) {
3290 if (argc - kw_arg->keyword_len == lead_num) {
3291 const int ci_kw_len = kw_arg->keyword_len;
3292 const VALUE *
const ci_keywords = kw_arg->keywords;
3294 MEMCPY(ci_kws, argv + lead_num,
VALUE, ci_kw_len);
3296 VALUE *
const klocals = argv + kw_param->bits_start - kw_param->num;
3297 args_setup_kw_parameters(ec, iseq, vm_cc_cme(cc), ci_kws, ci_kw_len, ci_keywords, klocals);
3299 CC_SET_FASTPATH(cc, vm_call_iseq_setup_kwparm_kwarg,
3300 vm_call_cacheable(ci, cc));
3305 else if (argc == lead_num) {
3307 VALUE *
const klocals = argv + kw_param->bits_start - kw_param->num;
3308 args_setup_kw_parameters(ec, iseq, vm_cc_cme(cc), NULL, 0, NULL, klocals);
3310 if (klocals[kw_param->num] ==
INT2FIX(0)) {
3312 CC_SET_FASTPATH(cc, vm_call_iseq_setup_kwparm_nokwarg,
3313 vm_call_cacheable(ci, cc));
3339 if (ISEQ_BODY(iseq)->param.flags.forwardable) {
3340 bool can_fastpath =
true;
3342 if ((vm_ci_flag(ci) & VM_CALL_FORWARDING)) {
3344 if (vm_ci_argc(ci) != vm_ci_argc(forward_cd->caller_ci)) {
3345 ci = vm_ci_new_runtime(
3352 ci = forward_cd->caller_ci;
3354 can_fastpath =
false;
3358 if (!vm_ci_markable(ci)) {
3359 ci = vm_ci_new_runtime(
3364 can_fastpath =
false;
3366 argv[param_size - 1] = (
VALUE)ci;
3367 CC_SET_FASTPATH(cc, vm_call_iseq_forwardable, can_fastpath);
3371 return setup_parameters_complex(ec, iseq, calling, ci, argv, arg_setup_method);
3398 const VALUE * lep = VM_CF_LEP(cfp);
3404 if (VM_ENV_FLAGS(lep, VM_ENV_FLAG_ESCAPED)) {
3409 iseq = CFP_ISEQ(rb_vm_search_cf_from_ep(ec, cfp, lep));
3413 int local_size = ISEQ_BODY(iseq)->local_table_size + argc;
3415 const VALUE * from = lep - (local_size + VM_ENV_DATA_SIZE - 1);
3416 VALUE * to = cfp->sp - 1;
3420 CHECK_VM_STACK_OVERFLOW0(cfp, to,
RARRAY_LEN(splat));
3425 CHECK_VM_STACK_OVERFLOW0(cfp, to, argc);
3427 cfp->sp = to + argc;
3446 RB_DEBUG_COUNTER_INC(ccf_iseq_setup);
3449 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
3450 int param_size = ISEQ_BODY(iseq)->param.size;
3451 int local_size = ISEQ_BODY(iseq)->local_table_size;
3453 RUBY_ASSERT(!ISEQ_BODY(iseq)->param.flags.forwardable);
3455 const int opt_pc = vm_callee_setup_arg(ec, calling, iseq, cfp->sp - calling->argc, param_size, local_size);
3456 return vm_call_iseq_setup_2(ec, cfp, calling, opt_pc, param_size, local_size);
3462 RB_DEBUG_COUNTER_INC(ccf_iseq_setup);
3465 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
3466 int param_size = ISEQ_BODY(iseq)->param.size;
3467 int local_size = ISEQ_BODY(iseq)->local_table_size;
3469 RUBY_ASSERT(ISEQ_BODY(iseq)->param.flags.forwardable);
3472 local_size = local_size + vm_ci_argc(calling->cd->ci);
3473 param_size = param_size + vm_ci_argc(calling->cd->ci);
3475 const int opt_pc = vm_callee_setup_arg(ec, calling, iseq, cfp->sp - calling->argc, param_size, local_size);
3476 return vm_call_iseq_setup_2(ec, cfp, calling, opt_pc, param_size, local_size);
3481 int opt_pc,
int param_size,
int local_size)
3486 if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_TAILCALL))) {
3487 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), opt_pc, param_size, local_size);
3490 return vm_call_iseq_setup_tailcall(ec, cfp, calling, opt_pc);
3496 int opt_pc,
int param_size,
int local_size)
3498 const rb_iseq_t *iseq = def_iseq_ptr(me->def);
3499 VALUE *argv = cfp->sp - calling->argc;
3500 VALUE *sp = argv + param_size;
3501 cfp->sp = argv - 1 ;
3503 vm_push_frame(ec, iseq, VM_FRAME_MAGIC_METHOD | VM_ENV_FLAG_LOCAL, calling->recv,
3504 calling->block_handler, (
VALUE)me,
3505 ISEQ_BODY(iseq)->iseq_encoded + opt_pc, sp,
3506 local_size - param_size,
3507 ISEQ_BODY(iseq)->stack_max);
3516 VALUE *argv = cfp->sp - calling->argc;
3518 const rb_iseq_t *iseq = def_iseq_ptr(me->def);
3519 VALUE *src_argv = argv;
3520 VALUE *sp_orig, *sp;
3521 VALUE finish_flag = VM_FRAME_FINISHED_P(cfp) ? VM_FRAME_FLAG_FINISH : 0;
3523 if (VM_BH_FROM_CFP_P(calling->block_handler, cfp)) {
3524 struct rb_captured_block *dst_captured = VM_CFP_TO_CAPTURED_BLOCK(RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp));
3525 const struct rb_captured_block *src_captured = VM_BH_TO_CAPT_BLOCK(calling->block_handler);
3526 dst_captured->code.val = src_captured->code.val;
3527 if (VM_BH_ISEQ_BLOCK_P(calling->block_handler)) {
3528 calling->block_handler = VM_BH_FROM_ISEQ_BLOCK(dst_captured);
3531 calling->block_handler = VM_BH_FROM_IFUNC_BLOCK(dst_captured);
3535 vm_pop_frame(ec, cfp, cfp->ep);
3538 sp_orig = sp = cfp->sp;
3541 sp[0] = calling->recv;
3545 for (i=0; i < ISEQ_BODY(iseq)->param.size; i++) {
3546 *sp++ = src_argv[i];
3549 vm_push_frame(ec, iseq, VM_FRAME_MAGIC_METHOD | VM_ENV_FLAG_LOCAL | finish_flag,
3550 calling->recv, calling->block_handler, (
VALUE)me,
3551 ISEQ_BODY(iseq)->iseq_encoded + opt_pc, sp,
3552 ISEQ_BODY(iseq)->local_table_size - ISEQ_BODY(iseq)->param.size,
3553 ISEQ_BODY(iseq)->stack_max);
/* ractor_unsafe_check: raise Ractor::UnsafeError when a ractor-unsafe
 * C method is invoked from any ractor other than the main one. */
3561ractor_unsafe_check(
void)
3563 if (!rb_ractor_main_p()) {
3564 rb_raise(rb_eRactorUnsafeError,
"ractor unsafe method called from not main ractor");
3571 ractor_unsafe_check();
3579 ractor_unsafe_check();
3581 return (*f)(argc, argv, recv);
3587 ractor_unsafe_check();
3595 ractor_unsafe_check();
3597 return (*f)(recv, argv[0]);
3603 ractor_unsafe_check();
3605 return (*f)(recv, argv[0], argv[1]);
3611 ractor_unsafe_check();
3613 return (*f)(recv, argv[0], argv[1], argv[2]);
3619 ractor_unsafe_check();
3621 return (*f)(recv, argv[0], argv[1], argv[2], argv[3]);
3627 ractor_unsafe_check();
3628 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3629 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4]);
3635 ractor_unsafe_check();
3636 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3637 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
3643 ractor_unsafe_check();
3644 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3645 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
3651 ractor_unsafe_check();
3652 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3653 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
3659 ractor_unsafe_check();
3660 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3661 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
3667 ractor_unsafe_check();
3668 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3669 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
3675 ractor_unsafe_check();
3676 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3677 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
3683 ractor_unsafe_check();
3684 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3685 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
3691 ractor_unsafe_check();
3692 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3693 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
3699 ractor_unsafe_check();
3700 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3701 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
3707 ractor_unsafe_check();
3708 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3709 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
3723 return (*f)(argc, argv, recv);
3737 return (*f)(recv, argv[0]);
3744 return (*f)(recv, argv[0], argv[1]);
3751 return (*f)(recv, argv[0], argv[1], argv[2]);
3758 return (*f)(recv, argv[0], argv[1], argv[2], argv[3]);
3764 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3765 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4]);
3771 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3772 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
3778 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3779 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
3785 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3786 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
3792 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3793 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
3799 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3800 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
3806 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3807 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
3813 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3814 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
3820 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3821 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
3827 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3828 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
3834 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3835 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
/* Fragment of vm_cfp_consistent_p: after a C function returns, the
 * saved control frame must sit exactly one frame above ec->cfp.  A
 * pending stack-overflow raise is also accepted, and its flag is
 * cleared here. */
3841 const int ov_flags = RAISED_STACKOVERFLOW;
3842 if (LIKELY(reg_cfp == ec->cfp + 1))
return TRUE;
3843 if (rb_ec_raised_p(ec, ov_flags)) {
3844 rb_ec_raised_reset(ec, ov_flags);
/* CHECK_CFP_CONSISTENCY(func): rb_bug with the caller's name when the
 * frame pointers are inconsistent after a cfunc call. */
3850#define CHECK_CFP_CONSISTENCY(func) \
3851 (LIKELY(vm_cfp_consistent_p(ec, reg_cfp)) ? (void)0 : \
3852 rb_bug(func ": cfp consistency error (%p, %p)", (void *)reg_cfp, (void *)(ec->cfp+1)))
3858#if VM_DEBUG_VERIFY_METHOD_CACHE
3859 switch (me->def->type) {
3860 case VM_METHOD_TYPE_CFUNC:
3861 case VM_METHOD_TYPE_NOTIMPLEMENTED:
3863# define METHOD_BUG(t) case VM_METHOD_TYPE_##t: rb_bug("wrong method type: " #t)
3865 METHOD_BUG(ATTRSET);
3867 METHOD_BUG(BMETHOD);
3870 METHOD_BUG(OPTIMIZED);
3871 METHOD_BUG(MISSING);
3872 METHOD_BUG(REFINED);
3876 rb_bug(
"wrong method type: %d", me->def->type);
3879 return UNALIGNED_MEMBER_PTR(me->def, body.cfunc);
/* Fragment of vm_call_cfunc_with_frame_: invoke a C-implemented method
 * with a full CFUNC control frame: push the frame, run the invoker,
 * verify frame consistency, pop, and fire the C_RETURN event/dtrace
 * hooks.  NOTE(review): the entry-event hook and return-value lines
 * are elided in this extraction. */
3886 RB_DEBUG_COUNTER_INC(ccf_cfunc_with_frame);
3893 VALUE recv = calling->recv;
3894 VALUE block_handler = calling->block_handler;
3895 VALUE frame_type = VM_FRAME_MAGIC_CFUNC | VM_FRAME_FLAG_CFRAME | VM_ENV_FLAG_LOCAL;
/* Propagate caller-side kw_splat into the frame flags. */
3897 if (UNLIKELY(calling->kw_splat)) {
3898 frame_type |= VM_FRAME_FLAG_CFRAME_KW;
3901 VM_ASSERT(reg_cfp == ec->cfp);
3903 RUBY_DTRACE_CMETHOD_ENTRY_HOOK(ec, me->owner, me->def->original_id);
3906 vm_push_frame(ec, NULL, frame_type, recv,
3907 block_handler, (
VALUE)me,
3908 0, ec->cfp->sp, 0, 0);
3910 int len = cfunc->argc;
/* Reset sp below the args, then dispatch through the arity-specific
 * invoker trampoline. */
3913 reg_cfp->sp = stack_bottom;
3914 val = (*cfunc->invoker)(recv, argc, argv, cfunc->func);
3916 CHECK_CFP_CONSISTENCY(
"vm_call_cfunc");
3918 rb_vm_pop_frame(ec);
3920 VM_ASSERT(ec->cfp->sp == stack_bottom);
3922 EXEC_EVENT_HOOK(ec,
RUBY_EVENT_C_RETURN, recv, me->def->original_id, vm_ci_mid(ci), me->owner, val);
3923 RUBY_DTRACE_CMETHOD_RETURN_HOOK(ec, me->owner, me->def->original_id);
3933 VM_ASSERT(cme->def->type == VM_METHOD_TYPE_CFUNC);
3935 VALUE *sp = ec->cfp->sp;
3936 VALUE recv = *(sp - recv_idx - 1);
3937 VALUE frame_type = VM_FRAME_MAGIC_CFUNC | VM_FRAME_FLAG_CFRAME | VM_ENV_FLAG_LOCAL;
3938 VALUE block_handler = VM_BLOCK_HANDLER_NONE;
3939#if VM_CHECK_MODE > 0
3941 *(GET_EC()->cfp->sp) =
Qfalse;
3943 vm_push_frame(ec, NULL, frame_type, recv, block_handler, (
VALUE)cme, 0, ec->cfp->sp, 0, 0);
/* rb_splat_or_kwargs_p: does the call site use a splat or any kind of
 * keyword argument? */
3948rb_splat_or_kwargs_p(
const struct rb_callinfo *restrict ci)
3950 return IS_ARGS_SPLAT(ci) || IS_ARGS_KW_OR_KW_SPLAT(ci);
/* Fragment of vm_call_cfunc_with_frame: simple case — args are already
 * contiguous on the VM stack below sp. */
3956 int argc = calling->argc;
3957 VALUE *stack_bottom = reg_cfp->sp - argc - 1;
3958 VALUE *argv = &stack_bottom[1];
3960 return vm_call_cfunc_with_frame_(ec, reg_cfp, calling, argc, argv, stack_bottom);
/* Fragment of vm_call_cfunc_other: general case — normalize args
 * (possibly into a heap argv), then install the simple handler as the
 * fastpath when the call shape allows it. */
3967 RB_DEBUG_COUNTER_INC(ccf_cfunc_other);
3969 CALLER_SETUP_ARG(reg_cfp, calling, ci, ALLOW_HEAP_ARGV_KEEP_KWSPLAT);
3971 if (UNLIKELY(argv_ary = calling->heap_argv)) {
3972 VM_ASSERT(!IS_ARGS_KEYWORD(ci));
3975 VALUE *stack_bottom = reg_cfp->sp - 2;
3977 VM_ASSERT(calling->argc == 1);
3981 return vm_call_cfunc_with_frame_(ec, reg_cfp, calling, argc, argv, stack_bottom);
/* Cache the no-splat/no-kw fast path for subsequent calls. */
3984 CC_SET_FASTPATH(calling->cc, vm_call_cfunc_with_frame, !rb_splat_or_kwargs_p(ci) && !calling->kw_splat && !(vm_ci_flag(ci) & VM_CALL_FORWARDING));
3986 return vm_call_cfunc_with_frame(ec, reg_cfp, calling);
3993 VALUE argv_ary = reg_cfp->sp[-1 - stack_offset];
3996 if (UNLIKELY(argc > VM_ARGC_STACK_MAX)) {
3997 return vm_call_cfunc_other(ec, reg_cfp, calling);
4001 calling->kw_splat = 0;
4003 VALUE *stack_bottom = reg_cfp->sp - 2 - stack_offset;
4004 VALUE *sp = stack_bottom;
4005 CHECK_VM_STACK_OVERFLOW(reg_cfp, argc);
4006 for(i = 0; i < argc; i++) {
4011 return vm_call_cfunc_with_frame_(ec, reg_cfp, calling, argc, stack_bottom+1, stack_bottom);
4017 RB_DEBUG_COUNTER_INC(ccf_cfunc_only_splat);
4018 VALUE argv_ary = reg_cfp->sp[-1];
4022 int argc_offset = 0;
4024 if (UNLIKELY(argc > 0 &&
4026 (((
struct RHash *)last_hash)->basic.flags & RHASH_PASS_AS_KEYWORDS))) {
4028 return vm_call_cfunc_other(ec, reg_cfp, calling);
4032 return vm_call_cfunc_array_argv(ec, reg_cfp, calling, 0, argc_offset);
4038 RB_DEBUG_COUNTER_INC(ccf_cfunc_only_splat_kw);
4039 VALUE keyword_hash = reg_cfp->sp[-1];
4042 return vm_call_cfunc_array_argv(ec, reg_cfp, calling, 1, 0);
4045 return vm_call_cfunc_other(ec, reg_cfp, calling);
4052 RB_DEBUG_COUNTER_INC(ccf_cfunc);
4054 if (IS_ARGS_SPLAT(ci) && !(vm_ci_flag(ci) & VM_CALL_FORWARDING)) {
4055 if (!IS_ARGS_KW_SPLAT(ci) && vm_ci_argc(ci) == 1) {
4057 CC_SET_FASTPATH(calling->cc, vm_call_cfunc_only_splat, TRUE);
4058 return vm_call_cfunc_only_splat(ec, reg_cfp, calling);
4060 if (IS_ARGS_KW_SPLAT(ci) && vm_ci_argc(ci) == 2) {
4062 CC_SET_FASTPATH(calling->cc, vm_call_cfunc_only_splat_kw, TRUE);
4063 return vm_call_cfunc_only_splat_kw(ec, reg_cfp, calling);
4067 CC_SET_FASTPATH(calling->cc, vm_call_cfunc_other, TRUE);
4068 return vm_call_cfunc_other(ec, reg_cfp, calling);
4075 RB_DEBUG_COUNTER_INC(ccf_ivar);
4077 VALUE ivar = vm_getivar(calling->recv, vm_cc_cme(cc)->def->body.attr.id, NULL, NULL, cc, TRUE,
Qnil);
4084 RB_DEBUG_COUNTER_INC(ccf_attrset);
4085 VALUE val = *(cfp->sp - 1);
4088 shape_id_t dest_shape_id;
4089 vm_cc_atomic_shape_and_index(cc, &dest_shape_id, &index);
4090 ID id = vm_cc_cme(cc)->def->body.attr.id;
4091 rb_check_frozen(obj);
4092 VALUE res = vm_setivar(obj,
id, val, dest_shape_id, index);
4100 res = vm_setivar_class(obj,
id, val, dest_shape_id, index);
4101 if (!UNDEF_P(res)) {
4108 res = vm_setivar_default(obj,
id, val, dest_shape_id, index);
4109 if (!UNDEF_P(res)) {
4114 res = vm_setivar_slowpath_attr(obj,
id, val, cc);
4122 return vm_call_attrset_direct(ec, cfp, calling->cc, calling->recv);
4132 VALUE procv = cme->def->body.bmethod.proc;
4135 cme->def->body.bmethod.defined_ractor_id != rb_ec_ractor_id(ec)) {
4136 rb_raise(
rb_eRuntimeError,
"defined with an un-shareable Proc in a different Ractor");
4140 GetProcPtr(procv, proc);
4141 val = vm_invoke_bmethod(ec, proc, calling->recv, CALLING_ARGC(calling), argv, calling->kw_splat, calling->block_handler, vm_cc_cme(cc));
4151 RB_DEBUG_COUNTER_INC(ccf_iseq_bmethod);
4155 VALUE procv = cme->def->body.bmethod.proc;
4158 cme->def->body.bmethod.defined_ractor_id != rb_ec_ractor_id(ec)) {
4159 rb_raise(
rb_eRuntimeError,
"defined with an un-shareable Proc in a different Ractor");
4163 GetProcPtr(procv, proc);
4164 const struct rb_block *block = &proc->block;
4166 while (vm_block_type(block) == block_type_proc) {
4167 block = vm_proc_block(block->as.proc);
4169 VM_ASSERT(vm_block_type(block) == block_type_iseq);
4172 const rb_iseq_t *iseq = rb_iseq_check(captured->code.iseq);
4173 VALUE *
const argv = cfp->sp - calling->argc;
4174 const int arg_size = ISEQ_BODY(iseq)->param.size;
4177 if (vm_ci_flag(calling->cd->ci) & VM_CALL_ARGS_SIMPLE) {
4178 opt_pc = vm_callee_setup_block_arg(ec, calling, calling->cd->ci, iseq, argv, arg_setup_method);
4181 opt_pc = setup_parameters_complex(ec, iseq, calling, calling->cd->ci, argv, arg_setup_method);
4186 vm_push_frame(ec, iseq,
4187 VM_FRAME_MAGIC_BLOCK | VM_FRAME_FLAG_BMETHOD | VM_FRAME_FLAG_LAMBDA,
4189 VM_GUARDED_PREV_EP(captured->ep),
4191 ISEQ_BODY(iseq)->iseq_encoded + opt_pc,
4193 ISEQ_BODY(iseq)->local_table_size - arg_size,
4194 ISEQ_BODY(iseq)->stack_max);
4202 RB_DEBUG_COUNTER_INC(ccf_noniseq_bmethod);
4206 CALLER_SETUP_ARG(cfp, calling, calling->cd->ci, ALLOW_HEAP_ARGV);
4207 if (UNLIKELY(calling->heap_argv)) {
4212 argc = calling->argc;
4215 cfp->sp += - argc - 1;
4218 return vm_call_bmethod_body(ec, calling, argv);
4224 RB_DEBUG_COUNTER_INC(ccf_bmethod);
4228 VALUE procv = cme->def->body.bmethod.proc;
4230 GetProcPtr(procv, proc);
4231 const struct rb_block *block = &proc->block;
4233 while (vm_block_type(block) == block_type_proc) {
4234 block = vm_proc_block(block->as.proc);
4236 if (vm_block_type(block) == block_type_iseq) {
4237 CC_SET_FASTPATH(cc, vm_call_iseq_bmethod, TRUE);
4238 return vm_call_iseq_bmethod(ec, cfp, calling);
4241 CC_SET_FASTPATH(cc, vm_call_noniseq_bmethod, TRUE);
4242 return vm_call_noniseq_bmethod(ec, cfp, calling);
4246rb_find_defined_class_by_owner(
VALUE current_class,
VALUE target_owner)
4248 VALUE klass = current_class;
4256 while (
RTEST(klass)) {
4258 if (owner == target_owner) {
4264 return current_class;
4273 if (orig_me->defined_class == 0) {
4274 VALUE defined_class = rb_find_defined_class_by_owner(me->defined_class, orig_me->owner);
4275 VM_ASSERT_TYPE(orig_me->owner,
T_MODULE);
4276 cme = rb_method_entry_complement_defined_class(orig_me, me->called_id, defined_class);
4278 if (me->def->reference_count == 1) {
4279 RB_OBJ_WRITE(me, &me->def->body.alias.original_me, cme);
4283 rb_method_definition_create(VM_METHOD_TYPE_ALIAS, me->def->original_id);
4291 VM_ASSERT(callable_method_entry_p(cme));
4298 return aliased_callable_method_entry(me);
4304 calling->cc = &VM_CC_ON_STACK(
Qundef,
4307 aliased_callable_method_entry(vm_cc_cme(calling->cc)));
4309 return vm_call_method_each_type(ec, cfp, calling);
4312static enum method_missing_reason
4315 enum method_missing_reason stat = MISSING_NOENTRY;
4316 if (vm_ci_flag(ci) & VM_CALL_VCALL && !(vm_ci_flag(ci) & VM_CALL_FORWARDING)) stat |= MISSING_VCALL;
4317 if (vm_ci_flag(ci) & VM_CALL_FCALL) stat |= MISSING_FCALL;
4318 if (vm_ci_flag(ci) & VM_CALL_SUPER) stat |= MISSING_SUPER;
4328 ASSUME(calling->argc >= 0);
4330 enum method_missing_reason missing_reason = MISSING_NOENTRY;
4331 int argc = calling->argc;
4332 VALUE recv = calling->recv;
4335 flags |= VM_CALL_OPT_SEND;
4337 if (UNLIKELY(! mid)) {
4338 mid = idMethodMissing;
4339 missing_reason = ci_missing_reason(ci);
4340 ec->method_missing_reason = missing_reason;
4343 if (UNLIKELY(argv_ary = calling->heap_argv)) {
4344 if (rb_method_basic_definition_p(klass, idMethodMissing)) {
4345 rb_ary_unshift(argv_ary, symbol);
4348 int priv = vm_ci_flag(ci) & (VM_CALL_FCALL | VM_CALL_VCALL);
4349 VALUE exc = rb_make_no_method_exception(
4371 CHECK_VM_STACK_OVERFLOW(reg_cfp, 1);
4374 argc = ++calling->argc;
4376 if (rb_method_basic_definition_p(klass, idMethodMissing)) {
4379 int priv = vm_ci_flag(ci) & (VM_CALL_FCALL | VM_CALL_VCALL);
4380 const VALUE *argv = STACK_ADDR_FROM_TOP(argc);
4381 VALUE exc = rb_make_no_method_exception(
4394 .ci = &VM_CI_ON_STACK(mid, flags, argc, vm_ci_kwarg(ci)),
4400 if (!(vm_ci_flag(ci) & VM_CALL_FORWARDING)) {
4401 calling->cd = &new_fcd.cd;
4405 VM_ASSERT((vm_ci_argc(caller_ci), 1));
4406 new_fcd.caller_ci = caller_ci;
4409 calling->cc = &VM_CC_ON_STACK(klass,
4411 { .method_missing_reason = missing_reason },
4412 rb_callable_method_entry_with_refinements(klass, mid, NULL));
4414 if (flags & VM_CALL_FCALL) {
4415 return vm_call_method(ec, reg_cfp, calling);
4419 VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));
4421 if (vm_cc_cme(cc) != NULL) {
4422 switch (METHOD_ENTRY_VISI(vm_cc_cme(cc))) {
4423 case METHOD_VISI_PUBLIC:
4424 return vm_call_method_each_type(ec, reg_cfp, calling);
4425 case METHOD_VISI_PRIVATE:
4426 vm_cc_method_missing_reason_set(cc, MISSING_PRIVATE);
4428 case METHOD_VISI_PROTECTED:
4429 vm_cc_method_missing_reason_set(cc, MISSING_PROTECTED);
4432 VM_UNREACHABLE(vm_call_method);
4434 return vm_call_method_missing(ec, reg_cfp, calling);
4437 return vm_call_method_nome(ec, reg_cfp, calling);
4447 i = calling->argc - 1;
4449 if (calling->argc == 0) {
4450 rb_raise(rb_eArgError,
"no method name given");
4474 return vm_call_symbol(ec, reg_cfp, calling, ci, sym, flags);
4480 RB_DEBUG_COUNTER_INC(ccf_opt_send_complex);
4482 int flags = VM_CALL_FCALL;
4486 CALLER_SETUP_ARG(reg_cfp, calling, ci, ALLOW_HEAP_ARGV);
4487 if (UNLIKELY(argv_ary = calling->heap_argv)) {
4489 flags |= VM_CALL_ARGS_SPLAT;
4490 if (calling->kw_splat) {
4491 VALUE last_hash = rb_ary_last(0, NULL, argv_ary);
4492 ((
struct RHash *)last_hash)->basic.flags |= RHASH_PASS_AS_KEYWORDS;
4493 calling->kw_splat = 0;
4495 return vm_call_symbol(ec, reg_cfp, calling, ci, sym, flags);
4498 if (calling->kw_splat) flags |= VM_CALL_KW_SPLAT;
4499 return vm_call_opt_send0(ec, reg_cfp, calling, flags);
4505 RB_DEBUG_COUNTER_INC(ccf_opt_send_simple);
4506 return vm_call_opt_send0(ec, reg_cfp, calling, vm_ci_flag(calling->cd->ci) | VM_CALL_FCALL);
4512 RB_DEBUG_COUNTER_INC(ccf_opt_send);
4515 int flags = vm_ci_flag(ci);
4517 if (UNLIKELY((flags & VM_CALL_FORWARDING) || (!(flags & VM_CALL_ARGS_SIMPLE) &&
4518 ((calling->argc == 1 && (flags & (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT))) ||
4519 (calling->argc == 2 && (flags & VM_CALL_ARGS_SPLAT) && (flags & VM_CALL_KW_SPLAT)) ||
4520 ((flags & VM_CALL_KWARG) && (vm_ci_kwarg(ci)->keyword_len == calling->argc)))))) {
4521 CC_SET_FASTPATH(calling->cc, vm_call_opt_send_complex, TRUE);
4522 return vm_call_opt_send_complex(ec, reg_cfp, calling);
4525 CC_SET_FASTPATH(calling->cc, vm_call_opt_send_simple, TRUE);
4526 return vm_call_opt_send_simple(ec, reg_cfp, calling);
4531 const struct rb_callinfo *orig_ci,
enum method_missing_reason reason)
4533 RB_DEBUG_COUNTER_INC(ccf_method_missing);
4535 VALUE *argv = STACK_ADDR_FROM_TOP(calling->argc);
4536 unsigned int argc, flag;
4538 flag = VM_CALL_FCALL | VM_CALL_OPT_SEND | vm_ci_flag(orig_ci);
4539 argc = ++calling->argc;
4542 CHECK_VM_STACK_OVERFLOW(reg_cfp, 1);
4543 vm_check_canary(ec, reg_cfp->sp);
4547 argv[0] =
ID2SYM(vm_ci_mid(orig_ci));
4550 ec->method_missing_reason = reason;
4554 .ci = &VM_CI_ON_STACK(idMethodMissing, flag, argc, vm_ci_kwarg(orig_ci)),
4560 if (!(flag & VM_CALL_FORWARDING)) {
4561 calling->cd = &new_fcd.cd;
4565 VM_ASSERT((vm_ci_argc(caller_ci), 1));
4566 new_fcd.caller_ci = caller_ci;
4570 calling->cc = &VM_CC_ON_STACK(
Qundef, vm_call_general, {{ 0 }},
4571 rb_callable_method_entry_without_refinements(
CLASS_OF(calling->recv), idMethodMissing, NULL));
4572 return vm_call_method(ec, reg_cfp, calling);
4578 return vm_call_method_missing_body(ec, reg_cfp, calling, calling->cd->ci, vm_cc_cmethod_missing_reason(calling->cc));
4589 return vm_call_method_nome(ec, cfp, calling);
4591 if (cme->def->type == VM_METHOD_TYPE_REFINED &&
4592 cme->def->body.refined.orig_me) {
4593 cme = refined_method_callable_without_refinement(cme);
4596 calling->cc = &VM_CC_ON_STACK(
Qundef, vm_call_general, {{ 0 }}, cme);
4598 return vm_call_method_each_type(ec, cfp, calling);
4602find_refinement(
VALUE refinements,
VALUE klass)
4604 if (
NIL_P(refinements)) {
4607 return rb_hash_lookup(refinements, klass);
4616 if (CFP_ISEQ(cfp) && ISEQ_BODY(CFP_ISEQ(cfp))->
type == ISEQ_TYPE_BLOCK) {
4617 const rb_iseq_t *local_iseq = ISEQ_BODY(CFP_ISEQ(cfp))->local_iseq;
4620 cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
4621 if (RUBY_VM_CONTROL_FRAME_STACK_OVERFLOW_P(ec, cfp)) {
4625 }
while (CFP_ISEQ(cfp) != local_iseq);
4636 if (orig_me->defined_class == 0) {
4644 VM_ASSERT(callable_method_entry_p(cme));
4646 if (UNDEFINED_METHOD_ENTRY_P(cme)) {
4656 ID mid = vm_ci_mid(calling->cd->ci);
4657 const rb_cref_t *cref = vm_get_cref(cfp->ep);
4661 for (; cref; cref = CREF_NEXT(cref)) {
4662 const VALUE refinement = find_refinement(CREF_REFINEMENTS(cref), vm_cc_cme(cc)->owner);
4663 if (
NIL_P(refinement))
continue;
4666 rb_callable_method_entry(refinement, mid);
4669 if (vm_cc_call(cc) == vm_call_super_method) {
4672 if (top_me && rb_method_definition_eq(ref_me->def, top_me->def)) {
4677 if (cme->def->type != VM_METHOD_TYPE_REFINED ||
4678 cme->def != ref_me->def) {
4681 if (ref_me->def->type != VM_METHOD_TYPE_REFINED) {
4690 if (vm_cc_cme(cc)->def->body.refined.orig_me) {
4691 return refined_method_callable_without_refinement(vm_cc_cme(cc));
4706 if (calling->cd->cc) {
4707 const struct rb_callcache *cc = calling->cc = vm_cc_new(vm_cc_cme(calling->cc)->defined_class, ref_cme, vm_call_general, cc_type_refinement);
4709 return vm_call_method(ec, cfp, calling);
4712 struct rb_callcache *ref_cc = &VM_CC_ON_STACK(
Qundef, vm_call_general, {{ 0 }}, ref_cme);
4713 calling->cc= ref_cc;
4714 return vm_call_method(ec, cfp, calling);
4718 return vm_call_method_nome(ec, cfp, calling);
4724NOINLINE(
static VALUE
4732 int argc = calling->argc;
4735 if (argc > 0)
MEMMOVE(&TOPN(argc), &TOPN(argc-1),
VALUE, argc);
4738 return vm_invoke_block(ec, reg_cfp, calling, ci,
false, block_handler);
4744 RB_DEBUG_COUNTER_INC(ccf_opt_call);
4747 VALUE procval = calling->recv;
4748 return vm_invoke_block_opt_call(ec, reg_cfp, calling, ci, VM_BH_FROM_PROC(procval));
4754 RB_DEBUG_COUNTER_INC(ccf_opt_block_call);
4756 VALUE block_handler = VM_ENV_BLOCK_HANDLER(VM_CF_LEP(reg_cfp));
4759 if (BASIC_OP_UNREDEFINED_P(BOP_CALL, PROC_REDEFINED_OP_FLAG)) {
4760 return vm_invoke_block_opt_call(ec, reg_cfp, calling, ci, block_handler);
4763 calling->recv = rb_vm_bh_to_procval(ec, block_handler);
4764 calling->cc = rb_vm_search_method_slowpath(ci,
CLASS_OF(calling->recv));
4765 return vm_call_general(ec, reg_cfp, calling);
4772 VALUE recv = calling->recv;
4775 VM_ASSERT(vm_cc_cme(calling->cc)->def->type == VM_METHOD_TYPE_OPTIMIZED);
4776 VM_ASSERT(vm_cc_cme(calling->cc)->def->body.optimized.type == OPTIMIZED_METHOD_TYPE_STRUCT_AREF);
4778 const unsigned int off = vm_cc_cme(calling->cc)->def->body.optimized.index;
4779 return RSTRUCT_GET_RAW(recv,
off);
4785 RB_DEBUG_COUNTER_INC(ccf_opt_struct_aref);
4787 VALUE ret = vm_call_opt_struct_aref0(ec, calling);
4795 VALUE recv = calling->recv;
4798 VM_ASSERT(vm_cc_cme(calling->cc)->def->type == VM_METHOD_TYPE_OPTIMIZED);
4799 VM_ASSERT(vm_cc_cme(calling->cc)->def->body.optimized.type == OPTIMIZED_METHOD_TYPE_STRUCT_ASET);
4801 rb_check_frozen(recv);
4803 const unsigned int off = vm_cc_cme(calling->cc)->def->body.optimized.index;
4804 RSTRUCT_SET_RAW(recv,
off, val);
4812 RB_DEBUG_COUNTER_INC(ccf_opt_struct_aset);
4814 VALUE ret = vm_call_opt_struct_aset0(ec, calling, *(reg_cfp->sp - 1));
4822#define VM_CALL_METHOD_ATTR(var, func, nohook) \
4823 if (UNLIKELY(ruby_vm_c_events_enabled > 0)) { \
4824 EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_CALL, calling->recv, vm_cc_cme(cc)->def->original_id, \
4825 vm_ci_mid(ci), vm_cc_cme(cc)->owner, Qundef); \
4827 EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_RETURN, calling->recv, vm_cc_cme(cc)->def->original_id, \
4828 vm_ci_mid(ci), vm_cc_cme(cc)->owner, (var)); \
4839 switch (vm_cc_cme(cc)->def->body.optimized.type) {
4840 case OPTIMIZED_METHOD_TYPE_SEND:
4841 CC_SET_FASTPATH(cc, vm_call_opt_send, TRUE);
4842 return vm_call_opt_send(ec, cfp, calling);
4843 case OPTIMIZED_METHOD_TYPE_CALL:
4844 CC_SET_FASTPATH(cc, vm_call_opt_call, TRUE);
4845 return vm_call_opt_call(ec, cfp, calling);
4846 case OPTIMIZED_METHOD_TYPE_BLOCK_CALL:
4847 CC_SET_FASTPATH(cc, vm_call_opt_block_call, TRUE);
4848 return vm_call_opt_block_call(ec, cfp, calling);
4849 case OPTIMIZED_METHOD_TYPE_STRUCT_AREF: {
4850 CALLER_SETUP_ARG(cfp, calling, ci, 0);
4854 VM_CALL_METHOD_ATTR(v,
4855 vm_call_opt_struct_aref(ec, cfp, calling),
4856 set_vm_cc_ivar(cc); \
4857 CC_SET_FASTPATH(cc, vm_call_opt_struct_aref, (vm_ci_flag(ci) & VM_CALL_ARGS_SIMPLE)))
4860 case OPTIMIZED_METHOD_TYPE_STRUCT_ASET: {
4861 CALLER_SETUP_ARG(cfp, calling, ci, 1);
4865 VM_CALL_METHOD_ATTR(v,
4866 vm_call_opt_struct_aset(ec, cfp, calling),
4867 set_vm_cc_ivar(cc); \
4868 CC_SET_FASTPATH(cc, vm_call_opt_struct_aset, (vm_ci_flag(ci) & VM_CALL_ARGS_SIMPLE)))
4872 rb_bug(
"vm_call_method: unsupported optimized method type (%d)", vm_cc_cme(cc)->def->body.optimized.type);
4884 VM_ASSERT(! METHOD_ENTRY_INVALIDATED(cme));
4886 switch (cme->def->type) {
4887 case VM_METHOD_TYPE_ISEQ:
4888 if (ISEQ_BODY(def_iseq_ptr(cme->def))->param.flags.forwardable) {
4889 CC_SET_FASTPATH(cc, vm_call_iseq_fwd_setup, TRUE);
4890 return vm_call_iseq_fwd_setup(ec, cfp, calling);
4893 CC_SET_FASTPATH(cc, vm_call_iseq_setup, TRUE);
4894 return vm_call_iseq_setup(ec, cfp, calling);
4897 case VM_METHOD_TYPE_NOTIMPLEMENTED:
4898 case VM_METHOD_TYPE_CFUNC:
4899 CC_SET_FASTPATH(cc, vm_call_cfunc, TRUE);
4900 return vm_call_cfunc(ec, cfp, calling);
4902 case VM_METHOD_TYPE_ATTRSET:
4903 CALLER_SETUP_ARG(cfp, calling, ci, 1);
4907 const unsigned int aset_mask = (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT | VM_CALL_KWARG | VM_CALL_FORWARDING);
4909 if (vm_cc_markable(cc)) {
4910 vm_cc_attr_index_initialize(cc, INVALID_SHAPE_ID);
4911 VM_CALL_METHOD_ATTR(v,
4912 vm_call_attrset_direct(ec, cfp, cc, calling->recv),
4913 CC_SET_FASTPATH(cc, vm_call_attrset, !(vm_ci_flag(ci) & aset_mask)));
4919 VM_CALLCACHE_UNMARKABLE |
4920 VM_CALLCACHE_ON_STACK,
4926 .value = vm_pack_shape_and_index(INVALID_SHAPE_ID, ATTR_INDEX_NOT_SET),
4931 VM_CALL_METHOD_ATTR(v,
4932 vm_call_attrset_direct(ec, cfp, cc, calling->recv),
4933 CC_SET_FASTPATH(cc, vm_call_attrset, !(vm_ci_flag(ci) & aset_mask)));
4937 case VM_METHOD_TYPE_IVAR:
4938 CALLER_SETUP_ARG(cfp, calling, ci, 0);
4940 vm_cc_attr_index_initialize(cc, INVALID_SHAPE_ID);
4941 const unsigned int ivar_mask = (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT | VM_CALL_FORWARDING);
4942 VM_CALL_METHOD_ATTR(v,
4943 vm_call_ivar(ec, cfp, calling),
4944 CC_SET_FASTPATH(cc, vm_call_ivar, !(vm_ci_flag(ci) & ivar_mask)));
4947 case VM_METHOD_TYPE_MISSING:
4948 vm_cc_method_missing_reason_set(cc, 0);
4949 CC_SET_FASTPATH(cc, vm_call_method_missing, TRUE);
4950 return vm_call_method_missing(ec, cfp, calling);
4952 case VM_METHOD_TYPE_BMETHOD:
4953 CC_SET_FASTPATH(cc, vm_call_bmethod, TRUE);
4954 return vm_call_bmethod(ec, cfp, calling);
4956 case VM_METHOD_TYPE_ALIAS:
4957 CC_SET_FASTPATH(cc, vm_call_alias, TRUE);
4958 return vm_call_alias(ec, cfp, calling);
4960 case VM_METHOD_TYPE_OPTIMIZED:
4961 return vm_call_optimized(ec, cfp, calling, ci, cc);
4963 case VM_METHOD_TYPE_UNDEF:
4966 case VM_METHOD_TYPE_ZSUPER:
4967 return vm_call_zsuper(ec, cfp, calling, RCLASS_ORIGIN(vm_cc_cme(cc)->defined_class));
4969 case VM_METHOD_TYPE_REFINED:
4972 return vm_call_refined(ec, cfp, calling);
4975 rb_bug(
"vm_call_method: unsupported method type (%d)", vm_cc_cme(cc)->def->type);
4985 const int stat = ci_missing_reason(ci);
4987 if (vm_ci_mid(ci) == idMethodMissing) {
4988 if (UNLIKELY(calling->heap_argv)) {
4993 VALUE *argv = STACK_ADDR_FROM_TOP(calling->argc);
4994 vm_raise_method_missing(ec, calling->argc, argv, calling->recv, stat);
4998 return vm_call_method_missing_body(ec, cfp, calling, ci, stat);
5010 VALUE defined_class = me->defined_class;
5011 VALUE refined_class = RCLASS_REFINED_CLASS(defined_class);
5012 return NIL_P(refined_class) ? defined_class : refined_class;
5021 VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));
5023 if (vm_cc_cme(cc) != NULL) {
5024 switch (METHOD_ENTRY_VISI(vm_cc_cme(cc))) {
5025 case METHOD_VISI_PUBLIC:
5026 return vm_call_method_each_type(ec, cfp, calling);
5028 case METHOD_VISI_PRIVATE:
5029 if (!(vm_ci_flag(ci) & VM_CALL_FCALL)) {
5030 enum method_missing_reason stat = MISSING_PRIVATE;
5031 if (vm_ci_flag(ci) & VM_CALL_VCALL) stat |= MISSING_VCALL;
5033 vm_cc_method_missing_reason_set(cc, stat);
5034 CC_SET_FASTPATH(cc, vm_call_method_missing, TRUE);
5035 return vm_call_method_missing(ec, cfp, calling);
5037 return vm_call_method_each_type(ec, cfp, calling);
5039 case METHOD_VISI_PROTECTED:
5040 if (!(vm_ci_flag(ci) & (VM_CALL_OPT_SEND | VM_CALL_FCALL))) {
5041 VALUE defined_class = vm_defined_class_for_protected_call(vm_cc_cme(cc));
5043 vm_cc_method_missing_reason_set(cc, MISSING_PROTECTED);
5044 return vm_call_method_missing(ec, cfp, calling);
5048 VM_ASSERT(vm_cc_cme(cc) != NULL);
5051 calling->cc = &cc_on_stack;
5052 return vm_call_method_each_type(ec, cfp, calling);
5055 return vm_call_method_each_type(ec, cfp, calling);
5058 rb_bug(
"unreachable");
5062 return vm_call_method_nome(ec, cfp, calling);
5069 RB_DEBUG_COUNTER_INC(ccf_general);
5070 return vm_call_method(ec, reg_cfp, calling);
5076 VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
5077 VM_ASSERT(cc != vm_cc_empty());
5079 *(vm_call_handler *)&cc->call_ = vm_call_general;
5085 RB_DEBUG_COUNTER_INC(ccf_super_method);
5090 if (ec == NULL) rb_bug(
"unreachable");
5093 VM_ASSERT(vm_cc_call(calling->cc) == vm_call_super_method);
5094 return vm_call_method(ec, reg_cfp, calling);
5100vm_search_normal_superclass(
VALUE klass)
5105 klass =
RBASIC(klass)->klass;
5107 klass = RCLASS_ORIGIN(klass);
5111NORETURN(
static void vm_super_outside(
void));
5114vm_super_outside(
void)
5120empty_cc_for_super(
void)
5122 return &vm_empty_cc_for_super;
5128 VALUE current_defined_class;
5129 const rb_iseq_t *iseq = CFP_ISEQ(reg_cfp);
5136 current_defined_class = vm_defined_class_for_protected_call(me);
5139 iseq != method_entry_iseqptr(me) &&
5142 RCLASS_INCLUDER(current_defined_class) : current_defined_class;
5146 "self has wrong type to call super in this context: "
5147 "%"PRIsVALUE
" (expected %"PRIsVALUE
")",
5152 if (me->def->type == VM_METHOD_TYPE_BMETHOD && (vm_ci_flag(cd->ci) & VM_CALL_ZSUPER)) {
5154 "implicit argument passing of super from method defined"
5155 " by define_method() is not supported."
5156 " Specify all arguments explicitly.");
5159 ID mid = me->def->original_id;
5161 if (!vm_ci_markable(cd->ci)) {
5162 VM_FORCE_WRITE((
const VALUE *)&cd->ci->mid, (
VALUE)mid);
5166 cd->ci = vm_ci_new_runtime(mid,
5169 vm_ci_kwarg(cd->ci));
5176 VALUE klass = vm_search_normal_superclass(me->defined_class);
5180 cc = vm_cc_new(
Qundef, NULL, vm_call_method_missing, cc_type_super);
5184 cc = vm_search_method_fastpath(reg_cfp, cd, klass);
5188 if (cached_cme == NULL) {
5190 cd->cc = empty_cc_for_super();
5192 else if (cached_cme->called_id != mid) {
5195 cc = vm_cc_new(klass, cme, vm_call_super_method, cc_type_super);
5199 cd->cc = cc = empty_cc_for_super();
5203 switch (cached_cme->def->type) {
5205 case VM_METHOD_TYPE_REFINED:
5207 case VM_METHOD_TYPE_ATTRSET:
5208 case VM_METHOD_TYPE_IVAR:
5209 vm_cc_call_set(cc, vm_call_super_method);
5217 VM_ASSERT((vm_cc_cme(cc),
true));
5225block_proc_is_lambda(
const VALUE procval)
5230 GetProcPtr(procval, proc);
5231 return proc->is_lambda;
5241 VALUE self,
int argc,
const VALUE *argv,
int kw_splat,
VALUE block_handler,
5244 int is_lambda = FALSE;
5245 VALUE val, arg, blockarg;
5247 const struct vm_ifunc *ifunc = captured->code.ifunc;
5252 else if (argc == 0) {
5259 blockarg = rb_vm_bh_to_procval(ec, block_handler);
5261 frame_flag = VM_FRAME_MAGIC_IFUNC | VM_FRAME_FLAG_CFRAME | (me ? VM_FRAME_FLAG_BMETHOD : 0);
5263 frame_flag |= VM_FRAME_FLAG_CFRAME_KW;
5266 vm_push_frame(ec, (
const rb_iseq_t *)captured->code.ifunc,
5269 VM_GUARDED_PREV_EP(captured->ep),
5271 0, ec->cfp->sp, 0, 0);
5272 val = (*ifunc->func)(arg, (
VALUE)ifunc->data, argc, argv, blockarg);
5273 rb_vm_pop_frame(ec);
5281 return vm_yield_with_cfunc(ec, captured, captured->self, argc, argv, 0, VM_BLOCK_HANDLER_NONE, NULL);
5287 return rb_sym_proc_call(
SYM2ID(symbol), argc, argv, kw_splat, rb_vm_bh_to_procval(ec, block_handler));
5296 CHECK_VM_STACK_OVERFLOW(cfp, ISEQ_BODY(iseq)->param.lead_num);
5298 for (i=0; i<
len && i<ISEQ_BODY(iseq)->param.lead_num; i++) {
5306vm_callee_setup_block_arg_arg0_check(
VALUE *argv)
5308 VALUE ary, arg0 = argv[0];
5313 VM_ASSERT(argv[0] == arg0);
5321 if (rb_simple_iseq_p(iseq)) {
5325 CALLER_SETUP_ARG(cfp, calling, ci, ISEQ_BODY(iseq)->param.lead_num);
5327 if (arg_setup_type == arg_setup_block &&
5328 calling->argc == 1 &&
5329 ISEQ_BODY(iseq)->param.flags.has_lead &&
5330 !ISEQ_BODY(iseq)->param.flags.ambiguous_param0 &&
5331 !
NIL_P(arg0 = vm_callee_setup_block_arg_arg0_check(argv))) {
5332 calling->argc = vm_callee_setup_block_arg_arg0_splat(cfp, iseq, argv, arg0);
5335 if (calling->argc != ISEQ_BODY(iseq)->param.lead_num) {
5336 if (arg_setup_type == arg_setup_block) {
5337 if (calling->argc < ISEQ_BODY(iseq)->param.lead_num) {
5339 CHECK_VM_STACK_OVERFLOW(cfp, ISEQ_BODY(iseq)->param.lead_num);
5340 for (i=calling->argc; i<ISEQ_BODY(iseq)->param.lead_num; i++) argv[i] =
Qnil;
5341 calling->argc = ISEQ_BODY(iseq)->param.lead_num;
5343 else if (calling->argc > ISEQ_BODY(iseq)->param.lead_num) {
5344 calling->argc = ISEQ_BODY(iseq)->param.lead_num;
5348 argument_arity_error(ec, iseq, NULL, calling->argc, ISEQ_BODY(iseq)->param.lead_num, ISEQ_BODY(iseq)->param.lead_num);
5355 return setup_parameters_complex(ec, iseq, calling, ci, argv, arg_setup_type);
5364 calling = &calling_entry;
5365 calling->argc = argc;
5366 calling->block_handler = block_handler;
5367 calling->kw_splat = (flags & VM_CALL_KW_SPLAT) ? 1 : 0;
5369 calling->heap_argv = 0;
5371 struct rb_callinfo dummy_ci = VM_CI_ON_STACK(0, flags, 0, 0);
5373 return vm_callee_setup_block_arg(ec, calling, &dummy_ci, iseq, argv, arg_setup_type);
5381 bool is_lambda,
VALUE block_handler)
5384 const rb_iseq_t *iseq = rb_iseq_check(captured->code.iseq);
5385 const int arg_size = ISEQ_BODY(iseq)->param.size;
5386 VALUE *
const rsp = GET_SP() - calling->argc;
5387 VALUE *
const argv = rsp;
5388 int opt_pc = vm_callee_setup_block_arg(ec, calling, ci, iseq, argv, is_lambda ? arg_setup_method : arg_setup_block);
5389 int frame_flag = VM_FRAME_MAGIC_BLOCK | (is_lambda ? VM_FRAME_FLAG_LAMBDA : 0);
5393 vm_push_frame(ec, iseq,
5396 VM_GUARDED_PREV_EP(captured->ep), 0,
5397 ISEQ_BODY(iseq)->iseq_encoded + opt_pc,
5399 ISEQ_BODY(iseq)->local_table_size - arg_size, ISEQ_BODY(iseq)->stack_max);
5407 MAYBE_UNUSED(
bool is_lambda),
VALUE block_handler)
5409 VALUE symbol = VM_BH_TO_SYMBOL(block_handler);
5410 int flags = vm_ci_flag(ci);
5412 if (UNLIKELY(!(flags & VM_CALL_ARGS_SIMPLE) &&
5413 ((calling->argc == 0) ||
5414 (calling->argc == 1 && (flags & (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT))) ||
5415 (calling->argc == 2 && (flags & VM_CALL_ARGS_SPLAT) && (flags & VM_CALL_KW_SPLAT)) ||
5416 ((flags & VM_CALL_KWARG) && (vm_ci_kwarg(ci)->keyword_len == calling->argc))))) {
5417 CALLER_SETUP_ARG(reg_cfp, calling, ci, ALLOW_HEAP_ARGV);
5419 if (UNLIKELY(calling->heap_argv)) {
5420#if VM_ARGC_STACK_MAX < 0
5422 rb_raise(rb_eArgError,
"no receiver given");
5428 reg_cfp->sp[-1] = reg_cfp->sp[-2];
5429 reg_cfp->sp[-2] = calling->recv;
5430 flags |= VM_CALL_ARGS_SPLAT;
5433 if (calling->argc < 1) {
5434 rb_raise(rb_eArgError,
"no receiver given");
5436 calling->recv = TOPN(--calling->argc);
5438 if (calling->kw_splat) {
5439 flags |= VM_CALL_KW_SPLAT;
5443 if (calling->argc < 1) {
5444 rb_raise(rb_eArgError,
"no receiver given");
5446 calling->recv = TOPN(--calling->argc);
5449 return vm_call_symbol(ec, reg_cfp, calling, ci, symbol, flags);
5455 MAYBE_UNUSED(
bool is_lambda),
VALUE block_handler)
5460 CALLER_SETUP_ARG(ec->cfp, calling, ci, ALLOW_HEAP_ARGV_KEEP_KWSPLAT);
5461 argc = calling->argc;
5462 val = vm_yield_with_cfunc(ec, captured, captured->self, CALLING_ARGC(calling), calling->heap_argv ?
RARRAY_CONST_PTR(calling->heap_argv) : STACK_ADDR_FROM_TOP(argc), calling->kw_splat, calling->block_handler, NULL);
5468vm_proc_to_block_handler(
VALUE procval)
5470 const struct rb_block *block = vm_proc_block(procval);
5472 switch (vm_block_type(block)) {
5473 case block_type_iseq:
5474 return VM_BH_FROM_ISEQ_BLOCK(&block->as.captured);
5475 case block_type_ifunc:
5476 return VM_BH_FROM_IFUNC_BLOCK(&block->as.captured);
5477 case block_type_symbol:
5478 return VM_BH_FROM_SYMBOL(block->as.symbol);
5479 case block_type_proc:
5480 return VM_BH_FROM_PROC(block->as.proc);
5482 VM_UNREACHABLE(vm_yield_with_proc);
5489 bool is_lambda,
VALUE block_handler)
5491 while (vm_block_handler_type(block_handler) == block_handler_type_proc) {
5492 VALUE proc = VM_BH_TO_PROC(block_handler);
5493 is_lambda = block_proc_is_lambda(proc);
5494 block_handler = vm_proc_to_block_handler(proc);
5497 return vm_invoke_block(ec, reg_cfp, calling, ci, is_lambda, block_handler);
5503 bool is_lambda,
VALUE block_handler)
5507 bool is_lambda,
VALUE block_handler);
5509 switch (vm_block_handler_type(block_handler)) {
5510 case block_handler_type_iseq: func = vm_invoke_iseq_block;
break;
5511 case block_handler_type_ifunc: func = vm_invoke_ifunc_block;
break;
5512 case block_handler_type_proc: func = vm_invoke_proc_block;
break;
5513 case block_handler_type_symbol: func = vm_invoke_symbol_block;
break;
5514 default: rb_bug(
"vm_invoke_block: unreachable");
5517 return func(ec, reg_cfp, calling, ci, is_lambda, block_handler);
5521vm_make_proc_with_iseq(
const rb_iseq_t *blockiseq)
5528 rb_bug(
"vm_make_proc_with_iseq: unreachable");
5531 captured = VM_CFP_TO_CAPTURED_BLOCK(cfp);
5532 captured->code.iseq = blockiseq;
5534 return rb_vm_make_proc(ec, captured,
rb_cProc);
5538vm_once_exec(
VALUE iseq)
5545vm_once_clear(
VALUE data)
5548 is->once.running_thread = NULL;
5560 args[0] = obj; args[1] =
Qfalse;
5562 if (!UNDEF_P(r) &&
RTEST(r)) {
5574 enum defined_type
type = (
enum defined_type)op_type;
5581 return rb_gvar_defined(
SYM2ID(obj));
5583 case DEFINED_CVAR: {
5584 const rb_cref_t *cref = vm_get_cref(GET_EP());
5585 klass = vm_get_cvar_base(cref, GET_CFP(), 0);
5590 case DEFINED_CONST_FROM: {
5591 bool allow_nil =
type == DEFINED_CONST;
5593 return vm_get_ev_const(ec, klass,
SYM2ID(obj), allow_nil,
true);
5598 return rb_ec_obj_respond_to(ec, v,
SYM2ID(obj), TRUE);
5600 case DEFINED_METHOD:{
5605 switch (METHOD_ENTRY_VISI(me)) {
5606 case METHOD_VISI_PRIVATE:
5608 case METHOD_VISI_PROTECTED:
5612 case METHOD_VISI_PUBLIC:
5616 rb_bug(
"vm_defined: unreachable: %u", (
unsigned int)METHOD_ENTRY_VISI(me));
5620 return check_respond_to_missing(obj, v);
5625 if (GET_BLOCK_HANDLER() != VM_BLOCK_HANDLER_NONE) {
5629 case DEFINED_ZSUPER:
5634 VALUE klass = vm_search_normal_superclass(me->defined_class);
5635 if (!klass)
return false;
5637 ID id = me->def->original_id;
5644 return RTEST(vm_backref_defined(ec, GET_LEP(),
FIX2INT(obj)));
5646 rb_bug(
"unimplemented defined? type (VM)");
5656 return vm_defined(ec, reg_cfp, op_type, obj, v);
5660vm_get_ep(
const VALUE *
const reg_ep, rb_num_t lv)
5663 const VALUE *ep = reg_ep;
5664 for (i = 0; i < lv; i++) {
5665 ep = GET_PREV_EP(ep);
5671vm_get_special_object(
const VALUE *
const reg_ep,
5672 enum vm_special_object_type
type)
5675 case VM_SPECIAL_OBJECT_VMCORE:
5676 return rb_mRubyVMFrozenCore;
5677 case VM_SPECIAL_OBJECT_CBASE:
5678 return vm_get_cbase(reg_ep);
5679 case VM_SPECIAL_OBJECT_CONST_BASE:
5680 return vm_get_const_base(reg_ep);
5682 rb_bug(
"putspecialobject insn: unknown value_type %d",
type);
5689rb_vm_get_special_object(
const VALUE *reg_ep,
enum vm_special_object_type
type)
5691 return vm_get_special_object(reg_ep,
type);
5697 const VALUE ary2 = ary2st;
5698 VALUE tmp1 = rb_check_to_array(ary1);
5699 VALUE tmp2 = rb_check_to_array(ary2);
5720 const VALUE ary2 = ary2st;
5722 if (
NIL_P(ary2))
return ary1;
5724 VALUE tmp2 = rb_check_to_array(ary2);
5739 return vm_concat_array(ary1, ary2st);
5743rb_vm_concat_to_array(
VALUE ary1,
VALUE ary2st)
5745 return vm_concat_to_array(ary1, ary2st);
5754 VALUE tmp = rb_check_to_array(ary);
5758 else if (
RTEST(flag)) {
5771 return vm_splat_array(flag, ary);
5777 enum vm_check_match_type
type = ((int)flag) & VM_CHECKMATCH_TYPE_MASK;
5779 if (flag & VM_CHECKMATCH_ARRAY) {
5783 for (i = 0; i < n; i++) {
5785 VALUE c = check_match(ec, v, target,
type);
5794 return check_match(ec, pattern, target,
type);
5801 return vm_check_match(ec, target, pattern, flag);
5805vm_check_keyword(lindex_t bits, lindex_t idx,
const VALUE *ep)
5807 const VALUE kw_bits = *(ep - bits);
5810 unsigned int b = (
unsigned int)
FIX2ULONG(kw_bits);
5811 if ((idx < VM_KW_SPECIFIED_BITS_MAX) && (b & (0x01 << idx)))
5824 if (RUBY_DTRACE_METHOD_ENTRY_ENABLED() ||
5825 RUBY_DTRACE_METHOD_RETURN_ENABLED() ||
5826 RUBY_DTRACE_CMETHOD_ENTRY_ENABLED() ||
5827 RUBY_DTRACE_CMETHOD_RETURN_ENABLED()) {
5831 RUBY_DTRACE_METHOD_ENTRY_HOOK(ec, 0, 0);
5834 RUBY_DTRACE_CMETHOD_ENTRY_HOOK(ec, 0, 0);
5837 RUBY_DTRACE_METHOD_RETURN_HOOK(ec, 0, 0);
5840 RUBY_DTRACE_CMETHOD_RETURN_HOOK(ec, 0, 0);
5847vm_const_get_under(
ID id, rb_num_t flags,
VALUE cbase)
5852 else if (VM_DEFINECLASS_SCOPED_P(flags)) {
5853 return rb_public_const_get_at(cbase,
id);
5861vm_check_if_class(
ID id, rb_num_t flags,
VALUE super,
VALUE klass)
5866 else if (VM_DEFINECLASS_HAS_SUPERCLASS_P(flags)) {
5871 "superclass mismatch for class %"PRIsVALUE
"",
5884vm_check_if_module(
ID id,
VALUE mod)
5903vm_declare_class(
ID id, rb_num_t flags,
VALUE cbase,
VALUE super)
5906 VALUE s = VM_DEFINECLASS_HAS_SUPERCLASS_P(flags) ? super :
rb_cObject;
5913vm_declare_module(
ID id,
VALUE cbase)
5919NORETURN(
static void unmatched_redefinition(
const char *
type,
VALUE cbase,
ID id,
VALUE old));
5923 VALUE name = rb_id2str(
id);
5924 VALUE message = rb_sprintf(
"%"PRIsVALUE
" is not a %s",
5926 VALUE location = rb_const_source_location_at(cbase,
id);
5927 if (!
NIL_P(location)) {
5928 rb_str_catf(message,
"\n%"PRIsVALUE
":%"PRIsVALUE
":"
5929 " previous definition of %"PRIsVALUE
" was here",
5936vm_define_class(
ID id, rb_num_t flags,
VALUE cbase,
VALUE super)
5940 if (VM_DEFINECLASS_HAS_SUPERCLASS_P(flags) && !
RB_TYPE_P(super,
T_CLASS)) {
5942 "superclass must be an instance of Class (given an instance of %"PRIsVALUE
")",
5946 vm_check_if_namespace(cbase);
5951 if ((klass = vm_const_get_under(
id, flags, cbase)) != 0) {
5952 if (!vm_check_if_class(
id, flags, super, klass))
5953 unmatched_redefinition(
"class", cbase,
id, klass);
5957 return vm_declare_class(
id, flags, cbase, super);
5962vm_define_module(
ID id, rb_num_t flags,
VALUE cbase)
5966 vm_check_if_namespace(cbase);
5967 if ((mod = vm_const_get_under(
id, flags, cbase)) != 0) {
5968 if (!vm_check_if_module(
id, mod))
5969 unmatched_redefinition(
"module", cbase,
id, mod);
5973 return vm_declare_module(
id, cbase);
5978vm_find_or_create_class_by_id(
ID id,
5983 rb_vm_defineclass_type_t
type = VM_DEFINECLASS_TYPE(flags);
5986 case VM_DEFINECLASS_TYPE_CLASS:
5988 return vm_define_class(
id, flags, cbase, super);
5990 case VM_DEFINECLASS_TYPE_SINGLETON_CLASS:
5994 case VM_DEFINECLASS_TYPE_MODULE:
5996 return vm_define_module(
id, flags, cbase);
5999 rb_bug(
"unknown defineclass type: %d", (
int)
type);
6003static rb_method_visibility_t
6008 if (!vm_env_cref_by_cref(cfp->ep)) {
6009 return METHOD_VISI_PUBLIC;
6012 return CREF_SCOPE_VISI(vm_ec_cref(ec))->method_visi;
6021 if (!vm_env_cref_by_cref(cfp->ep)) {
6025 return CREF_SCOPE_VISI(vm_ec_cref(ec))->module_func;
6033 rb_method_visibility_t visi;
6038 visi = METHOD_VISI_PUBLIC;
6041 klass = CREF_CLASS_FOR_DEFINITION(cref);
6042 visi = vm_scope_visibility_get(ec);
6049 rb_add_method_iseq(klass,
id, (
const rb_iseq_t *)iseqval, cref, visi);
6052 !RCLASS_SINGLETON_P(klass) &&
6054 RCLASS_SET_MAX_IV_COUNT(klass, rb_estimate_iv_count(klass, (
const rb_iseq_t *)iseqval));
6057 if (!is_singleton && vm_scope_module_func_check(ec)) {
6059 rb_add_method_iseq(klass,
id, (
const rb_iseq_t *)iseqval, cref, METHOD_VISI_PUBLIC);
6068rb_vm_untag_block_handler(
VALUE block_handler)
6070 if (VM_BLOCK_HANDLER_NONE == block_handler)
return Qnil;
6072 switch (vm_block_handler_type(block_handler)) {
6073 case block_handler_type_iseq:
6074 case block_handler_type_ifunc: {
6076 return captured->code.val;
6078 case block_handler_type_proc:
6079 case block_handler_type_symbol:
6080 return block_handler;
6082 rb_bug(
"rb_vm_untag_block_handler: unreachable");
6089 return rb_vm_untag_block_handler(VM_CF_BLOCK_HANDLER(reg_cfp));
6098 VALUE block_handler = VM_CF_BLOCK_HANDLER(GET_CFP());
6100 if (block_handler == VM_BLOCK_HANDLER_NONE) {
6101 rb_vm_localjump_error(
"no block given (yield)",
Qnil, 0);
6104 return vm_invoke_block(ec, GET_CFP(), calling, ci,
false, block_handler);
6108enum method_explorer_type {
6110 mexp_search_invokeblock,
6119 VALUE block_handler,
6120 enum method_explorer_type method_explorer
6125 int argc = vm_ci_argc(ci);
6126 VALUE recv = TOPN(argc);
6128 .block_handler = block_handler,
6129 .kw_splat = IS_ARGS_KW_SPLAT(ci) > 0,
6135 switch (method_explorer) {
6136 case mexp_search_method:
6137 calling.cc = cc = vm_search_method_fastpath(reg_cfp, cd,
CLASS_OF(recv));
6138 val = vm_cc_call(cc)(ec, GET_CFP(), &calling);
6140 case mexp_search_super:
6141 calling.cc = cc = vm_search_super_method(reg_cfp, cd, recv);
6142 val = vm_cc_call(cc)(ec, GET_CFP(), &calling);
6144 case mexp_search_invokeblock:
6145 val = vm_invokeblock_i(ec, GET_CFP(), &calling);
6155 VALUE bh = vm_caller_setup_arg_block(ec, GET_CFP(), cd->ci, blockiseq,
false);
6156 VALUE val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_method);
6170 VALUE bh = vm_caller_setup_fwd_args(GET_EC(), GET_CFP(), cd, blockiseq,
false, &adjusted_cd, &adjusted_ci);
6172 VALUE val = vm_sendish(ec, GET_CFP(), &adjusted_cd.cd, bh, mexp_search_method);
6174 if (cd->cc != adjusted_cd.cd.cc && vm_cc_markable(adjusted_cd.cd.cc)) {
6175 RB_OBJ_WRITE(CFP_ISEQ(GET_CFP()), &cd->cc, adjusted_cd.cd.cc);
6186 VALUE bh = VM_BLOCK_HANDLER_NONE;
6187 VALUE val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_method);
6197 VALUE bh = vm_caller_setup_arg_block(ec, GET_CFP(), cd->ci, blockiseq,
true);
6198 VALUE val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_super);
6212 VALUE bh = vm_caller_setup_fwd_args(GET_EC(), GET_CFP(), cd, blockiseq,
true, &adjusted_cd, &adjusted_ci);
6214 VALUE val = vm_sendish(ec, GET_CFP(), &adjusted_cd.cd, bh, mexp_search_super);
6216 if (cd->cc != adjusted_cd.cd.cc && vm_cc_markable(adjusted_cd.cd.cc)) {
6217 RB_OBJ_WRITE(CFP_ISEQ(GET_CFP()), &cd->cc, adjusted_cd.cd.cc);
6228 VALUE bh = VM_BLOCK_HANDLER_NONE;
6229 VALUE val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_invokeblock);
6257 if (check_method_basic_definition(cme)) {
6266 if (check_cfunc(cme, rb_mod_to_s)) {
6272 val = rb_mod_to_s(recv);
6278 if (check_cfunc(cme, rb_nil_to_s)) {
6279 return rb_nil_to_s(recv);
6283 if (check_cfunc(cme, rb_true_to_s)) {
6284 return rb_true_to_s(recv);
6288 if (check_cfunc(cme, rb_false_to_s)) {
6289 return rb_false_to_s(recv);
6293 if (check_cfunc(cme, rb_int_to_s)) {
6294 return rb_fix_to_s(recv);
6306 return vm_objtostring(reg_cfp, recv, cd);
6310vm_opt_ary_freeze(
VALUE ary,
int bop,
ID id)
6312 if (BASIC_OP_UNREDEFINED_P(bop, ARRAY_REDEFINED_OP_FLAG)) {
6321vm_opt_hash_freeze(
VALUE hash,
int bop,
ID id)
6323 if (BASIC_OP_UNREDEFINED_P(bop, HASH_REDEFINED_OP_FLAG)) {
6332vm_opt_str_freeze(
VALUE str,
int bop,
ID id)
6334 if (BASIC_OP_UNREDEFINED_P(bop, STRING_REDEFINED_OP_FLAG)) {
6348 if (BASIC_OP_UNREDEFINED_P(BOP_INCLUDE_P, ARRAY_REDEFINED_OP_FLAG)) {
6352 VALUE args[1] = {target};
6355 RUBY_DTRACE_CREATE_HOOK(ARRAY,
RARRAY_LEN(ary));
6358 return rb_vm_call_with_refinements(ec, dupary, idIncludeP, 1, args,
RB_NO_KEYWORDS);
6365 return vm_opt_duparray_include_p(ec, ary, target);
6371 if (BASIC_OP_UNREDEFINED_P(BOP_MAX, ARRAY_REDEFINED_OP_FLAG)) {
6372 if (array_len == 0) {
6376 VALUE result = *ptr;
6377 rb_snum_t i = array_len - 1;
6379 const VALUE v = *++ptr;
6380 if (OPTIMIZED_CMP(v, result) > 0) {
6395 return vm_opt_newarray_max(ec, array_len, ptr);
6401 if (BASIC_OP_UNREDEFINED_P(BOP_MIN, ARRAY_REDEFINED_OP_FLAG)) {
6402 if (array_len == 0) {
6406 VALUE result = *ptr;
6407 rb_snum_t i = array_len - 1;
6409 const VALUE v = *++ptr;
6410 if (OPTIMIZED_CMP(v, result) < 0) {
6425 return vm_opt_newarray_min(ec, array_len, ptr);
6432 if (BASIC_OP_UNREDEFINED_P(BOP_HASH, ARRAY_REDEFINED_OP_FLAG)) {
6433 return rb_ary_hash_values(array_len, ptr);
6443 return vm_opt_newarray_hash(ec, array_len, ptr);
6452 if (BASIC_OP_UNREDEFINED_P(BOP_INCLUDE_P, ARRAY_REDEFINED_OP_FLAG)) {
6453 struct RArray fake_ary = {RBASIC_INIT};
6454 VALUE ary = rb_setup_fake_ary(&fake_ary,
ptr, array_len);
6458 VALUE args[1] = {target};
6466 return vm_opt_newarray_include_p(ec, array_len,
ptr, target);
6472 if (BASIC_OP_UNREDEFINED_P(BOP_PACK, ARRAY_REDEFINED_OP_FLAG)) {
6473 struct RArray fake_ary = {RBASIC_INIT};
6474 VALUE ary = rb_setup_fake_ary(&fake_ary,
ptr, array_len);
6475 return rb_ec_pack_ary(ec,
ary, fmt, (UNDEF_P(buffer) ?
Qnil : buffer));
6485 if (!UNDEF_P(buffer)) {
6486 args[1] = rb_hash_new_with_size(1);
6487 rb_hash_aset(args[1],
ID2SYM(idBuffer), buffer);
6492 return rb_vm_call_with_refinements(ec,
rb_ary_new4(array_len,
ptr), idPack, argc, args, kw_splat);
6499 return vm_opt_newarray_pack_buffer(ec, array_len,
ptr, fmt, buffer);
6505 return vm_opt_newarray_pack_buffer(ec, array_len,
ptr, fmt,
Qundef);
6511vm_track_constant_cache(
ID id,
void *ic)
6514 struct rb_id_table *const_cache = &vm->constant_cache;
6515 VALUE lookup_result;
6518 if (rb_id_table_lookup(const_cache,
id, &lookup_result)) {
6522 ics = set_init_numtable();
6523 rb_id_table_insert(const_cache,
id, (
VALUE)ics);
6538 vm->inserting_constant_cache_id = id;
6540 set_insert(ics, (st_data_t)ic);
6542 vm->inserting_constant_cache_id = (
ID)0;
6549 for (
int i = 0; segments[i]; i++) {
6550 ID id = segments[i];
6551 if (
id == idNULL)
continue;
6552 vm_track_constant_cache(
id, ic);
6561 if ((flags & IMEMO_CONST_CACHE_SHAREABLE) || rb_ractor_main_p()) {
6562 VM_ASSERT(ractor_incidental_shareable_p(flags & IMEMO_CONST_CACHE_SHAREABLE, value));
6564 return (ic_cref == NULL ||
6565 ic_cref == vm_get_cref(reg_ep));
6573 VM_ASSERT(IMEMO_TYPE_P(ice, imemo_constcache));
6574 return vm_inlined_ic_hit_p(ice->flags, ice->value, ice->ic_cref, reg_ep);
6579rb_vm_ic_hit_p(
IC ic,
const VALUE *reg_ep)
6581 return ic->entry && vm_ic_hit_p(ic->entry, reg_ep);
6587 if (ruby_vm_const_missing_count > 0) {
6588 ruby_vm_const_missing_count = 0;
6595 ice->ic_cref = vm_get_const_key_cref(reg_ep);
6599 ice->flags |= IMEMO_CONST_CACHE_SHAREABLE;
6603 unsigned pos = (unsigned)(pc - ISEQ_BODY(iseq)->iseq_encoded);
6604 rb_yjit_constant_ic_update(iseq, ic, pos);
6614 if (ice && vm_ic_hit_p(ice, GET_EP())) {
6617 VM_ASSERT(val == vm_get_ev_const_chain(ec, segments));
6620 ruby_vm_constant_cache_misses++;
6621 val = vm_get_ev_const_chain(ec, segments);
6622 vm_ic_track_const_chain(GET_CFP(), ic, segments);
6625 vm_ic_update(CFP_ISEQ(GET_CFP()), ic, val, GET_EP(), CFP_PC(GET_CFP()) - 2);
6637 if (is->once.running_thread == RUNNING_THREAD_ONCE_DONE) {
6638 return is->once.value;
6640 else if (is->once.running_thread == NULL) {
6642 is->once.running_thread = th;
6647 RB_OBJ_SET_SHAREABLE(val);
6653 is->once.running_thread = RUNNING_THREAD_ONCE_DONE;
6656 else if (is->once.running_thread == th) {
6658 return vm_once_exec((
VALUE)iseq);
6662 RUBY_VM_CHECK_INTS(ec);
6669vm_case_dispatch(CDHASH hash, OFFSET else_offset,
VALUE key)
6671 switch (OBJ_BUILTIN_TYPE(key)) {
6677 if (BASIC_OP_UNREDEFINED_P(BOP_EQQ,
6678 SYMBOL_REDEFINED_OP_FLAG |
6679 INTEGER_REDEFINED_OP_FLAG |
6680 FLOAT_REDEFINED_OP_FLAG |
6681 NIL_REDEFINED_OP_FLAG |
6682 TRUE_REDEFINED_OP_FLAG |
6683 FALSE_REDEFINED_OP_FLAG |
6684 STRING_REDEFINED_OP_FLAG)) {
6688 if (!isinf(kval) && modf(kval, &kval) == 0.0) {
6692 if (rb_hash_stlike_lookup(hash, key, &val)) {
6712 const ptrdiff_t nsp = VM_SP_CNT(ec, cfp->sp);
6713 const ptrdiff_t nbp = VM_SP_CNT(ec, bp);
6714 static const char stack_consistency_error[] =
6715 "Stack consistency error (sp: %"PRIdPTRDIFF
", bp: %"PRIdPTRDIFF
")";
6716#if defined RUBY_DEVEL
6717 VALUE mesg = rb_sprintf(stack_consistency_error, nsp, nbp);
6722 rb_bug(stack_consistency_error, nsp, nbp);
6729 if (FIXNUM_2_P(recv, obj) &&
6730 BASIC_OP_UNREDEFINED_P(BOP_PLUS, INTEGER_REDEFINED_OP_FLAG)) {
6731 return rb_fix_plus_fix(recv, obj);
6733 else if (FLONUM_2_P(recv, obj) &&
6734 BASIC_OP_UNREDEFINED_P(BOP_PLUS, FLOAT_REDEFINED_OP_FLAG)) {
6742 BASIC_OP_UNREDEFINED_P(BOP_PLUS, FLOAT_REDEFINED_OP_FLAG)) {
6747 BASIC_OP_UNREDEFINED_P(BOP_PLUS, STRING_REDEFINED_OP_FLAG)) {
6748 return rb_str_opt_plus(recv, obj);
6752 BASIC_OP_UNREDEFINED_P(BOP_PLUS, ARRAY_REDEFINED_OP_FLAG)) {
6763 if (FIXNUM_2_P(recv, obj) &&
6764 BASIC_OP_UNREDEFINED_P(BOP_MINUS, INTEGER_REDEFINED_OP_FLAG)) {
6765 return rb_fix_minus_fix(recv, obj);
6767 else if (FLONUM_2_P(recv, obj) &&
6768 BASIC_OP_UNREDEFINED_P(BOP_MINUS, FLOAT_REDEFINED_OP_FLAG)) {
6776 BASIC_OP_UNREDEFINED_P(BOP_MINUS, FLOAT_REDEFINED_OP_FLAG)) {
6787 if (FIXNUM_2_P(recv, obj) &&
6788 BASIC_OP_UNREDEFINED_P(BOP_MULT, INTEGER_REDEFINED_OP_FLAG)) {
6789 return rb_fix_mul_fix(recv, obj);
6791 else if (FLONUM_2_P(recv, obj) &&
6792 BASIC_OP_UNREDEFINED_P(BOP_MULT, FLOAT_REDEFINED_OP_FLAG)) {
6800 BASIC_OP_UNREDEFINED_P(BOP_MULT, FLOAT_REDEFINED_OP_FLAG)) {
6811 if (FIXNUM_2_P(recv, obj) &&
6812 BASIC_OP_UNREDEFINED_P(BOP_DIV, INTEGER_REDEFINED_OP_FLAG)) {
6813 return (
FIX2LONG(obj) == 0) ?
Qundef : rb_fix_div_fix(recv, obj);
6815 else if (FLONUM_2_P(recv, obj) &&
6816 BASIC_OP_UNREDEFINED_P(BOP_DIV, FLOAT_REDEFINED_OP_FLAG)) {
6817 return rb_flo_div_flo(recv, obj);
6824 BASIC_OP_UNREDEFINED_P(BOP_DIV, FLOAT_REDEFINED_OP_FLAG)) {
6825 return rb_flo_div_flo(recv, obj);
6835 if (FIXNUM_2_P(recv, obj) &&
6836 BASIC_OP_UNREDEFINED_P(BOP_MOD, INTEGER_REDEFINED_OP_FLAG)) {
6837 return (
FIX2LONG(obj) == 0) ?
Qundef : rb_fix_mod_fix(recv, obj);
6839 else if (FLONUM_2_P(recv, obj) &&
6840 BASIC_OP_UNREDEFINED_P(BOP_MOD, FLOAT_REDEFINED_OP_FLAG)) {
6848 BASIC_OP_UNREDEFINED_P(BOP_MOD, FLOAT_REDEFINED_OP_FLAG)) {
6859 if (vm_method_cfunc_is(reg_cfp, cd, recv, rb_obj_not_equal)) {
6860 VALUE val = opt_equality(reg_cfp, recv, obj, cd_eq);
6862 if (!UNDEF_P(val)) {
6863 return RBOOL(!
RTEST(val));
6873 if (FIXNUM_2_P(recv, obj) &&
6874 BASIC_OP_UNREDEFINED_P(BOP_LT, INTEGER_REDEFINED_OP_FLAG)) {
6877 else if (FLONUM_2_P(recv, obj) &&
6878 BASIC_OP_UNREDEFINED_P(BOP_LT, FLOAT_REDEFINED_OP_FLAG)) {
6886 BASIC_OP_UNREDEFINED_P(BOP_LT, FLOAT_REDEFINED_OP_FLAG)) {
6897 if (FIXNUM_2_P(recv, obj) &&
6898 BASIC_OP_UNREDEFINED_P(BOP_LE, INTEGER_REDEFINED_OP_FLAG)) {
6901 else if (FLONUM_2_P(recv, obj) &&
6902 BASIC_OP_UNREDEFINED_P(BOP_LE, FLOAT_REDEFINED_OP_FLAG)) {
6910 BASIC_OP_UNREDEFINED_P(BOP_LE, FLOAT_REDEFINED_OP_FLAG)) {
6921 if (FIXNUM_2_P(recv, obj) &&
6922 BASIC_OP_UNREDEFINED_P(BOP_GT, INTEGER_REDEFINED_OP_FLAG)) {
6925 else if (FLONUM_2_P(recv, obj) &&
6926 BASIC_OP_UNREDEFINED_P(BOP_GT, FLOAT_REDEFINED_OP_FLAG)) {
6934 BASIC_OP_UNREDEFINED_P(BOP_GT, FLOAT_REDEFINED_OP_FLAG)) {
6945 if (FIXNUM_2_P(recv, obj) &&
6946 BASIC_OP_UNREDEFINED_P(BOP_GE, INTEGER_REDEFINED_OP_FLAG)) {
6949 else if (FLONUM_2_P(recv, obj) &&
6950 BASIC_OP_UNREDEFINED_P(BOP_GE, FLOAT_REDEFINED_OP_FLAG)) {
6958 BASIC_OP_UNREDEFINED_P(BOP_GE, FLOAT_REDEFINED_OP_FLAG)) {
6974 BASIC_OP_UNREDEFINED_P(BOP_LTLT, STRING_REDEFINED_OP_FLAG)) {
6983 BASIC_OP_UNREDEFINED_P(BOP_LTLT, ARRAY_REDEFINED_OP_FLAG)) {
7001 BASIC_OP_UNREDEFINED_P(BOP_AND, INTEGER_REDEFINED_OP_FLAG)) {
7012 if (FIXNUM_2_P(recv, obj) &&
7013 BASIC_OP_UNREDEFINED_P(BOP_OR, INTEGER_REDEFINED_OP_FLAG)) {
7025 if (FIXNUM_2_P(recv, obj) &&
7026 BASIC_OP_UNREDEFINED_P(BOP_AREF, INTEGER_REDEFINED_OP_FLAG)) {
7027 return rb_fix_aref(recv, obj);
7032 BASIC_OP_UNREDEFINED_P(BOP_AREF, ARRAY_REDEFINED_OP_FLAG)) {
7034 return rb_ary_entry_internal(recv,
FIX2LONG(obj));
7037 return rb_ary_aref1(recv, obj);
7041 BASIC_OP_UNREDEFINED_P(BOP_AREF, HASH_REDEFINED_OP_FLAG)) {
7042 return rb_hash_aref(recv, obj);
7056 BASIC_OP_UNREDEFINED_P(BOP_ASET, ARRAY_REDEFINED_OP_FLAG) &&
7062 BASIC_OP_UNREDEFINED_P(BOP_ASET, HASH_REDEFINED_OP_FLAG)) {
7063 rb_hash_aset(recv, obj, set);
7072vm_opt_length(
VALUE recv,
int bop)
7078 BASIC_OP_UNREDEFINED_P(bop, STRING_REDEFINED_OP_FLAG)) {
7079 if (bop == BOP_EMPTY_P) {
7080 return LONG2NUM(RSTRING_LEN(recv));
7087 BASIC_OP_UNREDEFINED_P(bop, ARRAY_REDEFINED_OP_FLAG)) {
7091 BASIC_OP_UNREDEFINED_P(bop, HASH_REDEFINED_OP_FLAG)) {
7100vm_opt_empty_p(
VALUE recv)
7102 switch (vm_opt_length(recv, BOP_EMPTY_P)) {
7115 BASIC_OP_UNREDEFINED_P(BOP_NIL_P, NIL_REDEFINED_OP_FLAG)) {
7118 else if (vm_method_cfunc_is(reg_cfp, cd, recv, rb_false)) {
7134 case RSHIFT(~0UL, 1):
7137 return rb_uint2big(1UL << (SIZEOF_LONG * CHAR_BIT - 2));
7155vm_opt_succ(
VALUE recv)
7158 BASIC_OP_UNREDEFINED_P(BOP_SUCC, INTEGER_REDEFINED_OP_FLAG)) {
7159 return fix_succ(recv);
7165 BASIC_OP_UNREDEFINED_P(BOP_SUCC, STRING_REDEFINED_OP_FLAG)) {
7176 if (vm_method_cfunc_is(reg_cfp, cd, recv, rb_obj_not)) {
7177 return RBOOL(!
RTEST(recv));
7192 BASIC_OP_UNREDEFINED_P(BOP_MATCH, STRING_REDEFINED_OP_FLAG)) {
7196 BASIC_OP_UNREDEFINED_P(BOP_MATCH, REGEXP_REDEFINED_OP_FLAG)) {
7214 VALUE self = GET_SELF();
7216 VM_ASSERT(rb_popcount64((uint64_t)event) == 1);
7218 if (local_hooks) local_hooks->running++;
7220 if (event & global_hooks->events) {
7223 vm_dtrace(event, ec);
7224 rb_exec_event_hook_orig(ec, global_hooks, event, self, 0, 0, 0 , val, 0);
7228 if (local_hooks) local_hooks->running--;
7229 if (local_hooks != NULL) {
7230 if (event & local_hooks->events) {
7233 rb_exec_event_hook_orig(ec, local_hooks, event, self, 0, 0, 0 , val, 0);
7239#define VM_TRACE_HOOK(target_event, val) do { \
7240 if ((pc_events & (target_event)) & enabled_flags) { \
7241 vm_trace_hook(ec, reg_cfp, pc, pc_events, (target_event), global_hooks, local_hooks, (val)); \
7248 VM_ASSERT(VM_FRAME_RUBYFRAME_P(cfp));
7249 VM_ASSERT(ISEQ_BODY(CFP_ISEQ(cfp))->
type == ISEQ_TYPE_RESCUE);
7250 return cfp->ep[VM_ENV_INDEX_LAST_LVAR];
7256 const VALUE *pc = reg_cfp->pc;
7258 rb_event_flag_t enabled_flags = r->pub.hooks.events & ISEQ_TRACE_EVENTS;
7261 if (enabled_flags == 0 && rb_ractor_targeted_hooks_cnt(r) == 0) {
7265 const rb_iseq_t *iseq = CFP_ISEQ(reg_cfp);
7266 size_t pos = pc - ISEQ_BODY(iseq)->iseq_encoded;
7268 unsigned int local_hooks_cnt = iseq->aux.exec.local_hooks_cnt;
7270 if (RB_UNLIKELY(local_hooks_cnt > 0)) {
7272 if (st_lookup(rb_ractor_targeted_hooks(r), (st_data_t)iseq, &val)) {
7276 rb_event_flag_t iseq_local_events = local_hooks != NULL ? local_hooks->events : 0;
7280 const bool bmethod_frame = VM_FRAME_BMETHOD_P(reg_cfp);
7281 enabled_flags |= iseq_local_events;
7283 VM_ASSERT((iseq_local_events & ~ISEQ_TRACE_EVENTS) == 0);
7285 if (bmethod_frame) {
7287 VM_ASSERT(me->def->type == VM_METHOD_TYPE_BMETHOD);
7288 unsigned int bmethod_hooks_cnt = me->def->body.bmethod.local_hooks_cnt;
7289 if (RB_UNLIKELY(bmethod_hooks_cnt > 0)) {
7291 if (st_lookup(rb_ractor_targeted_hooks(r), (st_data_t)me->def, &val)) {
7294 if (bmethod_local_hooks) {
7295 bmethod_local_events = bmethod_local_hooks->events;
7300 if ((pc_events & enabled_flags) == 0 && !bmethod_frame) {
7304 rb_iseq_trace_set(iseq, vm_event_flags & ISEQ_TRACE_EVENTS);
7312 else if (ec->trace_arg != NULL) {
7320 rb_event_flag_t bmethod_events = ractor_events | bmethod_local_events;
7323 ruby_debug_printf(
"vm_trace>>%4d (%4x) - %s:%d %s\n",
7326 RSTRING_PTR(rb_iseq_path(iseq)),
7327 (
int)rb_iseq_line_no(iseq, pos),
7328 RSTRING_PTR(rb_iseq_label(iseq)));
7330 VM_ASSERT(reg_cfp->pc == pc);
7331 VM_ASSERT(pc_events != 0);
7341 VM_TRACE_HOOK(RUBY_EVENT_COVERAGE_LINE,
Qundef);
7342 VM_TRACE_HOOK(RUBY_EVENT_COVERAGE_BRANCH,
Qundef);
7353#if VM_CHECK_MODE > 0
7354NORETURN( NOINLINE( COLDFUNC
7355void rb_vm_canary_is_found_dead(
enum ruby_vminsn_type i,
VALUE c)));
7358Init_vm_stack_canary(
void)
7361 int n = ruby_fill_random_bytes(&vm_stack_canary,
sizeof vm_stack_canary,
false);
7362 vm_stack_canary |= 0x01;
7364 vm_stack_canary_was_born =
true;
7369rb_vm_canary_is_found_dead(
enum ruby_vminsn_type i,
VALUE c)
7373 const char *insn = rb_insns_name(i);
7377 rb_bug(
"dead canary found at %s: %s", insn, str);
7381void Init_vm_stack_canary(
void) { }
7413 return (*(rb_invoke_funcptr0_t)funcptr)(ec, self);
7420 return (*(rb_invoke_funcptr1_t)funcptr)(ec, self, argv[0]);
7427 return (*(rb_invoke_funcptr2_t)funcptr)(ec, self, argv[0], argv[1]);
7434 return (*(rb_invoke_funcptr3_t)funcptr)(ec, self, argv[0], argv[1], argv[2]);
7441 return (*(rb_invoke_funcptr4_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3]);
7448 return (*(rb_invoke_funcptr5_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4]);
7455 return (*(rb_invoke_funcptr6_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
7462 return (*(rb_invoke_funcptr7_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
7469 return (*(rb_invoke_funcptr8_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
7475 typedef VALUE (*rb_invoke_funcptr9_t)(
rb_execution_context_t *ec,
VALUE self,
VALUE v1,
VALUE v2,
VALUE v3,
VALUE v4,
VALUE v5,
VALUE v6,
VALUE v7,
VALUE v8,
VALUE v9);
7476 return (*(rb_invoke_funcptr9_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
7482 typedef VALUE (*rb_invoke_funcptr10_t)(
rb_execution_context_t *ec,
VALUE self,
VALUE v1,
VALUE v2,
VALUE v3,
VALUE v4,
VALUE v5,
VALUE v6,
VALUE v7,
VALUE v8,
VALUE v9,
VALUE v10);
7483 return (*(rb_invoke_funcptr10_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
7489 typedef VALUE (*rb_invoke_funcptr11_t)(
rb_execution_context_t *ec,
VALUE self,
VALUE v1,
VALUE v2,
VALUE v3,
VALUE v4,
VALUE v5,
VALUE v6,
VALUE v7,
VALUE v8,
VALUE v9,
VALUE v10,
VALUE v11);
7490 return (*(rb_invoke_funcptr11_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
7496 typedef VALUE (*rb_invoke_funcptr12_t)(
rb_execution_context_t *ec,
VALUE self,
VALUE v1,
VALUE v2,
VALUE v3,
VALUE v4,
VALUE v5,
VALUE v6,
VALUE v7,
VALUE v8,
VALUE v9,
VALUE v10,
VALUE v11,
VALUE v12);
7497 return (*(rb_invoke_funcptr12_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
7503 typedef VALUE (*rb_invoke_funcptr13_t)(
rb_execution_context_t *ec,
VALUE self,
VALUE v1,
VALUE v2,
VALUE v3,
VALUE v4,
VALUE v5,
VALUE v6,
VALUE v7,
VALUE v8,
VALUE v9,
VALUE v10,
VALUE v11,
VALUE v12,
VALUE v13);
7504 return (*(rb_invoke_funcptr13_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
7510 typedef VALUE (*rb_invoke_funcptr14_t)(
rb_execution_context_t *ec,
VALUE self,
VALUE v1,
VALUE v2,
VALUE v3,
VALUE v4,
VALUE v5,
VALUE v6,
VALUE v7,
VALUE v8,
VALUE v9,
VALUE v10,
VALUE v11,
VALUE v12,
VALUE v13,
VALUE v14);
7511 return (*(rb_invoke_funcptr14_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
7517 typedef VALUE (*rb_invoke_funcptr15_t)(
rb_execution_context_t *ec,
VALUE self,
VALUE v1,
VALUE v2,
VALUE v3,
VALUE v4,
VALUE v5,
VALUE v6,
VALUE v7,
VALUE v8,
VALUE v9,
VALUE v10,
VALUE v11,
VALUE v12,
VALUE v13,
VALUE v14,
VALUE v15);
7518 return (*(rb_invoke_funcptr15_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
7523static builtin_invoker
7524lookup_builtin_invoker(
int argc)
7526 static const builtin_invoker invokers[] = {
7545 return invokers[argc];
7551 const bool canary_p = ISEQ_BODY(CFP_ISEQ(reg_cfp))->builtin_attrs & BUILTIN_ATTR_LEAF;
7552 SETUP_CANARY(canary_p);
7553 rb_insn_func_t func_ptr = (rb_insn_func_t)(uintptr_t)bf->func_ptr;
7554 VALUE ret = (*lookup_builtin_invoker(bf->argc))(ec, reg_cfp->self, argv, func_ptr);
7555 CHECK_CANARY(canary_p, BIN(invokebuiltin));
7562 return invoke_bf(ec, cfp, bf, argv);
7569 fputs(
"vm_invoke_builtin_delegate: passing -> ", stderr);
7570 for (
int i=0; i<bf->argc; i++) {
7571 ruby_debug_printf(
":%s ", rb_id2name(ISEQ_BODY(CFP_ISEQ(cfp))->local_table[i+start_index]));
7573 ruby_debug_printf(
"\n" "%s %s(%d):%p\n", RUBY_FUNCTION_NAME_STRING, bf->name, bf->argc,
7574 (
void *)(uintptr_t)bf->func_ptr);
7577 if (bf->argc == 0) {
7578 return invoke_bf(ec, cfp, bf, NULL);
7581 const VALUE *argv = cfp->ep - ISEQ_BODY(CFP_ISEQ(cfp))->local_table_size - VM_ENV_DATA_SIZE + 1 + start_index;
7582 return invoke_bf(ec, cfp, bf, argv);
7592 return cfp->ep[index];
#define RUBY_ASSERT(...)
Asserts that the given expression is truthy if and only if RUBY_DEBUG is truthy.
#define RUBY_EVENT_END
Encountered an end of a class clause.
#define RUBY_EVENT_C_CALL
A method, written in C, is called.
#define RUBY_EVENT_B_RETURN
Encountered a next statement.
#define RUBY_EVENT_CLASS
Encountered a new class.
#define RUBY_EVENT_LINE
Encountered a new line.
#define RUBY_EVENT_RETURN
Encountered a return statement.
#define RUBY_EVENT_C_RETURN
Return from a method, written in C.
#define RUBY_EVENT_B_CALL
Encountered an yield statement.
uint32_t rb_event_flag_t
Represents event(s).
#define RUBY_EVENT_CALL
A method, written in Ruby, is called.
#define RUBY_EVENT_RESCUE
Encountered a rescue statement.
static bool RB_FL_ABLE(VALUE obj)
Checks if the object is flaggable.
VALUE rb_singleton_class(VALUE obj)
Finds or creates the singleton class of the passed object.
VALUE rb_module_new(void)
Creates a new, anonymous module.
VALUE rb_class_inherited(VALUE super, VALUE klass)
Calls Class::inherited.
VALUE rb_define_class_id(ID id, VALUE super)
This is a very badly designed API that creates an anonymous class.
#define TYPE(_)
Old name of rb_type.
#define RFLOAT_VALUE
Old name of rb_float_value.
#define T_STRING
Old name of RUBY_T_STRING.
#define Qundef
Old name of RUBY_Qundef.
#define INT2FIX
Old name of RB_INT2FIX.
#define T_NIL
Old name of RUBY_T_NIL.
#define T_FLOAT
Old name of RUBY_T_FLOAT.
#define T_IMEMO
Old name of RUBY_T_IMEMO.
#define ID2SYM
Old name of RB_ID2SYM.
#define T_BIGNUM
Old name of RUBY_T_BIGNUM.
#define SPECIAL_CONST_P
Old name of RB_SPECIAL_CONST_P.
#define T_STRUCT
Old name of RUBY_T_STRUCT.
#define T_FIXNUM
Old name of RUBY_T_FIXNUM.
#define SYM2ID
Old name of RB_SYM2ID.
#define CLASS_OF
Old name of rb_class_of.
#define rb_ary_new4
Old name of rb_ary_new_from_values.
#define FIXABLE
Old name of RB_FIXABLE.
#define LONG2FIX
Old name of RB_INT2FIX.
#define FIX2INT
Old name of RB_FIX2INT.
#define T_MODULE
Old name of RUBY_T_MODULE.
#define STATIC_SYM_P
Old name of RB_STATIC_SYM_P.
#define ASSUME
Old name of RBIMPL_ASSUME.
#define FIX2ULONG
Old name of RB_FIX2ULONG.
#define T_TRUE
Old name of RUBY_T_TRUE.
#define T_ICLASS
Old name of RUBY_T_ICLASS.
#define T_HASH
Old name of RUBY_T_HASH.
#define FL_TEST_RAW
Old name of RB_FL_TEST_RAW.
#define rb_ary_new3
Old name of rb_ary_new_from_args.
#define LONG2NUM
Old name of RB_LONG2NUM.
#define rb_exc_new3
Old name of rb_exc_new_str.
#define T_FALSE
Old name of RUBY_T_FALSE.
#define Qtrue
Old name of RUBY_Qtrue.
#define Qnil
Old name of RUBY_Qnil.
#define Qfalse
Old name of RUBY_Qfalse.
#define FIX2LONG
Old name of RB_FIX2LONG.
#define T_ARRAY
Old name of RUBY_T_ARRAY.
#define T_OBJECT
Old name of RUBY_T_OBJECT.
#define NIL_P
Old name of RB_NIL_P.
#define T_SYMBOL
Old name of RUBY_T_SYMBOL.
#define DBL2NUM
Old name of rb_float_new.
#define T_CLASS
Old name of RUBY_T_CLASS.
#define BUILTIN_TYPE
Old name of RB_BUILTIN_TYPE.
#define FL_TEST
Old name of RB_FL_TEST.
#define FIXNUM_P
Old name of RB_FIXNUM_P.
#define FL_USHIFT
Old name of RUBY_FL_USHIFT.
#define FL_SET_RAW
Old name of RB_FL_SET_RAW.
#define SYMBOL_P
Old name of RB_SYMBOL_P.
void rb_notimplement(void)
void rb_exc_raise(VALUE mesg)
Raises an exception in the current thread.
#define ruby_verbose
This variable controls whether the interpreter is in debug mode.
VALUE rb_eTypeError
TypeError exception.
VALUE rb_eFatal
fatal exception.
VALUE rb_eNoMethodError
NoMethodError exception.
void rb_exc_fatal(VALUE mesg)
Raises a fatal error in the current thread.
VALUE rb_eRuntimeError
RuntimeError exception.
void rb_warn(const char *fmt,...)
Identical to rb_warning(), except it reports unless $VERBOSE is nil.
void rb_error_frozen_object(VALUE frozen_obj)
Identical to rb_error_frozen(), except it takes arbitrary Ruby object instead of C's string.
VALUE rb_exc_new_str(VALUE etype, VALUE str)
Identical to rb_exc_new_cstr(), except it takes a Ruby's string instead of C's.
@ RB_WARN_CATEGORY_STRICT_UNUSED_BLOCK
Warning is for checking unused block strictly.
VALUE rb_cClass
Class class.
VALUE rb_cArray
Array class.
VALUE rb_cObject
Object class.
VALUE rb_obj_alloc(VALUE klass)
Allocates an instance of the given class.
VALUE rb_cRegexp
Regexp class.
VALUE rb_obj_frozen_p(VALUE obj)
Just calls RB_OBJ_FROZEN() inside.
VALUE rb_cHash
Hash class.
VALUE rb_obj_class(VALUE obj)
Queries the class of an object.
VALUE rb_inspect(VALUE obj)
Generates a human-readable textual representation of the given object.
VALUE rb_cBasicObject
BasicObject class.
VALUE rb_cModule
Module class.
VALUE rb_class_real(VALUE klass)
Finds a "real" class.
VALUE rb_obj_is_kind_of(VALUE obj, VALUE klass)
Queries if the given object is an instance (of possibly descendants) of the given class.
VALUE rb_cFloat
Float class.
VALUE rb_cProc
Proc class.
VALUE rb_cString
String class.
#define RB_OBJ_WRITTEN(old, oldv, young)
Identical to RB_OBJ_WRITE(), except it doesn't write any values, but only a WB declaration.
#define RB_OBJ_WRITE(old, slot, young)
Declaration of a "back" pointer.
VALUE rb_ary_concat(VALUE lhs, VALUE rhs)
Destructively appends the contents of latter into the end of former.
VALUE rb_ary_shift(VALUE ary)
Destructively deletes an element from the beginning of the passed array and returns what was deleted.
VALUE rb_ary_resurrect(VALUE ary)
I guess there is no use case of this function in extension libraries, but this is a routine identical...
VALUE rb_ary_dup(VALUE ary)
Duplicates an array.
VALUE rb_ary_includes(VALUE ary, VALUE elem)
Queries if the passed array has the passed entry.
VALUE rb_ary_plus(VALUE lhs, VALUE rhs)
Creates a new array, concatenating the former to the latter.
VALUE rb_ary_cat(VALUE ary, const VALUE *train, long len)
Destructively appends multiple elements at the end of the array.
VALUE rb_check_array_type(VALUE obj)
Try converting an object to its array representation using its to_ary method, if any.
VALUE rb_ary_new(void)
Allocates a new, empty array.
VALUE rb_ary_pop(VALUE ary)
Destructively deletes an element from the end of the passed array and returns what was deleted.
VALUE rb_ary_hidden_new(long capa)
Allocates a hidden (no class) empty array.
VALUE rb_ary_push(VALUE ary, VALUE elem)
Special case of rb_ary_cat() that it adds only one element.
VALUE rb_ary_entry(VALUE ary, long off)
Queries an element of an array.
void rb_ary_store(VALUE ary, long key, VALUE val)
Destructively stores the passed value to the passed array's passed index.
#define UNLIMITED_ARGUMENTS
This macro is used in conjunction with rb_check_arity().
static int rb_check_arity(int argc, int min, int max)
Ensures that the passed integer is in the passed range.
VALUE rb_proc_call_with_block(VALUE recv, int argc, const VALUE *argv, VALUE proc)
Identical to rb_proc_call(), except you can additionally pass another proc object,...
VALUE rb_reg_last_match(VALUE md)
This just returns the argument, stringified.
VALUE rb_reg_match(VALUE re, VALUE str)
This is the match operator.
VALUE rb_reg_nth_match(int n, VALUE md)
Queries the nth captured substring.
VALUE rb_reg_match_post(VALUE md)
The portion of the original string after the given match.
VALUE rb_reg_nth_defined(int n, VALUE md)
Identical to rb_reg_nth_match(), except it just returns Boolean.
VALUE rb_reg_match_pre(VALUE md)
The portion of the original string before the given match.
VALUE rb_reg_match_last(VALUE md)
The portion of the original string that captured at the very last.
VALUE rb_str_append(VALUE dst, VALUE src)
Identical to rb_str_buf_append(), except it converts the right hand side before concatenating.
VALUE rb_str_succ(VALUE orig)
Searches for the "successor" of a string.
VALUE rb_str_buf_append(VALUE dst, VALUE src)
Identical to rb_str_cat_cstr(), except it takes Ruby's string instead of C's.
VALUE rb_str_concat(VALUE dst, VALUE src)
Identical to rb_str_append(), except it also accepts an integer as a codepoint.
#define rb_str_cat_cstr(buf, str)
Identical to rb_str_cat(), except it assumes the passed pointer is a pointer to a C string.
VALUE rb_str_length(VALUE)
Identical to rb_str_strlen(), except it returns the value in rb_cInteger.
VALUE rb_str_intern(VALUE str)
Identical to rb_to_symbol(), except it assumes the receiver being an instance of RString.
void rb_thread_schedule(void)
Tries to switch to another thread.
VALUE rb_const_get(VALUE space, ID name)
Identical to rb_const_defined(), except it returns the actual defined value.
VALUE rb_ivar_set(VALUE obj, ID name, VALUE val)
Identical to rb_iv_set(), except it accepts the name as an ID instead of a C string.
void rb_cvar_set(VALUE klass, ID name, VALUE val)
Assigns a value to a class variable.
VALUE rb_cvar_find(VALUE klass, ID name, VALUE *front)
Identical to rb_cvar_get(), except it takes additional "front" pointer.
VALUE rb_ivar_get(VALUE obj, ID name)
Identical to rb_iv_get(), except it accepts the name as an ID instead of a C string.
void rb_const_set(VALUE space, ID name, VALUE val)
Names a constant.
VALUE rb_autoload_load(VALUE space, ID name)
Kicks the autoload procedure as if it was "touched".
VALUE rb_mod_name(VALUE mod)
Queries the name of a module.
VALUE rb_const_get_at(VALUE space, ID name)
Identical to rb_const_defined_at(), except it returns the actual defined value.
void rb_set_class_path_string(VALUE klass, VALUE space, VALUE name)
Identical to rb_set_class_path(), except it accepts the name as Ruby's string instead of C's.
VALUE rb_ivar_defined(VALUE obj, ID name)
Queries if the instance variable is defined at the object.
int rb_const_defined_at(VALUE space, ID name)
Identical to rb_const_defined(), except it doesn't look for parent classes.
VALUE rb_cvar_defined(VALUE klass, ID name)
Queries if the given class has the given class variable.
VALUE rb_class_path(VALUE mod)
Identical to rb_mod_name(), except it returns #<Class: ...> style inspection for anonymous modules.
int rb_const_defined(VALUE space, ID name)
Queries if the constant is defined at the namespace.
VALUE rb_check_funcall(VALUE recv, ID mid, int argc, const VALUE *argv)
Identical to rb_funcallv(), except it returns RUBY_Qundef instead of raising rb_eNoMethodError.
rb_alloc_func_t rb_get_alloc_func(VALUE klass)
Queries the allocator function of a class.
int rb_method_boundp(VALUE klass, ID id, int ex)
Queries if the klass has this method.
ID rb_check_id(volatile VALUE *namep)
Detects if the given name is already interned or not.
VALUE rb_sym2str(VALUE symbol)
Obtain a frozen string representation of a symbol (not including the leading colon).
int off
Offset inside of ptr.
int len
Length of the buffer.
static bool rb_ractor_shareable_p(VALUE obj)
Queries if multiple Ractors can share the passed object or not.
#define RB_OBJ_SHAREABLE_P(obj)
Queries if the passed object has previously classified as shareable or not.
#define MEMCPY(p1, p2, type, n)
Handy macro to call memcpy.
#define ALLOCA_N(type, n)
#define RB_GC_GUARD(v)
Prevents premature destruction of local objects.
#define MEMMOVE(p1, p2, type, n)
Handy macro to call memmove.
VALUE type(ANYARGS)
ANYARGS-ed function type.
VALUE rb_ensure(type *q, VALUE w, type *e, VALUE r)
An equivalent of ensure clause.
#define RARRAY_LEN
Just another name of rb_array_len.
static int RARRAY_LENINT(VALUE ary)
Identical to rb_array_len(), except it differs for the return type.
static VALUE * RARRAY_PTR(VALUE ary)
Wild use of a C pointer.
#define RARRAY_AREF(a, i)
#define RARRAY_CONST_PTR
Just another name of rb_array_const_ptr.
static VALUE RBASIC_CLASS(VALUE obj)
Queries the class of an object.
#define RBASIC(obj)
Convenient casting macro.
#define RCLASS_SUPER
Just another name of rb_class_get_superclass.
#define RHASH_SIZE(h)
Queries the size of the hash.
#define RHASH_EMPTY_P(h)
Checks if the hash is empty.
static VALUE * ROBJECT_FIELDS(VALUE obj)
Queries the instance variables.
#define StringValueCStr(v)
Identical to StringValuePtr, except it additionally checks the contents for viability as a C string.
#define RB_PASS_KEYWORDS
Pass keywords, final argument must be a hash of keywords.
#define RB_NO_KEYWORDS
Do not pass keywords.
static bool RB_SPECIAL_CONST_P(VALUE obj)
Checks if the given object is of enum ruby_special_consts.
#define RTEST
This is an old name of RB_TEST.
#define ANYARGS
Functions declared using this macro take arbitrary arguments, including void.
const VALUE ary[1]
Embedded elements.
const VALUE * ptr
Pointer to the C array that holds the elements of the array.
const ID * segments
A null-terminated list of ids, used to represent a constant's path. idNULL is used to represent the :: prefix.
rb_cref_t * cref
class reference, should be marked
const rb_iseq_t * iseqptr
iseq pointer, should be separated from iseqval
IFUNC (Internal FUNCtion)
const VALUE cref_or_me
class reference or rb_method_entry_t
intptr_t SIGNED_VALUE
A signed integer type that has the same width with VALUE.
uintptr_t ID
Type that represents a Ruby identifier such as a variable name.
#define SIZEOF_VALUE
Identical to sizeof(VALUE), except it is a macro that can also be used inside of preprocessor directi...
uintptr_t VALUE
Type that represents a Ruby object.
static enum ruby_value_type RB_BUILTIN_TYPE(VALUE obj)
Queries the type of the object.
static bool RB_FLOAT_TYPE_P(VALUE obj)
Queries if the object is an instance of rb_cFloat.
static bool RB_TYPE_P(VALUE obj, enum ruby_value_type t)
Queries if the given object is of given type.