11#include "ruby/internal/config.h"
15#ifdef HAVE_STDATOMIC_H
16 #include <stdatomic.h>
20#include "debug_counter.h"
22#include "internal/class.h"
23#include "internal/compar.h"
24#include "internal/hash.h"
25#include "internal/numeric.h"
26#include "internal/proc.h"
27#include "internal/random.h"
28#include "internal/variable.h"
29#include "internal/set_table.h"
30#include "internal/struct.h"
35#include "insns_info.inc"
41 int argc,
const VALUE *argv,
int priv);
51ruby_vm_special_exception_copy(
VALUE exc)
54 rb_obj_copy_ivar(e, exc);
62 VALUE mesg = rb_ec_vm_ptr(ec)->special_exceptions[ruby_error_sysstack];
63 ec->raised_flag = RAISED_STACKOVERFLOW;
65 VALUE at = rb_ec_backtrace_object(ec);
66 mesg = ruby_vm_special_exception_copy(mesg);
71 EC_JUMP_TAG(ec, TAG_RAISE);
74NORETURN(
static void vm_stackoverflow(
void));
79 ec_stack_overflow(GET_EC(), TRUE);
86 rb_bug(
"system stack overflow during GC. Faulty native extension?");
88 if (crit >= rb_stack_overflow_fatal) {
89 ec->raised_flag = RAISED_STACKOVERFLOW;
90 ec->errinfo = rb_ec_vm_ptr(ec)->special_exceptions[ruby_error_stackfatal];
91 EC_JUMP_TAG(ec, TAG_RAISE);
93 ec_stack_overflow(ec, crit < rb_stack_overflow_signal);
100callable_class_p(
VALUE klass)
102#if VM_CHECK_MODE >= 2
103 if (!klass)
return FALSE;
131 VM_ASSERT(IMEMO_TYPE_P((
VALUE)cme, imemo_ment),
"imemo_type:%s", rb_imemo_name(imemo_type((
VALUE)cme)));
133 if (callable_class_p(cme->defined_class)) {
143vm_check_frame_detail(
VALUE type,
int req_block,
int req_me,
int req_cref,
VALUE specval,
VALUE cref_or_me,
int is_cframe,
const rb_iseq_t *iseq)
145 unsigned int magic = (
unsigned int)(
type & VM_FRAME_MAGIC_MASK);
146 enum imemo_type cref_or_me_type = imemo_env;
149 cref_or_me_type = imemo_type(cref_or_me);
151 if (
type & VM_FRAME_FLAG_BMETHOD) {
155 if (req_block && (
type & VM_ENV_FLAG_LOCAL) == 0) {
156 rb_bug(
"vm_push_frame: specval (%p) should be a block_ptr on %x frame", (
void *)specval, magic);
158 if (!req_block && (
type & VM_ENV_FLAG_LOCAL) != 0) {
159 rb_bug(
"vm_push_frame: specval (%p) should not be a block_ptr on %x frame", (
void *)specval, magic);
163 if (cref_or_me_type != imemo_ment) {
164 rb_bug(
"vm_push_frame: (%s) should be method entry on %x frame", rb_obj_info(cref_or_me), magic);
168 if (req_cref && cref_or_me_type != imemo_cref) {
169 rb_bug(
"vm_push_frame: (%s) should be CREF on %x frame", rb_obj_info(cref_or_me), magic);
172 if (cref_or_me !=
Qfalse && cref_or_me_type != imemo_cref) {
173 if (((
type & VM_FRAME_FLAG_LAMBDA) || magic == VM_FRAME_MAGIC_IFUNC || magic == VM_FRAME_MAGIC_DUMMY) && (cref_or_me_type == imemo_ment)) {
177 rb_bug(
"vm_push_frame: (%s) should be false or cref on %x frame", rb_obj_info(cref_or_me), magic);
183 if (cref_or_me_type == imemo_ment) {
186 if (!callable_method_entry_p(me)) {
187 rb_bug(
"vm_push_frame: ment (%s) should be callable on %x frame.", rb_obj_info(cref_or_me), magic);
191 if ((
type & VM_FRAME_MAGIC_MASK) == VM_FRAME_MAGIC_DUMMY) {
192 VM_ASSERT(iseq == NULL ||
194 RUBY_VM_NORMAL_ISEQ_P(iseq)
198 VM_ASSERT(is_cframe == !RUBY_VM_NORMAL_ISEQ_P(iseq));
208 VALUE given_magic =
type & VM_FRAME_MAGIC_MASK;
211#define CHECK(magic, req_block, req_me, req_cref, is_cframe) \
213 vm_check_frame_detail(type, req_block, req_me, req_cref, \
214 specval, cref_or_me, is_cframe, iseq); \
216 switch (given_magic) {
218 CHECK(VM_FRAME_MAGIC_METHOD, TRUE, TRUE, FALSE, FALSE);
219 CHECK(VM_FRAME_MAGIC_CLASS, TRUE, FALSE, TRUE, FALSE);
220 CHECK(VM_FRAME_MAGIC_TOP, TRUE, FALSE, TRUE, FALSE);
221 CHECK(VM_FRAME_MAGIC_CFUNC, TRUE, TRUE, FALSE, TRUE);
222 CHECK(VM_FRAME_MAGIC_BLOCK, FALSE, FALSE, FALSE, FALSE);
223 CHECK(VM_FRAME_MAGIC_IFUNC, FALSE, FALSE, FALSE, TRUE);
224 CHECK(VM_FRAME_MAGIC_EVAL, FALSE, FALSE, FALSE, FALSE);
225 CHECK(VM_FRAME_MAGIC_RESCUE, FALSE, FALSE, FALSE, FALSE);
226 CHECK(VM_FRAME_MAGIC_DUMMY, TRUE, FALSE, FALSE, FALSE);
228 rb_bug(
"vm_push_frame: unknown type (%x)", (
unsigned int)given_magic);
/* Random sentinel value written just above the live VM stack top; if an
 * instruction handler later finds it overwritten/consumed, the stack
 * pointer bookkeeping is wrong (checked by the canary-check machinery).
 * NOTE(review): the leading digits fused onto these lines look like
 * original-file line numbers from a corrupted extraction — confirm
 * against the upstream source. */
233static VALUE vm_stack_canary;
/* Becomes true once the canary value has been initialized; canary
 * checks are skipped while this is still false. */
234static bool vm_stack_canary_was_born =
false;
241 unsigned int pos = 0;
242 while (pos < ISEQ_BODY(iseq)->iseq_size) {
243 int opcode = rb_vm_insn_addr2opcode((
void *)ISEQ_BODY(iseq)->iseq_encoded[pos]);
244 unsigned int next_pos = pos + insn_len(opcode);
245 if (ISEQ_BODY(iseq)->iseq_encoded + next_pos == pc) {
250 rb_bug(
"failed to find the previous insn");
259 if (! LIKELY(vm_stack_canary_was_born)) {
262 else if ((
VALUE *)reg_cfp == ec->vm_stack + ec->vm_stack_size) {
266 else if (! (iseq = GET_ISEQ())) {
269 else if (LIKELY(sp[0] != vm_stack_canary)) {
278 const VALUE *orig = rb_iseq_original_iseq(iseq);
279 const VALUE iseqw = rb_iseqw_new(iseq);
281 const char *stri = rb_str_to_cstr(inspection);
282 const VALUE disasm = rb_iseq_disasm(iseq);
283 const char *strd = rb_str_to_cstr(disasm);
284 const ptrdiff_t pos = previous_insn_index(iseq, GET_PC());
285 const enum ruby_vminsn_type insn = (
enum ruby_vminsn_type)orig[pos];
286 const char *name = insn_name(insn);
292 "We are killing the stack canary set by %s, "
293 "at %s@pc=%"PRIdPTR
"\n"
294 "watch out the C stack trace.\n"
296 name, stri, pos, strd);
297 rb_bug(
"see above.");
/* Debug build: route the canary check to the real implementation. */
299#define vm_check_canary(ec, sp) rb_vm_check_canary(ec, sp)
/* Non-debug build (note the gap in fused line numbers suggests an
 * intervening #else that is missing from this view — TODO confirm):
 * both checks compile to nothing. */
302#define vm_check_canary(ec, sp)
303#define vm_check_frame(a, b, c, d)
308vm_push_frame_debug_counter_inc(
315 RB_DEBUG_COUNTER_INC(frame_push);
317 if (RUBY_VM_END_CONTROL_FRAME(ec) != prev_cfp) {
318 const bool curr = VM_FRAME_RUBYFRAME_P(reg_cfp);
319 const bool prev = VM_FRAME_RUBYFRAME_P(prev_cfp);
322 RB_DEBUG_COUNTER_INC(frame_R2R);
325 RB_DEBUG_COUNTER_INC(frame_R2C);
330 RB_DEBUG_COUNTER_INC(frame_C2R);
333 RB_DEBUG_COUNTER_INC(frame_C2C);
338 switch (
type & VM_FRAME_MAGIC_MASK) {
339 case VM_FRAME_MAGIC_METHOD: RB_DEBUG_COUNTER_INC(frame_push_method);
return;
340 case VM_FRAME_MAGIC_BLOCK: RB_DEBUG_COUNTER_INC(frame_push_block);
return;
341 case VM_FRAME_MAGIC_CLASS: RB_DEBUG_COUNTER_INC(frame_push_class);
return;
342 case VM_FRAME_MAGIC_TOP: RB_DEBUG_COUNTER_INC(frame_push_top);
return;
343 case VM_FRAME_MAGIC_CFUNC: RB_DEBUG_COUNTER_INC(frame_push_cfunc);
return;
344 case VM_FRAME_MAGIC_IFUNC: RB_DEBUG_COUNTER_INC(frame_push_ifunc);
return;
345 case VM_FRAME_MAGIC_EVAL: RB_DEBUG_COUNTER_INC(frame_push_eval);
return;
346 case VM_FRAME_MAGIC_RESCUE: RB_DEBUG_COUNTER_INC(frame_push_rescue);
return;
347 case VM_FRAME_MAGIC_DUMMY: RB_DEBUG_COUNTER_INC(frame_push_dummy);
return;
350 rb_bug(
"unreachable");
353#define vm_push_frame_debug_counter_inc(ec, cfp, t)
358rb_vm_stack_canary(
void)
361 return vm_stack_canary;
/* The env data words live at fixed negative offsets from EP:
 * ME/CREF at ep[-2], specval (block handler / prev EP) at ep[-1],
 * flags at ep[0]. Frame push/pop code hard-codes this layout, so pin
 * it at compile time. */
367STATIC_ASSERT(VM_ENV_DATA_INDEX_ME_CREF, VM_ENV_DATA_INDEX_ME_CREF == -2);
368STATIC_ASSERT(VM_ENV_DATA_INDEX_SPECVAL, VM_ENV_DATA_INDEX_SPECVAL == -1);
369STATIC_ASSERT(VM_ENV_DATA_INDEX_FLAGS, VM_ENV_DATA_INDEX_FLAGS == -0);
385 vm_check_frame(
type, specval, cref_or_me, iseq);
386 VM_ASSERT(local_size >= 0);
389 CHECK_VM_STACK_OVERFLOW0(cfp, sp, local_size + stack_max);
390 vm_check_canary(ec, sp);
395 for (
int i=0; i < local_size; i++) {
422 #if defined HAVE_DECL_ATOMIC_SIGNAL_FENCE && HAVE_DECL_ATOMIC_SIGNAL_FENCE
423 atomic_signal_fence(memory_order_seq_cst);
431 vm_push_frame_debug_counter_inc(ec, cfp,
type);
439 if (VMDEBUG == 2) SDR();
441 ec->cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
448 VALUE flags = ep[VM_ENV_DATA_INDEX_FLAGS];
450 if (VMDEBUG == 2) SDR();
452 RUBY_VM_CHECK_INTS(ec);
453 ec->cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
455 return flags & VM_FRAME_FLAG_FINISH;
461 vm_pop_frame(ec, ec->cfp, ec->cfp->ep);
469 rb_iseq_t *dmy_iseq = rb_iseq_alloc_with_dummy_path(fname);
473 VM_FRAME_MAGIC_DUMMY | VM_ENV_FLAG_LOCAL | VM_FRAME_FLAG_FINISH,
475 VM_BLOCK_HANDLER_NONE,
482 return (
VALUE)dmy_iseq;
487rb_arity_error_new(
int argc,
int min,
int max)
489 VALUE err_mess = rb_sprintf(
"wrong number of arguments (given %d, expected %d", argc, min);
497 rb_str_catf(err_mess,
"..%d", max);
504rb_error_arity(
int argc,
int min,
int max)
511NOINLINE(
static void vm_env_write_slowpath(
const VALUE *ep,
int index,
VALUE v));
514vm_env_write_slowpath(
const VALUE *ep,
int index,
VALUE v)
517 rb_gc_writebarrier_remember(VM_ENV_ENVVAL(ep));
518 VM_FORCE_WRITE(&ep[index], v);
519 VM_ENV_FLAGS_UNSET(ep, VM_ENV_FLAG_WB_REQUIRED);
520 RB_DEBUG_COUNTER_INC(lvar_set_slowpath);
525vm_env_write(
const VALUE *ep,
int index,
VALUE v)
527 VALUE flags = ep[VM_ENV_DATA_INDEX_FLAGS];
528 if (LIKELY((flags & VM_ENV_FLAG_WB_REQUIRED) == 0)) {
529 VM_STACK_ENV_WRITE(ep, index, v);
532 vm_env_write_slowpath(ep, index, v);
537rb_vm_env_write(
const VALUE *ep,
int index,
VALUE v)
539 vm_env_write(ep, index, v);
545 if (block_handler == VM_BLOCK_HANDLER_NONE) {
549 switch (vm_block_handler_type(block_handler)) {
550 case block_handler_type_iseq:
551 case block_handler_type_ifunc:
552 return rb_vm_make_proc(ec, VM_BH_TO_CAPT_BLOCK(block_handler),
rb_cProc);
553 case block_handler_type_symbol:
554 return rb_sym_to_proc(VM_BH_TO_SYMBOL(block_handler));
555 case block_handler_type_proc:
556 return VM_BH_TO_PROC(block_handler);
558 VM_UNREACHABLE(rb_vm_bh_to_procval);
567vm_svar_valid_p(
VALUE svar)
570 switch (imemo_type(svar)) {
579 rb_bug(
"vm_svar_valid_p: unknown type: %s", rb_obj_info(svar));
589 if (lep && (ec == NULL || ec->root_lep != lep)) {
590 svar = lep[VM_ENV_DATA_INDEX_ME_CREF];
593 svar = ec->root_svar;
596 VM_ASSERT(svar ==
Qfalse || vm_svar_valid_p(svar));
604 VM_ASSERT(vm_svar_valid_p((
VALUE)svar));
606 if (lep && (ec == NULL || ec->root_lep != lep)) {
607 vm_env_write(lep, VM_ENV_DATA_INDEX_ME_CREF, (
VALUE)svar);
610 RB_OBJ_WRITE(rb_ec_thread_ptr(ec)->self, &ec->root_svar, svar);
617 const struct vm_svar *svar = lep_svar(ec, lep);
622 case VM_SVAR_LASTLINE:
623 return svar->lastline;
624 case VM_SVAR_BACKREF:
625 return svar->backref;
627 const VALUE ary = svar->others;
642 struct vm_svar *svar = IMEMO_NEW(
struct vm_svar, imemo_svar, obj);
653 struct vm_svar *svar = lep_svar(ec, lep);
656 lep_svar_write(ec, lep, svar = svar_new((
VALUE)svar));
660 case VM_SVAR_LASTLINE:
663 case VM_SVAR_BACKREF:
667 VALUE ary = svar->others;
683 val = lep_svar_get(ec, lep, key);
686 VALUE backref = lep_svar_get(ec, lep, VM_SVAR_BACKREF);
703 rb_bug(
"unexpected back-ref");
716 VALUE backref = lep_svar_get(ec, lep, VM_SVAR_BACKREF);
726 return rb_reg_last_defined(backref);
728 rb_bug(
"unexpected back-ref");
732 nth = (int)(
type >> 1);
739check_method_entry(
VALUE obj,
int can_be_svar)
741 if (obj ==
Qfalse)
return NULL;
744 if (!
RB_TYPE_P(obj,
T_IMEMO)) rb_bug(
"check_method_entry: unknown type: %s", rb_obj_info(obj));
747 switch (imemo_type(obj)) {
758 rb_bug(
"check_method_entry: svar should not be there:");
765env_method_entry_unchecked(
VALUE obj,
int can_be_svar)
767 if (obj ==
Qfalse)
return NULL;
769 switch (imemo_type(obj)) {
786 const VALUE *ep = cfp->ep;
789 while (!VM_ENV_LOCAL_P(ep)) {
790 if ((me = check_method_entry(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) != NULL)
return me;
791 ep = VM_ENV_PREV_EP(ep);
794 return check_method_entry(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
800 const VALUE *ep = cfp->ep;
803 while (!VM_ENV_LOCAL_P_UNCHECKED(ep)) {
804 if ((me = env_method_entry_unchecked(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) != NULL)
return me;
805 ep = VM_ENV_PREV_EP_UNCHECKED(ep);
808 return env_method_entry_unchecked(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
814 switch (me->def->type) {
815 case VM_METHOD_TYPE_ISEQ:
816 return me->def->body.iseq.
iseqptr;
825 switch (me->def->type) {
826 case VM_METHOD_TYPE_ISEQ:
827 return me->def->body.iseq.
cref;
833#if VM_CHECK_MODE == 0
837check_cref(
VALUE obj,
int can_be_svar)
839 if (obj ==
Qfalse)
return NULL;
842 if (!
RB_TYPE_P(obj,
T_IMEMO)) rb_bug(
"check_cref: unknown type: %s", rb_obj_info(obj));
845 switch (imemo_type(obj)) {
856 rb_bug(
"check_method_entry: svar should not be there:");
863vm_env_cref(
const VALUE *ep)
867 while (!VM_ENV_LOCAL_P(ep)) {
868 if ((cref = check_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) != NULL)
return cref;
869 ep = VM_ENV_PREV_EP(ep);
872 return check_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
876is_cref(
const VALUE v,
int can_be_svar)
879 switch (imemo_type(v)) {
892vm_env_cref_by_cref(
const VALUE *ep)
894 while (!VM_ENV_LOCAL_P(ep)) {
895 if (is_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE))
return TRUE;
896 ep = VM_ENV_PREV_EP(ep);
898 return is_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
902cref_replace_with_duplicated_cref_each_frame(
const VALUE *vptr,
int can_be_svar,
VALUE parent)
904 const VALUE v = *vptr;
908 switch (imemo_type(v)) {
911 new_cref = vm_cref_dup(cref);
916 VM_FORCE_WRITE(vptr, (
VALUE)new_cref);
921 return cref_replace_with_duplicated_cref_each_frame(&((
struct vm_svar *)v)->
cref_or_me, FALSE, v);
925 rb_bug(
"cref_replace_with_duplicated_cref_each_frame: unreachable");
934vm_cref_replace_with_duplicated_cref(
const VALUE *ep)
936 if (vm_env_cref_by_cref(ep)) {
940 while (!VM_ENV_LOCAL_P(ep)) {
941 envval = VM_ENV_ESCAPED_P(ep) ? VM_ENV_ENVVAL(ep) :
Qfalse;
942 if ((cref = cref_replace_with_duplicated_cref_each_frame(&ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE, envval)) != NULL) {
945 ep = VM_ENV_PREV_EP(ep);
947 envval = VM_ENV_ESCAPED_P(ep) ? VM_ENV_ENVVAL(ep) :
Qfalse;
948 return cref_replace_with_duplicated_cref_each_frame(&ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE, envval);
951 rb_bug(
"vm_cref_dup: unreachable");
956vm_get_cref(
const VALUE *ep)
964 rb_bug(
"vm_get_cref: unreachable");
969rb_vm_get_cref(
const VALUE *ep)
971 return vm_get_cref(ep);
982 return vm_get_cref(cfp->ep);
986vm_get_const_key_cref(
const VALUE *ep)
992 if (CREF_DYNAMIC(cref) ||
993 RCLASS_CLONED_P(CREF_CLASS(cref))) {
996 cref = CREF_NEXT(cref);
1009 #define ADD_NEW_CREF(new_cref) \
1010 if (new_cref_tail) { \
1011 RB_OBJ_WRITE(new_cref_tail, &new_cref_tail->next, new_cref); \
1014 new_cref_head = new_cref; \
1016 new_cref_tail = new_cref;
1020 if (CREF_CLASS(cref) == old_klass) {
1021 new_cref = vm_cref_new_use_prev(new_klass, METHOD_VISI_UNDEF, FALSE, cref, FALSE);
1022 ADD_NEW_CREF(new_cref);
1023 return new_cref_head;
1025 new_cref = vm_cref_new_use_prev(CREF_CLASS(cref), METHOD_VISI_UNDEF, FALSE, cref, FALSE);
1026 cref = CREF_NEXT(cref);
1027 ADD_NEW_CREF(new_cref);
1033 return new_cref_head;
1042 prev_cref = vm_env_cref(ep);
1048 prev_cref = vm_env_cref(cfp->ep);
1052 return vm_cref_new(klass, METHOD_VISI_PUBLIC, FALSE, prev_cref, pushed_by_eval, singleton);
1056vm_get_cbase(
const VALUE *ep)
1058 const rb_cref_t *cref = vm_get_cref(ep);
1060 return CREF_CLASS_FOR_DEFINITION(cref);
1064vm_get_const_base(
const VALUE *ep)
1066 const rb_cref_t *cref = vm_get_cref(ep);
1069 if (!CREF_PUSHED_BY_EVAL(cref)) {
1070 return CREF_CLASS_FOR_DEFINITION(cref);
1072 cref = CREF_NEXT(cref);
1079vm_check_if_namespace(
VALUE klass)
1082 rb_raise(
rb_eTypeError,
"%+"PRIsVALUE
" is not a class/module", klass);
1087vm_ensure_not_refinement_module(
VALUE self)
1090 rb_warn(
"not defined at the refinement, but at the outer class/module");
1106 if (
NIL_P(orig_klass) && allow_nil) {
1108 const rb_cref_t *root_cref = vm_get_cref(ec->cfp->ep);
1112 while (root_cref && CREF_PUSHED_BY_EVAL(root_cref)) {
1113 root_cref = CREF_NEXT(root_cref);
1116 while (cref && CREF_NEXT(cref)) {
1117 if (CREF_PUSHED_BY_EVAL(cref)) {
1121 klass = CREF_CLASS(cref);
1123 cref = CREF_NEXT(cref);
1125 if (!
NIL_P(klass)) {
1129 if ((ce = rb_const_lookup(klass,
id))) {
1130 rb_const_warn_if_deprecated(ce, klass,
id);
1133 if (am == klass)
break;
1135 if (is_defined)
return 1;
1136 if (rb_autoloading_value(klass,
id, &av, NULL))
return av;
1138 goto search_continue;
1145 if (UNLIKELY(!rb_ractor_main_p())) {
1147 rb_raise(rb_eRactorIsolationError,
1148 "can not access non-shareable objects in constant %"PRIsVALUE
"::%"PRIsVALUE
" by non-main ractor.",
rb_class_path(klass), rb_id2str(
id));
1159 if (root_cref && !
NIL_P(CREF_CLASS(root_cref))) {
1160 klass = vm_get_iclass(ec->cfp, CREF_CLASS(root_cref));
1174 vm_check_if_namespace(orig_klass);
1176 return rb_public_const_defined_from(orig_klass,
id);
1179 return rb_public_const_get_from(orig_klass,
id);
1187 return vm_get_ev_const(ec, orig_klass,
id, allow_nil ==
Qtrue, 0);
1195 int allow_nil = TRUE;
1196 if (segments[0] == idNULL) {
1201 while (segments[idx]) {
1202 ID id = segments[idx++];
1203 val = vm_get_ev_const(ec, val,
id, allow_nil, 0);
1216 rb_bug(
"vm_get_cvar_base: no cref");
1219 while (CREF_NEXT(cref) &&
1220 (
NIL_P(CREF_CLASS(cref)) || RCLASS_SINGLETON_P(CREF_CLASS(cref)) ||
1221 CREF_PUSHED_BY_EVAL(cref) || CREF_SINGLETON(cref))) {
1222 cref = CREF_NEXT(cref);
1224 if (top_level_raise && !CREF_NEXT(cref)) {
1228 klass = vm_get_iclass(cfp, CREF_CLASS(cref));
1236ALWAYS_INLINE(
static void fill_ivar_cache(
const rb_iseq_t *iseq,
IVC ic,
const struct rb_callcache *cc,
int is_attr, attr_index_t index, shape_id_t shape_id));
1238fill_ivar_cache(
const rb_iseq_t *iseq,
IVC ic,
const struct rb_callcache *cc,
int is_attr, attr_index_t index, shape_id_t shape_id)
1241 vm_cc_attr_index_set(cc, index, shape_id);
1244 vm_ic_attr_index_set(iseq, ic, index, shape_id);
/* True when either `cond` is false or `val` is Ractor-shareable — i.e.
 * the value only needs to be shareable "incidentally", when the
 * condition holds. (Comments intentionally kept outside the
 * backslash-continued macro bodies.) */
1248#define ractor_incidental_shareable_p(cond, val) \
1249 (!(cond) || rb_ractor_shareable_p(val))
/* Same check keyed on whether the containing object is itself
 * shareable. */
1250#define ractor_object_incidental_shareable_p(obj, val) \
1251 ractor_incidental_shareable_p(rb_ractor_shareable_p(obj), val)
1260 return default_value;
1270 if (UNLIKELY(!rb_ractor_main_p())) {
1278 if (default_value ==
Qundef) {
1286 fields_obj = RCLASS_WRITABLE_FIELDS_OBJ(obj);
1290 fields_obj = rb_obj_fields(obj,
id);
1294 return default_value;
1299 shape_id_t shape_id = RBASIC_SHAPE_ID_FOR_READ(fields_obj);
1300 VALUE *ivar_list = rb_imemo_fields_ptr(fields_obj);
1302 shape_id_t cached_id;
1306 vm_cc_atomic_shape_and_index(cc, &cached_id, &index);
1309 vm_ic_atomic_shape_and_index(ic, &cached_id, &index);
1312 if (LIKELY(cached_id == shape_id)) {
1315 if (index == ATTR_INDEX_NOT_SET) {
1316 return default_value;
1319 val = ivar_list[index];
1320#if USE_DEBUG_COUNTER
1321 RB_DEBUG_COUNTER_INC(ivar_get_ic_hit);
1324 RB_DEBUG_COUNTER_INC(ivar_get_obj_hit);
1330#if USE_DEBUG_COUNTER
1332 if (cached_id != INVALID_SHAPE_ID) {
1333 RB_DEBUG_COUNTER_INC(ivar_get_cc_miss_set);
1336 RB_DEBUG_COUNTER_INC(ivar_get_cc_miss_unset);
1340 if (cached_id != INVALID_SHAPE_ID) {
1341 RB_DEBUG_COUNTER_INC(ivar_get_ic_miss_set);
1344 RB_DEBUG_COUNTER_INC(ivar_get_ic_miss_unset);
1347 RB_DEBUG_COUNTER_INC(ivar_get_ic_miss);
1350 RB_DEBUG_COUNTER_INC(ivar_get_obj_miss);
1354 if (UNLIKELY(rb_shape_too_complex_p(shape_id))) {
1358 RUBY_ASSERT(table == rb_imemo_fields_complex_tbl(fields_obj));
1360 if (!st_lookup(table,
id, &val)) {
1361 val = default_value;
1365 shape_id_t previous_cached_id = cached_id;
1366 if (rb_shape_get_iv_index_with_hint(shape_id,
id, &index, &cached_id)) {
1369 if (cached_id != previous_cached_id) {
1370 fill_ivar_cache(iseq, ic, cc, is_attr, index, cached_id);
1373 if (index == ATTR_INDEX_NOT_SET) {
1374 val = default_value;
1378 val = ivar_list[index];
1384 vm_cc_attr_index_initialize(cc, shape_id);
1387 vm_ic_attr_index_initialize(ic, shape_id);
1390 val = default_value;
1395 if (!UNDEF_P(default_value)) {
1403 RB_DEBUG_COUNTER_INC(ivar_get_ic_miss);
1406 return rb_attr_get(obj,
id);
1414populate_cache(attr_index_t index, shape_id_t next_shape_id,
ID id,
const rb_iseq_t *iseq,
IVC ic,
const struct rb_callcache *cc,
bool is_attr)
1416 RUBY_ASSERT(!rb_shape_too_complex_p(next_shape_id));
1420 vm_cc_attr_index_set(cc, index, next_shape_id);
1423 vm_ic_attr_index_set(iseq, ic, index, next_shape_id);
1435 RB_DEBUG_COUNTER_INC(ivar_set_ic_miss);
1437 rb_check_frozen(obj);
1439 attr_index_t index = rb_ivar_set_index(obj,
id, val);
1440 shape_id_t next_shape_id = RBASIC_SHAPE_ID(obj);
1442 if (!rb_shape_too_complex_p(next_shape_id)) {
1443 populate_cache(index, next_shape_id,
id, iseq, ic, cc, is_attr);
1446 RB_DEBUG_COUNTER_INC(ivar_set_obj_miss);
1456 return vm_setivar_slowpath(obj,
id, val, iseq, ic, NULL,
false);
1462 return vm_setivar_slowpath(obj,
id, val, NULL, NULL, cc,
true);
1465NOINLINE(
static VALUE vm_setivar_class(
VALUE obj,
ID id,
VALUE val, shape_id_t dest_shape_id, attr_index_t index));
1467vm_setivar_class(
VALUE obj,
ID id,
VALUE val, shape_id_t dest_shape_id, attr_index_t index)
1469 if (UNLIKELY(!rb_ractor_main_p())) {
1473 VALUE fields_obj = RCLASS_WRITABLE_FIELDS_OBJ(obj);
1474 if (UNLIKELY(!fields_obj)) {
1478 shape_id_t shape_id = RBASIC_SHAPE_ID(fields_obj);
1481 if (shape_id == dest_shape_id) {
1482 RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
1484 else if (dest_shape_id != INVALID_SHAPE_ID) {
1485 if (RSHAPE_DIRECT_CHILD_P(shape_id, dest_shape_id) && RSHAPE_EDGE_NAME(dest_shape_id) ==
id && RSHAPE_CAPACITY(shape_id) == RSHAPE_CAPACITY(dest_shape_id)) {
1486 RUBY_ASSERT(index < RSHAPE_CAPACITY(dest_shape_id));
1496 RB_OBJ_WRITE(fields_obj, &rb_imemo_fields_ptr(fields_obj)[index], val);
1498 if (shape_id != dest_shape_id) {
1499 RBASIC_SET_SHAPE_ID(obj, dest_shape_id);
1500 RBASIC_SET_SHAPE_ID(fields_obj, dest_shape_id);
1503 RB_DEBUG_COUNTER_INC(ivar_set_ic_hit);
1508NOINLINE(
static VALUE vm_setivar_default(
VALUE obj,
ID id,
VALUE val, shape_id_t dest_shape_id, attr_index_t index));
1510vm_setivar_default(
VALUE obj,
ID id,
VALUE val, shape_id_t dest_shape_id, attr_index_t index)
1512 shape_id_t shape_id = RBASIC_SHAPE_ID(obj);
1515 if (shape_id == dest_shape_id) {
1516 RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
1518 else if (dest_shape_id != INVALID_SHAPE_ID) {
1519 if (RSHAPE_DIRECT_CHILD_P(shape_id, dest_shape_id) && RSHAPE_EDGE_NAME(dest_shape_id) ==
id && RSHAPE_CAPACITY(shape_id) == RSHAPE_CAPACITY(dest_shape_id)) {
1520 RUBY_ASSERT(index < RSHAPE_CAPACITY(dest_shape_id));
1530 VALUE fields_obj = rb_obj_fields(obj,
id);
1532 RB_OBJ_WRITE(fields_obj, &rb_imemo_fields_ptr(fields_obj)[index], val);
1534 if (shape_id != dest_shape_id) {
1535 RBASIC_SET_SHAPE_ID(obj, dest_shape_id);
1536 RBASIC_SET_SHAPE_ID(fields_obj, dest_shape_id);
1539 RB_DEBUG_COUNTER_INC(ivar_set_ic_hit);
1545vm_setivar(
VALUE obj,
ID id,
VALUE val, shape_id_t dest_shape_id, attr_index_t index)
1553 shape_id_t shape_id = RBASIC_SHAPE_ID(obj);
1554 RUBY_ASSERT(dest_shape_id == INVALID_SHAPE_ID || !rb_shape_too_complex_p(dest_shape_id));
1556 if (LIKELY(shape_id == dest_shape_id)) {
1557 RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
1560 else if (dest_shape_id != INVALID_SHAPE_ID) {
1561 if (RSHAPE_DIRECT_CHILD_P(shape_id, dest_shape_id) && RSHAPE_EDGE_NAME(dest_shape_id) ==
id && RSHAPE_CAPACITY(shape_id) == RSHAPE_CAPACITY(dest_shape_id)) {
1562 RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
1564 RBASIC_SET_SHAPE_ID(obj, dest_shape_id);
1566 RUBY_ASSERT(index < RSHAPE_CAPACITY(dest_shape_id));
1581 RB_DEBUG_COUNTER_INC(ivar_set_ic_hit);
1582 RB_DEBUG_COUNTER_INC(ivar_set_obj_hit);
1588 RB_DEBUG_COUNTER_INC(ivar_set_ic_miss_noobject);
1600 VALUE defined_class = 0;
1604 defined_class =
RBASIC(defined_class)->klass;
1607 VALUE rb_cvc_tbl = RCLASS_CVC_TBL(defined_class);
1609 rb_bug(
"the cvc table should be set");
1613 if (!rb_marked_id_table_lookup(rb_cvc_tbl,
id, &ent_data)) {
1614 rb_bug(
"should have cvar cache entry");
1619 ent->global_cvar_state = GET_GLOBAL_CVAR_STATE();
1632 cref = vm_get_cref(GET_EP());
1634 if (ic->entry && ic->entry->global_cvar_state == GET_GLOBAL_CVAR_STATE() && ic->entry->cref == cref && LIKELY(rb_ractor_main_p())) {
1635 RB_DEBUG_COUNTER_INC(cvar_read_inline_hit);
1637 VALUE v = rb_ivar_lookup(ic->entry->class_value,
id,
Qundef);
1643 VALUE klass = vm_get_cvar_base(cref, reg_cfp, 1);
1645 return update_classvariable_cache(iseq, klass,
id, cref, ic);
1651 return vm_getclassvariable(iseq, cfp,
id, ic);
1658 cref = vm_get_cref(GET_EP());
1660 if (ic->entry && ic->entry->global_cvar_state == GET_GLOBAL_CVAR_STATE() && ic->entry->cref == cref && LIKELY(rb_ractor_main_p())) {
1661 RB_DEBUG_COUNTER_INC(cvar_write_inline_hit);
1663 rb_class_ivar_set(ic->entry->class_value,
id, val);
1667 VALUE klass = vm_get_cvar_base(cref, reg_cfp, 1);
1671 update_classvariable_cache(iseq, klass,
id, cref, ic);
1677 vm_setclassvariable(iseq, cfp,
id, val, ic);
1684 return vm_getivar(obj,
id, iseq, ic, NULL, FALSE,
Qnil);
1695 shape_id_t dest_shape_id;
1697 vm_ic_atomic_shape_and_index(ic, &dest_shape_id, &index);
1699 if (UNLIKELY(UNDEF_P(vm_setivar(obj,
id, val, dest_shape_id, index)))) {
1705 if (!UNDEF_P(vm_setivar_class(obj,
id, val, dest_shape_id, index))) {
1710 if (!UNDEF_P(vm_setivar_default(obj,
id, val, dest_shape_id, index))) {
1714 vm_setivar_slowpath_ivar(obj,
id, val, iseq, ic);
1721 vm_setinstancevariable(iseq, obj,
id, val, ic);
1727 return vm_getinstancevariable(iseq, obj,
id, ic);
1736 ec->tag->state = RUBY_TAG_FATAL;
1739 ec->tag->state = TAG_THROW;
1741 else if (THROW_DATA_P(err)) {
1742 ec->tag->state = THROW_DATA_STATE((
struct vm_throw_data *)err);
1745 ec->tag->state = TAG_RAISE;
1752 const int flag,
const VALUE throwobj)
1760 else if (state == TAG_BREAK) {
1762 const VALUE *ep = GET_EP();
1763 const rb_iseq_t *base_iseq = GET_ISEQ();
1764 escape_cfp = reg_cfp;
1766 while (ISEQ_BODY(base_iseq)->
type != ISEQ_TYPE_BLOCK) {
1767 if (ISEQ_BODY(CFP_ISEQ(escape_cfp))->
type == ISEQ_TYPE_CLASS) {
1768 escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
1769 ep = escape_cfp->ep;
1770 base_iseq = CFP_ISEQ(escape_cfp);
1773 ep = VM_ENV_PREV_EP(ep);
1774 base_iseq = ISEQ_BODY(base_iseq)->parent_iseq;
1775 escape_cfp = rb_vm_search_cf_from_ep(ec, escape_cfp, ep);
1776 VM_ASSERT(CFP_ISEQ(escape_cfp) == base_iseq);
1780 if (VM_FRAME_LAMBDA_P(escape_cfp)) {
1786 ep = VM_ENV_PREV_EP(ep);
1788 while (escape_cfp < eocfp) {
1789 if (escape_cfp->ep == ep) {
1790 const rb_iseq_t *
const iseq = CFP_ISEQ(escape_cfp);
1791 const VALUE epc = CFP_PC(escape_cfp) - ISEQ_BODY(iseq)->iseq_encoded;
1796 for (i=0; i < ct->size; i++) {
1798 UNALIGNED_MEMBER_PTR(ct, entries[i]);
1800 if (entry->type == CATCH_TYPE_BREAK &&
1801 entry->iseq == base_iseq &&
1802 entry->start < epc && entry->end >= epc) {
1803 if (entry->cont == epc) {
1812 escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
1817 rb_vm_localjump_error(
"break from proc-closure", throwobj, TAG_BREAK);
1820 else if (state == TAG_RETRY) {
1821 const VALUE *ep = VM_ENV_PREV_EP(GET_EP());
1823 escape_cfp = rb_vm_search_cf_from_ep(ec, reg_cfp, ep);
1825 else if (state == TAG_RETURN) {
1826 const VALUE *current_ep = GET_EP();
1827 const VALUE *target_ep = NULL, *target_lep, *ep = current_ep;
1828 int in_class_frame = 0;
1830 escape_cfp = reg_cfp;
1833 while (!VM_ENV_LOCAL_P(ep)) {
1834 if (VM_ENV_FLAGS(ep, VM_FRAME_FLAG_LAMBDA) && target_ep == NULL) {
1837 ep = VM_ENV_PREV_EP(ep);
1841 while (escape_cfp < eocfp) {
1842 const VALUE *lep = VM_CF_LEP(escape_cfp);
1848 if (lep == target_lep &&
1849 VM_FRAME_RUBYFRAME_P(escape_cfp) &&
1850 ISEQ_BODY(CFP_ISEQ(escape_cfp))->
type == ISEQ_TYPE_CLASS) {
1855 if (lep == target_lep) {
1856 if (VM_FRAME_LAMBDA_P(escape_cfp)) {
1858 if (in_class_frame) {
1863 const VALUE *tep = current_ep;
1865 while (target_lep != tep) {
1866 if (escape_cfp->ep == tep) {
1868 if (tep == target_ep) {
1872 goto unexpected_return;
1875 tep = VM_ENV_PREV_EP(tep);
1879 else if (VM_FRAME_RUBYFRAME_P(escape_cfp)) {
1880 switch (ISEQ_BODY(CFP_ISEQ(escape_cfp))->
type) {
1882 case ISEQ_TYPE_MAIN:
1884 if (in_class_frame)
goto unexpected_return;
1885 if (target_ep == NULL) {
1889 goto unexpected_return;
1893 case ISEQ_TYPE_EVAL: {
1894 const rb_iseq_t *is = CFP_ISEQ(escape_cfp);
1895 enum rb_iseq_type t = ISEQ_BODY(is)->type;
1896 while (t == ISEQ_TYPE_RESCUE || t == ISEQ_TYPE_ENSURE || t == ISEQ_TYPE_EVAL) {
1897 if (!(is = ISEQ_BODY(is)->parent_iseq))
break;
1898 t = ISEQ_BODY(is)->type;
1900 toplevel = t == ISEQ_TYPE_TOP || t == ISEQ_TYPE_MAIN;
1903 case ISEQ_TYPE_CLASS:
1912 if (escape_cfp->ep == target_lep && ISEQ_BODY(CFP_ISEQ(escape_cfp))->
type == ISEQ_TYPE_METHOD) {
1913 if (target_ep == NULL) {
1917 goto unexpected_return;
1921 escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
1924 rb_vm_localjump_error(
"unexpected return", throwobj, TAG_RETURN);
1930 rb_bug(
"isns(throw): unsupported throw type");
1933 ec->tag->state = state;
1934 return (
VALUE)THROW_DATA_NEW(throwobj, escape_cfp, state);
1939 rb_num_t throw_state,
VALUE throwobj)
1941 const int state = (int)(throw_state & VM_THROW_STATE_MASK);
1942 const int flag = (int)(throw_state & VM_THROW_NO_ESCAPE_FLAG);
1945 return vm_throw_start(ec, reg_cfp, state, flag, throwobj);
1948 return vm_throw_continue(ec, throwobj);
1955 return vm_throw(ec, reg_cfp, throw_state, throwobj);
1961 int is_splat = flag & 0x01;
1964 const VALUE obj = ary;
1976 if (num + is_splat == 0) {
1979 else if (flag & 0x02) {
1984 for (i = 0; i < num -
len; i++) {
1989 for (j = 0; i < num; i++, j++) {
2011 for (; i < num -
len; i++) {
2015 for (rb_num_t j = 0; i < num; i++, j++) {
2016 *cfp->sp++ = ptr[
len - j - 1];
2020 for (rb_num_t j = 0; j < num; j++) {
2021 *cfp->sp++ = ptr[num - j - 1];
2036 int initial_capa = 2;
2038#if VM_CHECK_MODE > 0
2039 ccs->debug_sig = ~(
VALUE)ccs;
2041 ccs->capa = initial_capa;
2046 rb_managed_id_table_insert(cc_tbl, mid, (
VALUE)ccs);
2054 if (! vm_cc_markable(cc)) {
2058 if (UNLIKELY(ccs->len == ccs->capa)) {
2061 ccs = ruby_xrealloc(ccs, vm_ccs_alloc_size(ccs->capa));
2062#if VM_CHECK_MODE > 0
2063 ccs->debug_sig = ~(
VALUE)ccs;
2066 rb_managed_id_table_insert(cc_tbl, mid, (
VALUE)ccs);
2068 VM_ASSERT(ccs->len < ccs->capa);
2070 const int pos = ccs->len++;
2071 ccs->entries[pos].argc = vm_ci_argc(ci);
2072 ccs->entries[pos].flag = vm_ci_flag(ci);
2075 if (RB_DEBUG_COUNTER_SETMAX(ccs_maxlen, ccs->len)) {
2081#if VM_CHECK_MODE > 0
2085 ruby_debug_printf(
"ccs:%p (%d,%d)\n", (
void *)ccs, ccs->len, ccs->capa);
2086 for (
int i=0; i<ccs->len; i++) {
2087 ruby_debug_printf(
"CCS CI ID:flag:%x argc:%u\n",
2088 ccs->entries[i].flag,
2089 ccs->entries[i].argc);
2090 rp(ccs->entries[i].cc);
2097 VM_ASSERT(vm_ccs_p(ccs));
2098 VM_ASSERT(ccs->len <= ccs->capa);
2100 for (
int i=0; i<ccs->len; i++) {
2103 VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
2104 VM_ASSERT(vm_cc_class_check(cc, klass));
2105 VM_ASSERT(vm_cc_check_cme(cc, ccs->cme));
2106 VM_ASSERT(!vm_cc_super_p(cc));
2107 VM_ASSERT(!vm_cc_refinement_p(cc));
2118 ASSERT_vm_locking();
2120 if (rb_multi_ractor_p()) {
2121 if (RCLASS_WRITABLE_CC_TBL(klass) != cc_tbl) {
2128 rb_managed_id_table_lookup(cc_tbl, mid, &ccs_obj);
2131 if (!ccs || !METHOD_ENTRY_INVALIDATED(ccs->cme)) {
2136 VALUE new_table = rb_vm_cc_table_dup(cc_tbl);
2137 rb_vm_cc_table_delete(new_table, mid);
2138 RB_OBJ_ATOMIC_WRITE(klass, &RCLASS_WRITABLE_CC_TBL(klass), new_table);
2141 rb_vm_cc_table_delete(cc_tbl, mid);
2148 ASSERT_vm_locking();
2150 RB_DEBUG_COUNTER_INC(cc_not_found_in_ccs);
2154 VM_ASSERT(cme == NULL || IMEMO_TYPE_P(cme, imemo_ment));
2158 VM_ASSERT(vm_cc_cme(&vm_empty_cc) == NULL);
2159 return &vm_empty_cc;
2162 VALUE cc_tbl = RCLASS_WRITABLE_CC_TBL(klass);
2163 const VALUE original_cc_table = cc_tbl;
2166 cc_tbl = rb_vm_cc_table_create(1);
2168 else if (rb_multi_ractor_p()) {
2169 cc_tbl = rb_vm_cc_table_dup(cc_tbl);
2172 VM_ASSERT(cme == rb_callable_method_entry(klass, mid));
2181 if (UNLIKELY(rb_managed_id_table_lookup(cc_tbl, mid, &ccs_obj))) {
2186 ccs = vm_ccs_create(klass, cc_tbl, mid, cme);
2190 cme = rb_check_overloaded_cme(cme, ci);
2192 const struct rb_callcache *cc = vm_cc_new(klass, cme, vm_call_general, cc_type_normal);
2193 vm_ccs_push(cc_tbl, mid, ccs, ci, cc);
2195 VM_ASSERT(vm_cc_cme(cc) != NULL);
2196 VM_ASSERT(cme->called_id == mid);
2197 VM_ASSERT(vm_cc_cme(cc)->called_id == mid);
2199 if (original_cc_table != cc_tbl) {
2200 RB_OBJ_ATOMIC_WRITE(klass, &RCLASS_WRITABLE_CC_TBL(klass), cc_tbl);
2212 cc_tbl = RUBY_ATOMIC_VALUE_LOAD(RCLASS_WRITABLE_CC_TBL(klass));
2220 if (rb_managed_id_table_lookup(cc_tbl, mid, &ccs_obj)) {
2222 const int ccs_len = ccs->len;
2224 if (UNLIKELY(METHOD_ENTRY_INVALIDATED(ccs->cme))) {
2226 vm_evict_cc(klass, cc_tbl, mid);
2231 VM_ASSERT(vm_ccs_verify(ccs, mid, klass));
2236 unsigned int argc = vm_ci_argc(ci);
2237 unsigned int flag = vm_ci_flag(ci);
2239 for (
int i=0; i<ccs_len; i++) {
2240 unsigned int ccs_ci_argc = ccs->entries[i].argc;
2241 unsigned int ccs_ci_flag = ccs->entries[i].flag;
2242 const struct rb_callcache *ccs_cc = ccs->entries[i].cc;
2244 VM_ASSERT(IMEMO_TYPE_P(ccs_cc, imemo_callcache));
2246 if (ccs_ci_argc == argc && ccs_ci_flag == flag) {
2247 RB_DEBUG_COUNTER_INC(cc_found_in_ccs);
2249 VM_ASSERT(vm_cc_cme(ccs_cc)->called_id == mid);
2250 VM_ASSERT(ccs_cc->klass == klass);
2251 VM_ASSERT(!METHOD_ENTRY_INVALIDATED(vm_cc_cme(ccs_cc)));
2267 const ID mid = vm_ci_mid(ci);
2269 const struct rb_callcache *cc = vm_lookup_cc(klass, ci, mid);
2275 if (rb_multi_ractor_p()) {
2278 cc = vm_lookup_cc(klass, ci, mid);
2282 cc = vm_populate_cc(klass, ci, mid);
2296 cc = vm_search_cc(klass, ci);
2299 VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
2300 VM_ASSERT(cc == vm_cc_empty() || cc->klass == klass);
2301 VM_ASSERT(cc == vm_cc_empty() || callable_method_entry_p(vm_cc_cme(cc)));
2302 VM_ASSERT(cc == vm_cc_empty() || !METHOD_ENTRY_INVALIDATED(vm_cc_cme(cc)));
2303 VM_ASSERT(cc == vm_cc_empty() || vm_cc_cme(cc)->called_id == vm_ci_mid(ci));
2311#if USE_DEBUG_COUNTER
2315 const struct rb_callcache *cc = rb_vm_search_method_slowpath(cd->ci, klass);
2317#if OPT_INLINE_METHOD_CACHE
2321 if (cd_owner && cc != empty_cc) {
2325#if USE_DEBUG_COUNTER
2326 if (!old_cc || old_cc == empty_cc) {
2328 RB_DEBUG_COUNTER_INC(mc_inline_miss_empty);
2330 else if (old_cc == cc) {
2331 RB_DEBUG_COUNTER_INC(mc_inline_miss_same_cc);
2333 else if (vm_cc_cme(old_cc) == vm_cc_cme(cc)) {
2334 RB_DEBUG_COUNTER_INC(mc_inline_miss_same_cme);
2336 else if (vm_cc_cme(old_cc) && vm_cc_cme(cc) &&
2337 vm_cc_cme(old_cc)->def == vm_cc_cme(cc)->def) {
2338 RB_DEBUG_COUNTER_INC(mc_inline_miss_same_def);
2341 RB_DEBUG_COUNTER_INC(mc_inline_miss_diff);
2346 VM_ASSERT(vm_cc_cme(cc) == NULL ||
2347 vm_cc_cme(cc)->called_id == vm_ci_mid(cd->ci));
2358#if OPT_INLINE_METHOD_CACHE
2359 if (LIKELY(vm_cc_class_check(cc, klass))) {
2360 if (LIKELY(!METHOD_ENTRY_INVALIDATED(vm_cc_cme(cc)))) {
2361 VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));
2362 RB_DEBUG_COUNTER_INC(mc_inline_hit);
2363 VM_ASSERT(vm_cc_cme(cc) == NULL ||
2364 (vm_ci_flag(cd->ci) & VM_CALL_SUPER) ||
2365 vm_cc_cme(cc)->called_id == vm_ci_mid(cd->ci));
2369 RB_DEBUG_COUNTER_INC(mc_inline_miss_invalidated);
2372 RB_DEBUG_COUNTER_INC(mc_inline_miss_klass);
2376 return vm_search_method_slowpath0((
VALUE)CFP_ISEQ(reg_cfp), cd, klass);
2383 VM_ASSERT(klass !=
Qfalse);
2386 const struct rb_callcache *cc = vm_search_method_fastpath(reg_cfp, cd, klass);
2387 return vm_cc_cme(cc);
2396 const struct rb_callcache *cc = vm_search_method_slowpath0(cd_owner, cd, klass);
2397 return vm_cc_cme(cc);
2400#if __has_attribute(transparent_union)
2413 VALUE (*f10)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE);
2414 VALUE (*f11)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE);
2415 VALUE (*f12)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE);
2416 VALUE (*f13)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE);
2417 VALUE (*f14)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE);
2418 VALUE (*f15)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE);
2421# define make_cfunc_type(f) (cfunc_type){.anyargs = (VALUE (*)(ANYARGS))(f)}
2424# define make_cfunc_type(f) (cfunc_type)(f)
2434 VM_ASSERT(IMEMO_TYPE_P(me, imemo_ment));
2435 VM_ASSERT(callable_method_entry_p(me));
2437 if (me->def->type != VM_METHOD_TYPE_CFUNC) {
2441#if __has_attribute(transparent_union)
2442 return me->def->body.cfunc.func == func.anyargs;
2444 return me->def->body.cfunc.func == func;
2453 return me && METHOD_ENTRY_BASIC(me);
2459 VM_ASSERT(reg_cfp != NULL);
2461 return check_cfunc(cme, func);
2467 return check_cfunc(me, func);
2476 const struct rb_callcache *cc = vm_search_method_slowpath0((
VALUE)iseq, cd, klass);
2478 return check_cfunc(cme, func);
2481#define check_cfunc(me, func) check_cfunc(me, make_cfunc_type(func))
2482#define vm_method_cfunc_is(reg_cfp, cd, recv, func) vm_method_cfunc_is(reg_cfp, cd, recv, make_cfunc_type(func))
2484#define EQ_UNREDEFINED_P(t) BASIC_OP_UNREDEFINED_P(BOP_EQ, t##_REDEFINED_OP_FLAG)
2516opt_equality_specialized(
VALUE recv,
VALUE obj)
2518 if (FIXNUM_2_P(recv, obj) && EQ_UNREDEFINED_P(INTEGER)) {
2519 goto compare_by_identity;
2521 else if (FLONUM_2_P(recv, obj) && EQ_UNREDEFINED_P(FLOAT)) {
2522 goto compare_by_identity;
2525 goto compare_by_identity;
2534 return RBOOL(a == b);
2541 return rb_str_eql_internal(obj, recv);
2546 compare_by_identity:
2547 return RBOOL(recv == obj);
2553 VM_ASSERT(reg_cfp != NULL);
2555 VALUE val = opt_equality_specialized(recv, obj);
2556 if (!UNDEF_P(val))
return val;
2558 if (!vm_method_cfunc_is(reg_cfp, cd, recv, rb_obj_equal)) {
2562 return RBOOL(recv == obj);
2566#undef EQ_UNREDEFINED_P
2569NOINLINE(
static VALUE opt_equality_by_mid_slowpath(
VALUE recv,
VALUE obj,
ID mid));
2572opt_equality_by_mid_slowpath(
VALUE recv,
VALUE obj,
ID mid)
2574 const struct rb_callcache *cc = gccct_method_search(GET_EC(), recv, mid, &VM_CI_ON_STACK(mid, 0, 1, NULL));
2576 if (cc && check_cfunc(vm_cc_cme(cc), rb_obj_equal)) {
2577 return RBOOL(recv == obj);
2587 VALUE val = opt_equality_specialized(recv, obj);
2588 if (!UNDEF_P(val)) {
2592 return opt_equality_by_mid_slowpath(recv, obj, mid);
2599 return opt_equality_by_mid(obj1, obj2, idEq);
2605 return opt_equality_by_mid(obj1, obj2, idEqlP);
2615 case VM_CHECKMATCH_TYPE_WHEN:
2617 case VM_CHECKMATCH_TYPE_RESCUE:
2619 rb_raise(
rb_eTypeError,
"class or module required for rescue clause");
2622 case VM_CHECKMATCH_TYPE_CASE: {
2623 return rb_vm_call_with_refinements(ec, pattern, idEqq, 1, &target,
RB_NO_KEYWORDS);
2626 rb_bug(
"check_match: unreachable");
2632double_cmp_lt(
double a,
double b)
2634 return RBOOL(a < b);
2638double_cmp_le(
double a,
double b)
2640 return RBOOL(a <= b);
2644double_cmp_gt(
double a,
double b)
2646 return RBOOL(a > b);
2650double_cmp_ge(
double a,
double b)
2652 return RBOOL(a >= b);
2656static inline VALUE *
2661 if (CFP_ISEQ(cfp) && VM_FRAME_RUBYFRAME_P(cfp)) {
2662 VALUE *bp = prev_cfp->sp + ISEQ_BODY(CFP_ISEQ(cfp))->local_table_size + VM_ENV_DATA_SIZE;
2664 if (ISEQ_BODY(CFP_ISEQ(cfp))->param.flags.forwardable && VM_ENV_LOCAL_P(cfp->ep)) {
2665 int lts = ISEQ_BODY(CFP_ISEQ(cfp))->local_table_size;
2666 int params = ISEQ_BODY(CFP_ISEQ(cfp))->param.size;
2669 bp += vm_ci_argc(ci);
2672 if (ISEQ_BODY(CFP_ISEQ(cfp))->
type == ISEQ_TYPE_METHOD || VM_FRAME_BMETHOD_P(cfp)) {
2676#if VM_DEBUG_BP_CHECK
2677 if (bp != cfp->bp_check) {
2678 ruby_debug_printf(
"bp_check: %ld, bp: %ld\n",
2679 (
long)(cfp->bp_check - GET_EC()->vm_stack),
2680 (
long)(bp - GET_EC()->vm_stack));
2681 rb_bug(
"vm_base_ptr: unreachable");
2694 return vm_base_ptr(cfp);
2709static vm_call_handler vm_call_iseq_setup_func(
const struct rb_callinfo *ci,
const int param_size,
const int local_size);
2714 RB_DEBUG_COUNTER_INC(ccf_iseq_setup_tailcall_0start);
2716 return vm_call_iseq_setup_tailcall(ec, cfp, calling, 0);
2722 RB_DEBUG_COUNTER_INC(ccf_iseq_setup_0start);
2725 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2726 int param = ISEQ_BODY(iseq)->param.size;
2727 int local = ISEQ_BODY(iseq)->local_table_size;
2728 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);
2734 return ISEQ_BODY(iseq)->param.flags.has_opt == FALSE &&
2735 ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
2736 ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
2737 ISEQ_BODY(iseq)->param.flags.has_kw == FALSE &&
2738 ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
2739 ISEQ_BODY(iseq)->param.flags.accepts_no_kwarg == FALSE &&
2740 ISEQ_BODY(iseq)->param.flags.forwardable == FALSE &&
2741 ISEQ_BODY(iseq)->param.flags.has_block == FALSE &&
2742 ISEQ_BODY(iseq)->param.flags.accepts_no_block == FALSE;
2746rb_iseq_only_optparam_p(
const rb_iseq_t *iseq)
2748 return ISEQ_BODY(iseq)->param.flags.has_opt == TRUE &&
2749 ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
2750 ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
2751 ISEQ_BODY(iseq)->param.flags.has_kw == FALSE &&
2752 ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
2753 ISEQ_BODY(iseq)->param.flags.accepts_no_kwarg == FALSE &&
2754 ISEQ_BODY(iseq)->param.flags.forwardable == FALSE &&
2755 ISEQ_BODY(iseq)->param.flags.has_block == FALSE &&
2756 ISEQ_BODY(iseq)->param.flags.accepts_no_block == FALSE;
2760rb_iseq_only_kwparam_p(
const rb_iseq_t *iseq)
2762 return ISEQ_BODY(iseq)->param.flags.has_opt == FALSE &&
2763 ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
2764 ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
2765 ISEQ_BODY(iseq)->param.flags.has_kw == TRUE &&
2766 ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
2767 ISEQ_BODY(iseq)->param.flags.forwardable == FALSE &&
2768 ISEQ_BODY(iseq)->param.flags.has_block == FALSE &&
2769 ISEQ_BODY(iseq)->param.flags.accepts_no_block == FALSE;
2772#define ALLOW_HEAP_ARGV (-2)
2773#define ALLOW_HEAP_ARGV_KEEP_KWSPLAT (-3)
2778 vm_check_canary(GET_EC(), cfp->sp);
2784 int argc = calling->argc;
2786 if (UNLIKELY(max_args <= ALLOW_HEAP_ARGV && len + argc > VM_ARGC_STACK_MAX)) {
2790 VALUE *argv = cfp->sp - argc;
2794 cfp->sp -= argc - 1;
2795 cfp->sp[-1] = argv_ary;
2797 calling->heap_argv = argv_ary;
2803 if (max_args >= 0 &&
len + argc > max_args) {
2811 calling->argc +=
len - (max_args - argc + 1);
2812 len = max_args - argc + 1;
2821 calling->heap_argv = 0;
2823 CHECK_VM_STACK_OVERFLOW(cfp,
len);
2825 for (i = 0; i <
len; i++) {
2826 *cfp->sp++ = ptr[i];
2838 const VALUE *
const passed_keywords = vm_ci_kwarg(ci)->keywords;
2839 const int kw_len = vm_ci_kwarg(ci)->keyword_len;
2840 const VALUE h = rb_hash_new_with_size(kw_len);
2841 VALUE *sp = cfp->sp;
2844 for (i=0; i<kw_len; i++) {
2845 rb_hash_aset(h, passed_keywords[i], (sp - kw_len)[i]);
2849 cfp->sp -= kw_len - 1;
2850 calling->argc -= kw_len - 1;
2851 calling->kw_splat = 1;
2855vm_caller_setup_keyword_hash(
const struct rb_callinfo *ci,
VALUE keyword_hash)
2858 if (keyword_hash !=
Qnil) {
2860 keyword_hash = rb_hash_dup(rb_to_hash_type(keyword_hash));
2863 else if (!IS_ARGS_KW_SPLAT_MUT(ci) && !
RHASH_EMPTY_P(keyword_hash)) {
2869 keyword_hash = rb_hash_dup(keyword_hash);
2871 return keyword_hash;
2877 const struct rb_callinfo *restrict ci,
int max_args)
2879 if (UNLIKELY(IS_ARGS_SPLAT(ci))) {
2880 if (IS_ARGS_KW_SPLAT(ci)) {
2882 VM_ASSERT(calling->kw_splat == 1);
2886 VALUE ary = cfp->sp[0];
2887 VALUE kwh = vm_caller_setup_keyword_hash(ci, cfp->sp[1]);
2890 if (vm_caller_setup_arg_splat(cfp, calling, ary, max_args))
return;
2894 if (UNLIKELY(calling->heap_argv)) {
2896 ((
struct RHash *)kwh)->basic.flags |= RHASH_PASS_AS_KEYWORDS;
2897 if (max_args != ALLOW_HEAP_ARGV_KEEP_KWSPLAT) {
2898 calling->kw_splat = 0;
2906 VM_ASSERT(calling->kw_splat == 1);
2910 calling->kw_splat = 0;
2915 VM_ASSERT(calling->kw_splat == 0);
2919 VALUE ary = cfp->sp[0];
2921 if (vm_caller_setup_arg_splat(cfp, calling, ary, max_args)) {
2926 VALUE last_hash, argv_ary;
2927 if (UNLIKELY(argv_ary = calling->heap_argv)) {
2928 if (!IS_ARGS_KEYWORD(ci) &&
2931 (((
struct RHash *)last_hash)->basic.flags & RHASH_PASS_AS_KEYWORDS)) {
2936 calling->kw_splat = 1;
2942 if (!IS_ARGS_KEYWORD(ci) &&
2943 calling->argc > 0 &&
2945 (((
struct RHash *)last_hash)->basic.flags & RHASH_PASS_AS_KEYWORDS)) {
2952 cfp->sp[-1] = rb_hash_dup(last_hash);
2953 calling->kw_splat = 1;
2959 else if (UNLIKELY(IS_ARGS_KW_SPLAT(ci))) {
2961 VM_ASSERT(calling->kw_splat == 1);
2962 VALUE kwh = vm_caller_setup_keyword_hash(ci, cfp->sp[-1]);
2967 calling->kw_splat = 0;
2973 else if (UNLIKELY(IS_ARGS_KEYWORD(ci))) {
2975 VM_ASSERT(calling->kw_splat == 0);
2981 vm_caller_setup_arg_kw(cfp, calling, ci);
2985#define USE_OPT_HIST 0
2988#define OPT_HIST_MAX 64
2989static int opt_hist[OPT_HIST_MAX+1];
2993opt_hist_show_results_at_exit(
void)
2995 for (
int i=0; i<OPT_HIST_MAX; i++) {
2996 ruby_debug_printf(
"opt_hist\t%d\t%d\n", i, opt_hist[i]);
3006 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
3007 const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
3008 const int opt = calling->argc - lead_num;
3009 const int opt_num = ISEQ_BODY(iseq)->param.opt_num;
3010 const int opt_pc = (int)ISEQ_BODY(iseq)->param.opt_table[opt];
3011 const int param = ISEQ_BODY(iseq)->param.size;
3012 const int local = ISEQ_BODY(iseq)->local_table_size;
3013 const int delta = opt_num - opt;
3015 RB_DEBUG_COUNTER_INC(ccf_iseq_opt);
3018 if (opt_pc < OPT_HIST_MAX) {
3022 opt_hist[OPT_HIST_MAX]++;
3026 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), opt_pc, param - delta, local);
3034 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
3035 const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
3036 const int opt = calling->argc - lead_num;
3037 const int opt_pc = (int)ISEQ_BODY(iseq)->param.opt_table[opt];
3039 RB_DEBUG_COUNTER_INC(ccf_iseq_opt);
3042 if (opt_pc < OPT_HIST_MAX) {
3046 opt_hist[OPT_HIST_MAX]++;
3050 return vm_call_iseq_setup_tailcall(ec, cfp, calling, opt_pc);
3055 VALUE *
const passed_values,
const int passed_keyword_len,
const VALUE *
const passed_keywords,
3056 VALUE *
const locals);
3063 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
3064 int param_size = ISEQ_BODY(iseq)->param.size;
3065 int local_size = ISEQ_BODY(iseq)->local_table_size;
3068 VM_ASSERT(ISEQ_BODY(iseq)->param.flags.forwardable);
3070 local_size = local_size + vm_ci_argc(calling->cd->ci);
3071 param_size = param_size + vm_ci_argc(calling->cd->ci);
3073 cfp->sp[0] = (
VALUE)calling->cd->ci;
3075 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param_size, local_size);
3085 VM_ASSERT(vm_ci_flag(ci) & VM_CALL_KWARG);
3086 RB_DEBUG_COUNTER_INC(ccf_iseq_kw1);
3088 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
3089 const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;
3091 const int ci_kw_len = kw_arg->keyword_len;
3092 const VALUE *
const ci_keywords = kw_arg->keywords;
3093 VALUE *argv = cfp->sp - calling->argc;
3094 VALUE *
const klocals = argv + kw_param->bits_start - kw_param->num;
3095 const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
3097 MEMCPY(ci_kws, argv + lead_num,
VALUE, ci_kw_len);
3098 args_setup_kw_parameters(ec, iseq, vm_cc_cme(cc), ci_kws, ci_kw_len, ci_keywords, klocals);
3100 int param = ISEQ_BODY(iseq)->param.size;
3101 int local = ISEQ_BODY(iseq)->local_table_size;
3102 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);
3109 const struct rb_callinfo *MAYBE_UNUSED(ci) = calling->cd->ci;
3112 VM_ASSERT((vm_ci_flag(ci) & VM_CALL_KWARG) == 0);
3113 RB_DEBUG_COUNTER_INC(ccf_iseq_kw2);
3115 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
3116 const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;
3117 VALUE *
const argv = cfp->sp - calling->argc;
3118 VALUE *
const klocals = argv + kw_param->bits_start - kw_param->num;
3121 for (i=0; i<kw_param->num; i++) {
3122 klocals[i] = kw_param->default_values[i];
3129 int param = ISEQ_BODY(iseq)->param.size;
3130 int local = ISEQ_BODY(iseq)->local_table_size;
3131 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);
3141 cfp->sp -= (calling->argc + 1);
3142 rb_insn_func_t func_ptr = (rb_insn_func_t)(uintptr_t)bf->func_ptr;
3143 return builtin_invoker0(ec, calling->recv, NULL, func_ptr);
3152 set_table *dup_check_table = &vm->unused_block_warning_table;
3162 .v = (
VALUE)cme->def,
3166 if (!strict_unused_block) {
3167 key = (st_data_t)cme->def->original_id;
3169 if (set_table_lookup(dup_check_table, key)) {
3179 key |= (st_data_t)(k1.b[i] ^ k2.b[
SIZEOF_VALUE-1-i]) << (8 * i);
3184 fprintf(stderr,
"pc:%p def:%p\n", pc, (
void *)cme->def);
3185 fprintf(stderr,
"key:%p\n", (
void *)key);
3189 if (set_insert(dup_check_table, key)) {
3194 VALUE name = rb_gen_method_name(cme->defined_class, ISEQ_BODY(iseq)->location.base_label);
3196 if (!
NIL_P(m_loc)) {
3197 rb_warn(
"the block passed to '%"PRIsVALUE
"' defined at %"PRIsVALUE
":%"PRIsVALUE
" may be ignored",
3201 rb_warn(
"the block may be ignored because '%"PRIsVALUE
"' does not use a block", name);
3208 const rb_iseq_t *iseq,
VALUE *argv,
int param_size,
int local_size)
3213 VM_ASSERT((vm_ci_argc(ci), 1));
3214 VM_ASSERT(vm_cc_cme(cc) != NULL);
3216 if (UNLIKELY(!ISEQ_BODY(iseq)->param.flags.use_block &&
3217 calling->block_handler != VM_BLOCK_HANDLER_NONE &&
3218 !(vm_ci_flag(calling->cd->ci) & (VM_CALL_OPT_SEND | VM_CALL_SUPER)))) {
3219 warn_unused_block(vm_cc_cme(cc), iseq, (
void *)CFP_PC(ec->cfp));
3222 if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_KW_SPLAT))) {
3223 if (LIKELY(rb_simple_iseq_p(iseq))) {
3225 int lead_num = ISEQ_BODY(iseq)->param.lead_num;
3226 CALLER_SETUP_ARG(cfp, calling, ci, lead_num);
3228 if (calling->argc != lead_num) {
3229 argument_arity_error(ec, iseq, vm_cc_cme(cc), calling->argc, lead_num, lead_num);
3233 VM_ASSERT(cc == calling->cc);
3235 if (vm_call_iseq_optimizable_p(ci, cc)) {
3236 if ((iseq->body->builtin_attrs & BUILTIN_ATTR_SINGLE_NOARG_LEAF) && ruby_vm_c_events_enabled == 0) {
3237 VM_ASSERT(iseq->body->builtin_attrs & BUILTIN_ATTR_LEAF);
3238 vm_cc_bf_set(cc, (
void *)iseq->body->iseq_encoded[1]);
3239 CC_SET_FASTPATH(cc, vm_call_single_noarg_leaf_builtin,
true);
3242 CC_SET_FASTPATH(cc, vm_call_iseq_setup_func(ci, param_size, local_size),
true);
3247 else if (rb_iseq_only_optparam_p(iseq)) {
3250 const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
3251 const int opt_num = ISEQ_BODY(iseq)->param.opt_num;
3253 CALLER_SETUP_ARG(cfp, calling, ci, lead_num + opt_num);
3254 const int argc = calling->argc;
3255 const int opt = argc - lead_num;
3257 if (opt < 0 || opt > opt_num) {
3258 argument_arity_error(ec, iseq, vm_cc_cme(cc), argc, lead_num, lead_num + opt_num);
3261 if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_TAILCALL))) {
3262 CC_SET_FASTPATH(cc, vm_call_iseq_setup_normal_opt_start,
3263 !IS_ARGS_SPLAT(ci) && !IS_ARGS_KEYWORD(ci) &&
3264 vm_call_cacheable(ci, cc));
3267 CC_SET_FASTPATH(cc, vm_call_iseq_setup_tailcall_opt_start,
3268 !IS_ARGS_SPLAT(ci) && !IS_ARGS_KEYWORD(ci) &&
3269 vm_call_cacheable(ci, cc));
3273 VM_ASSERT((
int)ISEQ_BODY(iseq)->param.size == lead_num + opt_num);
3274 for (
int i=argc; i<lead_num + opt_num; i++) {
3277 return (
int)ISEQ_BODY(iseq)->param.opt_table[opt];
3279 else if (rb_iseq_only_kwparam_p(iseq) && !IS_ARGS_SPLAT(ci)) {
3280 const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
3281 const int argc = calling->argc;
3282 const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;
3284 if (vm_ci_flag(ci) & VM_CALL_KWARG) {
3287 if (argc - kw_arg->keyword_len == lead_num) {
3288 const int ci_kw_len = kw_arg->keyword_len;
3289 const VALUE *
const ci_keywords = kw_arg->keywords;
3291 MEMCPY(ci_kws, argv + lead_num,
VALUE, ci_kw_len);
3293 VALUE *
const klocals = argv + kw_param->bits_start - kw_param->num;
3294 args_setup_kw_parameters(ec, iseq, vm_cc_cme(cc), ci_kws, ci_kw_len, ci_keywords, klocals);
3296 CC_SET_FASTPATH(cc, vm_call_iseq_setup_kwparm_kwarg,
3297 vm_call_cacheable(ci, cc));
3302 else if (argc == lead_num) {
3304 VALUE *
const klocals = argv + kw_param->bits_start - kw_param->num;
3305 args_setup_kw_parameters(ec, iseq, vm_cc_cme(cc), NULL, 0, NULL, klocals);
3307 if (klocals[kw_param->num] ==
INT2FIX(0)) {
3309 CC_SET_FASTPATH(cc, vm_call_iseq_setup_kwparm_nokwarg,
3310 vm_call_cacheable(ci, cc));
3336 if (ISEQ_BODY(iseq)->param.flags.forwardable) {
3337 bool can_fastpath =
true;
3339 if ((vm_ci_flag(ci) & VM_CALL_FORWARDING)) {
3341 if (vm_ci_argc(ci) != vm_ci_argc(forward_cd->caller_ci)) {
3342 ci = vm_ci_new_runtime(
3349 ci = forward_cd->caller_ci;
3351 can_fastpath =
false;
3355 if (!vm_ci_markable(ci)) {
3356 ci = vm_ci_new_runtime(
3361 can_fastpath =
false;
3363 argv[param_size - 1] = (
VALUE)ci;
3364 CC_SET_FASTPATH(cc, vm_call_iseq_forwardable, can_fastpath);
3368 return setup_parameters_complex(ec, iseq, calling, ci, argv, arg_setup_method);
3395 const VALUE * lep = VM_CF_LEP(cfp);
3401 if (VM_ENV_FLAGS(lep, VM_ENV_FLAG_ESCAPED)) {
3406 iseq = CFP_ISEQ(rb_vm_search_cf_from_ep(ec, cfp, lep));
3410 int local_size = ISEQ_BODY(iseq)->local_table_size + argc;
3412 const VALUE * from = lep - (local_size + VM_ENV_DATA_SIZE - 1);
3413 VALUE * to = cfp->sp - 1;
3417 CHECK_VM_STACK_OVERFLOW0(cfp, to,
RARRAY_LEN(splat));
3422 CHECK_VM_STACK_OVERFLOW0(cfp, to, argc);
3424 cfp->sp = to + argc;
3443 RB_DEBUG_COUNTER_INC(ccf_iseq_setup);
3446 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
3447 int param_size = ISEQ_BODY(iseq)->param.size;
3448 int local_size = ISEQ_BODY(iseq)->local_table_size;
3450 RUBY_ASSERT(!ISEQ_BODY(iseq)->param.flags.forwardable);
3452 const int opt_pc = vm_callee_setup_arg(ec, calling, iseq, cfp->sp - calling->argc, param_size, local_size);
3453 return vm_call_iseq_setup_2(ec, cfp, calling, opt_pc, param_size, local_size);
3459 RB_DEBUG_COUNTER_INC(ccf_iseq_setup);
3462 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
3463 int param_size = ISEQ_BODY(iseq)->param.size;
3464 int local_size = ISEQ_BODY(iseq)->local_table_size;
3466 RUBY_ASSERT(ISEQ_BODY(iseq)->param.flags.forwardable);
3469 local_size = local_size + vm_ci_argc(calling->cd->ci);
3470 param_size = param_size + vm_ci_argc(calling->cd->ci);
3472 const int opt_pc = vm_callee_setup_arg(ec, calling, iseq, cfp->sp - calling->argc, param_size, local_size);
3473 return vm_call_iseq_setup_2(ec, cfp, calling, opt_pc, param_size, local_size);
3478 int opt_pc,
int param_size,
int local_size)
3483 if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_TAILCALL))) {
3484 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), opt_pc, param_size, local_size);
3487 return vm_call_iseq_setup_tailcall(ec, cfp, calling, opt_pc);
3493 int opt_pc,
int param_size,
int local_size)
3495 const rb_iseq_t *iseq = def_iseq_ptr(me->def);
3496 VALUE *argv = cfp->sp - calling->argc;
3497 VALUE *sp = argv + param_size;
3498 cfp->sp = argv - 1 ;
3500 vm_push_frame(ec, iseq, VM_FRAME_MAGIC_METHOD | VM_ENV_FLAG_LOCAL, calling->recv,
3501 calling->block_handler, (
VALUE)me,
3502 ISEQ_BODY(iseq)->iseq_encoded + opt_pc, sp,
3503 local_size - param_size,
3504 ISEQ_BODY(iseq)->stack_max);
3513 VALUE *argv = cfp->sp - calling->argc;
3515 const rb_iseq_t *iseq = def_iseq_ptr(me->def);
3516 VALUE *src_argv = argv;
3517 VALUE *sp_orig, *sp;
3518 VALUE finish_flag = VM_FRAME_FINISHED_P(cfp) ? VM_FRAME_FLAG_FINISH : 0;
3520 if (VM_BH_FROM_CFP_P(calling->block_handler, cfp)) {
3521 struct rb_captured_block *dst_captured = VM_CFP_TO_CAPTURED_BLOCK(RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp));
3522 const struct rb_captured_block *src_captured = VM_BH_TO_CAPT_BLOCK(calling->block_handler);
3523 dst_captured->code.val = src_captured->code.val;
3524 if (VM_BH_ISEQ_BLOCK_P(calling->block_handler)) {
3525 calling->block_handler = VM_BH_FROM_ISEQ_BLOCK(dst_captured);
3528 calling->block_handler = VM_BH_FROM_IFUNC_BLOCK(dst_captured);
3532 vm_pop_frame(ec, cfp, cfp->ep);
3535 sp_orig = sp = cfp->sp;
3538 sp[0] = calling->recv;
3542 for (i=0; i < ISEQ_BODY(iseq)->param.size; i++) {
3543 *sp++ = src_argv[i];
3546 vm_push_frame(ec, iseq, VM_FRAME_MAGIC_METHOD | VM_ENV_FLAG_LOCAL | finish_flag,
3547 calling->recv, calling->block_handler, (
VALUE)me,
3548 ISEQ_BODY(iseq)->iseq_encoded + opt_pc, sp,
3549 ISEQ_BODY(iseq)->local_table_size - ISEQ_BODY(iseq)->param.size,
3550 ISEQ_BODY(iseq)->stack_max);
3558ractor_unsafe_check(
void)
3560 if (!rb_ractor_main_p()) {
3561 rb_raise(rb_eRactorUnsafeError,
"ractor unsafe method called from not main ractor");
3568 ractor_unsafe_check();
3576 ractor_unsafe_check();
3578 return (*f)(argc, argv, recv);
3584 ractor_unsafe_check();
3592 ractor_unsafe_check();
3594 return (*f)(recv, argv[0]);
3600 ractor_unsafe_check();
3602 return (*f)(recv, argv[0], argv[1]);
3608 ractor_unsafe_check();
3610 return (*f)(recv, argv[0], argv[1], argv[2]);
3616 ractor_unsafe_check();
3618 return (*f)(recv, argv[0], argv[1], argv[2], argv[3]);
3624 ractor_unsafe_check();
3625 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3626 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4]);
3632 ractor_unsafe_check();
3633 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3634 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
3640 ractor_unsafe_check();
3641 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3642 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
3648 ractor_unsafe_check();
3649 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3650 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
3656 ractor_unsafe_check();
3657 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3658 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
3664 ractor_unsafe_check();
3665 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3666 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
3672 ractor_unsafe_check();
3673 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3674 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
3680 ractor_unsafe_check();
3681 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3682 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
3688 ractor_unsafe_check();
3689 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3690 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
3696 ractor_unsafe_check();
3697 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3698 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
3704 ractor_unsafe_check();
3705 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3706 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
3720 return (*f)(argc, argv, recv);
3734 return (*f)(recv, argv[0]);
3741 return (*f)(recv, argv[0], argv[1]);
3748 return (*f)(recv, argv[0], argv[1], argv[2]);
3755 return (*f)(recv, argv[0], argv[1], argv[2], argv[3]);
3761 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3762 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4]);
3768 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3769 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
3775 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3776 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
3782 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3783 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
3789 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3790 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
3796 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3797 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
3803 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3804 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
3810 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3811 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
3817 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3818 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
3824 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3825 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
3831 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3832 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
3838 const int ov_flags = RAISED_STACKOVERFLOW;
3839 if (LIKELY(reg_cfp == ec->cfp + 1))
return TRUE;
3840 if (rb_ec_raised_p(ec, ov_flags)) {
3841 rb_ec_raised_reset(ec, ov_flags);
3847#define CHECK_CFP_CONSISTENCY(func) \
3848 (LIKELY(vm_cfp_consistent_p(ec, reg_cfp)) ? (void)0 : \
3849 rb_bug(func ": cfp consistency error (%p, %p)", (void *)reg_cfp, (void *)(ec->cfp+1)))
3855#if VM_DEBUG_VERIFY_METHOD_CACHE
3856 switch (me->def->type) {
3857 case VM_METHOD_TYPE_CFUNC:
3858 case VM_METHOD_TYPE_NOTIMPLEMENTED:
3860# define METHOD_BUG(t) case VM_METHOD_TYPE_##t: rb_bug("wrong method type: " #t)
3862 METHOD_BUG(ATTRSET);
3864 METHOD_BUG(BMETHOD);
3867 METHOD_BUG(OPTIMIZED);
3868 METHOD_BUG(MISSING);
3869 METHOD_BUG(REFINED);
3873 rb_bug(
"wrong method type: %d", me->def->type);
3876 return UNALIGNED_MEMBER_PTR(me->def, body.cfunc);
3883 RB_DEBUG_COUNTER_INC(ccf_cfunc_with_frame);
3890 VALUE recv = calling->recv;
3891 VALUE block_handler = calling->block_handler;
3892 VALUE frame_type = VM_FRAME_MAGIC_CFUNC | VM_FRAME_FLAG_CFRAME | VM_ENV_FLAG_LOCAL;
3894 if (UNLIKELY(calling->kw_splat)) {
3895 frame_type |= VM_FRAME_FLAG_CFRAME_KW;
3898 VM_ASSERT(reg_cfp == ec->cfp);
3900 RUBY_DTRACE_CMETHOD_ENTRY_HOOK(ec, me->owner, me->def->original_id);
3903 vm_push_frame(ec, NULL, frame_type, recv,
3904 block_handler, (
VALUE)me,
3905 0, ec->cfp->sp, 0, 0);
3907 int len = cfunc->argc;
3910 reg_cfp->sp = stack_bottom;
3911 val = (*cfunc->invoker)(recv, argc, argv, cfunc->func);
3913 CHECK_CFP_CONSISTENCY(
"vm_call_cfunc");
3915 rb_vm_pop_frame(ec);
3917 VM_ASSERT(ec->cfp->sp == stack_bottom);
3919 EXEC_EVENT_HOOK(ec,
RUBY_EVENT_C_RETURN, recv, me->def->original_id, vm_ci_mid(ci), me->owner, val);
3920 RUBY_DTRACE_CMETHOD_RETURN_HOOK(ec, me->owner, me->def->original_id);
3930 VM_ASSERT(cme->def->type == VM_METHOD_TYPE_CFUNC);
3932 VALUE *sp = ec->cfp->sp;
3933 VALUE recv = *(sp - recv_idx - 1);
3934 VALUE frame_type = VM_FRAME_MAGIC_CFUNC | VM_FRAME_FLAG_CFRAME | VM_ENV_FLAG_LOCAL;
3935 VALUE block_handler = VM_BLOCK_HANDLER_NONE;
3936#if VM_CHECK_MODE > 0
3938 *(GET_EC()->cfp->sp) =
Qfalse;
3940 vm_push_frame(ec, NULL, frame_type, recv, block_handler, (
VALUE)cme, 0, ec->cfp->sp, 0, 0);
/* Predicate: does this call site use a positional splat or any keyword /
 * kw-splat arguments?  (Return type and braces lost in extraction.) */
3945rb_splat_or_kwargs_p(
const struct rb_callinfo *restrict ci)
3947 return IS_ARGS_SPLAT(ci) || IS_ARGS_KW_OR_KW_SPLAT(ci);
3953 int argc = calling->argc;
3954 VALUE *stack_bottom = reg_cfp->sp - argc - 1;
3955 VALUE *argv = &stack_bottom[1];
3957 return vm_call_cfunc_with_frame_(ec, reg_cfp, calling, argc, argv, stack_bottom);
/*
 * NOTE(review): fragment of the generic cfunc call path used when the
 * fast-path conditions don't hold.  Normalizes caller arguments
 * (possibly spilling them to a heap argv array), then delegates to the
 * frame-pushing helpers above.
 */
3964 RB_DEBUG_COUNTER_INC(ccf_cfunc_other);
3966 CALLER_SETUP_ARG(reg_cfp, calling, ci, ALLOW_HEAP_ARGV_KEEP_KWSPLAT);
/* Arguments were collected into a heap array: call with that argv. */
3968 if (UNLIKELY(argv_ary = calling->heap_argv)) {
3969 VM_ASSERT(!IS_ARGS_KEYWORD(ci));
3972 VALUE *stack_bottom = reg_cfp->sp - 2;
3974 VM_ASSERT(calling->argc == 1);
3978 return vm_call_cfunc_with_frame_(ec, reg_cfp, calling, argc, argv, stack_bottom);
/* Cache the simple with-frame path only for plain (no splat/kw/fwd) calls. */
3981 CC_SET_FASTPATH(calling->cc, vm_call_cfunc_with_frame, !rb_splat_or_kwargs_p(ci) && !calling->kw_splat && !(vm_ci_flag(ci) & VM_CALL_FORWARDING));
3983 return vm_call_cfunc_with_frame(ec, reg_cfp, calling);
3990 VALUE argv_ary = reg_cfp->sp[-1 - stack_offset];
3993 if (UNLIKELY(argc > VM_ARGC_STACK_MAX)) {
3994 return vm_call_cfunc_other(ec, reg_cfp, calling);
3998 calling->kw_splat = 0;
4000 VALUE *stack_bottom = reg_cfp->sp - 2 - stack_offset;
4001 VALUE *sp = stack_bottom;
4002 CHECK_VM_STACK_OVERFLOW(reg_cfp, argc);
4003 for(i = 0; i < argc; i++) {
4008 return vm_call_cfunc_with_frame_(ec, reg_cfp, calling, argc, stack_bottom+1, stack_bottom);
/*
 * NOTE(review): fragment of the cfunc fast path for `f(*ary)` call sites.
 * Bails out to vm_call_cfunc_other when the splatted array's last element
 * is a hash flagged RHASH_PASS_AS_KEYWORDS (would need kw handling).
 */
4014 RB_DEBUG_COUNTER_INC(ccf_cfunc_only_splat);
4015 VALUE argv_ary = reg_cfp->sp[-1];
4019 int argc_offset = 0;
4021 if (UNLIKELY(argc > 0 &&
4023 (((
struct RHash *)last_hash)->basic.flags & RHASH_PASS_AS_KEYWORDS))) {
/* Trailing kw-marked hash: fall back to the general path. */
4025 return vm_call_cfunc_other(ec, reg_cfp, calling);
4029 return vm_call_cfunc_array_argv(ec, reg_cfp, calling, 0, argc_offset);
4035 RB_DEBUG_COUNTER_INC(ccf_cfunc_only_splat_kw);
4036 VALUE keyword_hash = reg_cfp->sp[-1];
4039 return vm_call_cfunc_array_argv(ec, reg_cfp, calling, 1, 0);
4042 return vm_call_cfunc_other(ec, reg_cfp, calling);
/*
 * NOTE(review): fragment of the top-level cfunc dispatcher.  Chooses and
 * caches (CC_SET_FASTPATH) a specialized handler per call-site shape:
 *   - `f(*ary)`            -> vm_call_cfunc_only_splat
 *   - `f(*ary, **kw)`      -> vm_call_cfunc_only_splat_kw
 *   - everything else      -> vm_call_cfunc_other
 */
4049 RB_DEBUG_COUNTER_INC(ccf_cfunc);
4051 if (IS_ARGS_SPLAT(ci) && !(vm_ci_flag(ci) & VM_CALL_FORWARDING)) {
4052 if (!IS_ARGS_KW_SPLAT(ci) && vm_ci_argc(ci) == 1) {
4054 CC_SET_FASTPATH(calling->cc, vm_call_cfunc_only_splat, TRUE);
4055 return vm_call_cfunc_only_splat(ec, reg_cfp, calling);
4057 if (IS_ARGS_KW_SPLAT(ci) && vm_ci_argc(ci) == 2) {
4059 CC_SET_FASTPATH(calling->cc, vm_call_cfunc_only_splat_kw, TRUE);
4060 return vm_call_cfunc_only_splat_kw(ec, reg_cfp, calling);
4064 CC_SET_FASTPATH(calling->cc, vm_call_cfunc_other, TRUE);
4065 return vm_call_cfunc_other(ec, reg_cfp, calling);
4072 RB_DEBUG_COUNTER_INC(ccf_ivar);
4074 VALUE ivar = vm_getivar(calling->recv, vm_cc_cme(cc)->def->body.attr.id, NULL, NULL, cc, TRUE,
Qnil);
4081 RB_DEBUG_COUNTER_INC(ccf_attrset);
4082 VALUE val = *(cfp->sp - 1);
4085 shape_id_t dest_shape_id;
4086 vm_cc_atomic_shape_and_index(cc, &dest_shape_id, &index);
4087 ID id = vm_cc_cme(cc)->def->body.attr.id;
4088 rb_check_frozen(obj);
4089 VALUE res = vm_setivar(obj,
id, val, dest_shape_id, index);
4097 res = vm_setivar_class(obj,
id, val, dest_shape_id, index);
4098 if (!UNDEF_P(res)) {
4105 res = vm_setivar_default(obj,
id, val, dest_shape_id, index);
4106 if (!UNDEF_P(res)) {
4111 res = vm_setivar_slowpath_attr(obj,
id, val, cc);
4119 return vm_call_attrset_direct(ec, cfp, calling->cc, calling->recv);
4129 VALUE procv = cme->def->body.bmethod.proc;
4132 cme->def->body.bmethod.defined_ractor_id != rb_ec_ractor_id(ec)) {
4133 rb_raise(
rb_eRuntimeError,
"defined with an un-shareable Proc in a different Ractor");
4137 GetProcPtr(procv, proc);
4138 val = vm_invoke_bmethod(ec, proc, calling->recv, CALLING_ARGC(calling), argv, calling->kw_splat, calling->block_handler, vm_cc_cme(cc));
4148 RB_DEBUG_COUNTER_INC(ccf_iseq_bmethod);
4152 VALUE procv = cme->def->body.bmethod.proc;
4155 cme->def->body.bmethod.defined_ractor_id != rb_ec_ractor_id(ec)) {
4156 rb_raise(
rb_eRuntimeError,
"defined with an un-shareable Proc in a different Ractor");
4160 GetProcPtr(procv, proc);
4161 const struct rb_block *block = &proc->block;
4163 while (vm_block_type(block) == block_type_proc) {
4164 block = vm_proc_block(block->as.proc);
4166 VM_ASSERT(vm_block_type(block) == block_type_iseq);
4169 const rb_iseq_t *iseq = rb_iseq_check(captured->code.iseq);
4170 VALUE *
const argv = cfp->sp - calling->argc;
4171 const int arg_size = ISEQ_BODY(iseq)->param.size;
4174 if (vm_ci_flag(calling->cd->ci) & VM_CALL_ARGS_SIMPLE) {
4175 opt_pc = vm_callee_setup_block_arg(ec, calling, calling->cd->ci, iseq, argv, arg_setup_method);
4178 opt_pc = setup_parameters_complex(ec, iseq, calling, calling->cd->ci, argv, arg_setup_method);
4183 vm_push_frame(ec, iseq,
4184 VM_FRAME_MAGIC_BLOCK | VM_FRAME_FLAG_BMETHOD | VM_FRAME_FLAG_LAMBDA,
4186 VM_GUARDED_PREV_EP(captured->ep),
4188 ISEQ_BODY(iseq)->iseq_encoded + opt_pc,
4190 ISEQ_BODY(iseq)->local_table_size - arg_size,
4191 ISEQ_BODY(iseq)->stack_max);
4199 RB_DEBUG_COUNTER_INC(ccf_noniseq_bmethod);
4203 CALLER_SETUP_ARG(cfp, calling, calling->cd->ci, ALLOW_HEAP_ARGV);
4204 if (UNLIKELY(calling->heap_argv)) {
4209 argc = calling->argc;
4212 cfp->sp += - argc - 1;
4215 return vm_call_bmethod_body(ec, calling, argv);
/*
 * NOTE(review): fragment of the bmethod (define_method) dispatcher.
 * Unwraps proc-wrapping-proc chains to the underlying block, then caches
 * the iseq path when the block is bytecode, otherwise the non-iseq path.
 */
4221 RB_DEBUG_COUNTER_INC(ccf_bmethod);
4225 VALUE procv = cme->def->body.bmethod.proc;
4227 GetProcPtr(procv, proc);
4228 const struct rb_block *block = &proc->block;
/* A proc may wrap another proc; walk to the innermost block. */
4230 while (vm_block_type(block) == block_type_proc) {
4231 block = vm_proc_block(block->as.proc);
4233 if (vm_block_type(block) == block_type_iseq) {
4234 CC_SET_FASTPATH(cc, vm_call_iseq_bmethod, TRUE);
4235 return vm_call_iseq_bmethod(ec, cfp, calling);
4238 CC_SET_FASTPATH(cc, vm_call_noniseq_bmethod, TRUE);
4239 return vm_call_noniseq_bmethod(ec, cfp, calling);
4243rb_find_defined_class_by_owner(
VALUE current_class,
VALUE target_owner)
4245 VALUE klass = current_class;
4253 while (
RTEST(klass)) {
4255 if (owner == target_owner) {
4261 return current_class;
4270 if (orig_me->defined_class == 0) {
4271 VALUE defined_class = rb_find_defined_class_by_owner(me->defined_class, orig_me->owner);
4272 VM_ASSERT_TYPE(orig_me->owner,
T_MODULE);
4273 cme = rb_method_entry_complement_defined_class(orig_me, me->called_id, defined_class);
4275 if (me->def->reference_count == 1) {
4276 RB_OBJ_WRITE(me, &me->def->body.alias.original_me, cme);
4280 rb_method_definition_create(VM_METHOD_TYPE_ALIAS, me->def->original_id);
4288 VM_ASSERT(callable_method_entry_p(cme));
4295 return aliased_callable_method_entry(me);
4301 calling->cc = &VM_CC_ON_STACK(
Qundef,
4304 aliased_callable_method_entry(vm_cc_cme(calling->cc)));
4306 return vm_call_method_each_type(ec, cfp, calling);
/* Derive the method_missing reason bits from the call-info flags:
 * VCALL (bare name, unless forwarding), FCALL (receiverless), SUPER.
 * (Function name line and braces lost in extraction.) */
4309static enum method_missing_reason
4312 enum method_missing_reason stat = MISSING_NOENTRY;
4313 if (vm_ci_flag(ci) & VM_CALL_VCALL && !(vm_ci_flag(ci) & VM_CALL_FORWARDING)) stat |= MISSING_VCALL;
4314 if (vm_ci_flag(ci) & VM_CALL_FCALL) stat |= MISSING_FCALL;
4315 if (vm_ci_flag(ci) & VM_CALL_SUPER) stat |= MISSING_SUPER;
4325 ASSUME(calling->argc >= 0);
4327 enum method_missing_reason missing_reason = MISSING_NOENTRY;
4328 int argc = calling->argc;
4329 VALUE recv = calling->recv;
4332 flags |= VM_CALL_OPT_SEND;
4334 if (UNLIKELY(! mid)) {
4335 mid = idMethodMissing;
4336 missing_reason = ci_missing_reason(ci);
4337 ec->method_missing_reason = missing_reason;
4340 if (UNLIKELY(argv_ary = calling->heap_argv)) {
4341 if (rb_method_basic_definition_p(klass, idMethodMissing)) {
4342 rb_ary_unshift(argv_ary, symbol);
4345 int priv = vm_ci_flag(ci) & (VM_CALL_FCALL | VM_CALL_VCALL);
4346 VALUE exc = rb_make_no_method_exception(
4368 CHECK_VM_STACK_OVERFLOW(reg_cfp, 1);
4371 argc = ++calling->argc;
4373 if (rb_method_basic_definition_p(klass, idMethodMissing)) {
4376 int priv = vm_ci_flag(ci) & (VM_CALL_FCALL | VM_CALL_VCALL);
4377 const VALUE *argv = STACK_ADDR_FROM_TOP(argc);
4378 VALUE exc = rb_make_no_method_exception(
4391 .ci = &VM_CI_ON_STACK(mid, flags, argc, vm_ci_kwarg(ci)),
4397 if (!(vm_ci_flag(ci) & VM_CALL_FORWARDING)) {
4398 calling->cd = &new_fcd.cd;
4402 VM_ASSERT((vm_ci_argc(caller_ci), 1));
4403 new_fcd.caller_ci = caller_ci;
4406 calling->cc = &VM_CC_ON_STACK(klass,
4408 { .method_missing_reason = missing_reason },
4409 rb_callable_method_entry_with_refinements(klass, mid, NULL));
4411 if (flags & VM_CALL_FCALL) {
4412 return vm_call_method(ec, reg_cfp, calling);
4416 VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));
4418 if (vm_cc_cme(cc) != NULL) {
4419 switch (METHOD_ENTRY_VISI(vm_cc_cme(cc))) {
4420 case METHOD_VISI_PUBLIC:
4421 return vm_call_method_each_type(ec, reg_cfp, calling);
4422 case METHOD_VISI_PRIVATE:
4423 vm_cc_method_missing_reason_set(cc, MISSING_PRIVATE);
4425 case METHOD_VISI_PROTECTED:
4426 vm_cc_method_missing_reason_set(cc, MISSING_PROTECTED);
4429 VM_UNREACHABLE(vm_call_method);
4431 return vm_call_method_missing(ec, reg_cfp, calling);
4434 return vm_call_method_nome(ec, reg_cfp, calling);
4444 i = calling->argc - 1;
4446 if (calling->argc == 0) {
4447 rb_raise(rb_eArgError,
"no method name given");
4471 return vm_call_symbol(ec, reg_cfp, calling, ci, sym, flags);
4477 RB_DEBUG_COUNTER_INC(ccf_opt_send_complex);
4479 int flags = VM_CALL_FCALL;
4483 CALLER_SETUP_ARG(reg_cfp, calling, ci, ALLOW_HEAP_ARGV);
4484 if (UNLIKELY(argv_ary = calling->heap_argv)) {
4486 flags |= VM_CALL_ARGS_SPLAT;
4487 if (calling->kw_splat) {
4488 VALUE last_hash = rb_ary_last(0, NULL, argv_ary);
4489 ((
struct RHash *)last_hash)->basic.flags |= RHASH_PASS_AS_KEYWORDS;
4490 calling->kw_splat = 0;
4492 return vm_call_symbol(ec, reg_cfp, calling, ci, sym, flags);
4495 if (calling->kw_splat) flags |= VM_CALL_KW_SPLAT;
4496 return vm_call_opt_send0(ec, reg_cfp, calling, flags);
4502 RB_DEBUG_COUNTER_INC(ccf_opt_send_simple);
4503 return vm_call_opt_send0(ec, reg_cfp, calling, vm_ci_flag(calling->cd->ci) | VM_CALL_FCALL);
/*
 * NOTE(review): fragment of the optimized `send`/`__send__` dispatcher.
 * Complex argument shapes (forwarding, splats, kw-splats, all-kwargs)
 * take the slow "complex" handler; everything else takes the simple one.
 * Either choice is cached into the call cache via CC_SET_FASTPATH.
 */
4509 RB_DEBUG_COUNTER_INC(ccf_opt_send);
4512 int flags = vm_ci_flag(ci);
4514 if (UNLIKELY((flags & VM_CALL_FORWARDING) || (!(flags & VM_CALL_ARGS_SIMPLE) &&
4515 ((calling->argc == 1 && (flags & (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT))) ||
4516 (calling->argc == 2 && (flags & VM_CALL_ARGS_SPLAT) && (flags & VM_CALL_KW_SPLAT)) ||
4517 ((flags & VM_CALL_KWARG) && (vm_ci_kwarg(ci)->keyword_len == calling->argc)))))) {
4518 CC_SET_FASTPATH(calling->cc, vm_call_opt_send_complex, TRUE);
4519 return vm_call_opt_send_complex(ec, reg_cfp, calling);
4522 CC_SET_FASTPATH(calling->cc, vm_call_opt_send_simple, TRUE);
4523 return vm_call_opt_send_simple(ec, reg_cfp, calling);
4528 const struct rb_callinfo *orig_ci,
enum method_missing_reason reason)
4530 RB_DEBUG_COUNTER_INC(ccf_method_missing);
4532 VALUE *argv = STACK_ADDR_FROM_TOP(calling->argc);
4533 unsigned int argc, flag;
4535 flag = VM_CALL_FCALL | VM_CALL_OPT_SEND | vm_ci_flag(orig_ci);
4536 argc = ++calling->argc;
4539 CHECK_VM_STACK_OVERFLOW(reg_cfp, 1);
4540 vm_check_canary(ec, reg_cfp->sp);
4544 argv[0] =
ID2SYM(vm_ci_mid(orig_ci));
4547 ec->method_missing_reason = reason;
4551 .ci = &VM_CI_ON_STACK(idMethodMissing, flag, argc, vm_ci_kwarg(orig_ci)),
4557 if (!(flag & VM_CALL_FORWARDING)) {
4558 calling->cd = &new_fcd.cd;
4562 VM_ASSERT((vm_ci_argc(caller_ci), 1));
4563 new_fcd.caller_ci = caller_ci;
4567 calling->cc = &VM_CC_ON_STACK(
Qundef, vm_call_general, {{ 0 }},
4568 rb_callable_method_entry_without_refinements(
CLASS_OF(calling->recv), idMethodMissing, NULL));
4569 return vm_call_method(ec, reg_cfp, calling);
4575 return vm_call_method_missing_body(ec, reg_cfp, calling, calling->cd->ci, vm_cc_cmethod_missing_reason(calling->cc));
4586 return vm_call_method_nome(ec, cfp, calling);
4588 if (cme->def->type == VM_METHOD_TYPE_REFINED &&
4589 cme->def->body.refined.orig_me) {
4590 cme = refined_method_callable_without_refinement(cme);
4593 calling->cc = &VM_CC_ON_STACK(
Qundef, vm_call_general, {{ 0 }}, cme);
4595 return vm_call_method_each_type(ec, cfp, calling);
4599find_refinement(
VALUE refinements,
VALUE klass)
4601 if (
NIL_P(refinements)) {
4604 return rb_hash_lookup(refinements, klass);
4613 if (CFP_ISEQ(cfp) && ISEQ_BODY(CFP_ISEQ(cfp))->
type == ISEQ_TYPE_BLOCK) {
4614 const rb_iseq_t *local_iseq = ISEQ_BODY(CFP_ISEQ(cfp))->local_iseq;
4617 cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
4618 if (RUBY_VM_CONTROL_FRAME_STACK_OVERFLOW_P(ec, cfp)) {
4622 }
while (CFP_ISEQ(cfp) != local_iseq);
4633 if (orig_me->defined_class == 0) {
4641 VM_ASSERT(callable_method_entry_p(cme));
4643 if (UNDEFINED_METHOD_ENTRY_P(cme)) {
4653 ID mid = vm_ci_mid(calling->cd->ci);
4654 const rb_cref_t *cref = vm_get_cref(cfp->ep);
4658 for (; cref; cref = CREF_NEXT(cref)) {
4659 const VALUE refinement = find_refinement(CREF_REFINEMENTS(cref), vm_cc_cme(cc)->owner);
4660 if (
NIL_P(refinement))
continue;
4663 rb_callable_method_entry(refinement, mid);
4666 if (vm_cc_call(cc) == vm_call_super_method) {
4669 if (top_me && rb_method_definition_eq(ref_me->def, top_me->def)) {
4674 if (cme->def->type != VM_METHOD_TYPE_REFINED ||
4675 cme->def != ref_me->def) {
4678 if (ref_me->def->type != VM_METHOD_TYPE_REFINED) {
4687 if (vm_cc_cme(cc)->def->body.refined.orig_me) {
4688 return refined_method_callable_without_refinement(vm_cc_cme(cc));
4703 if (calling->cd->cc) {
4704 const struct rb_callcache *cc = calling->cc = vm_cc_new(vm_cc_cme(calling->cc)->defined_class, ref_cme, vm_call_general, cc_type_refinement);
4706 return vm_call_method(ec, cfp, calling);
4709 struct rb_callcache *ref_cc = &VM_CC_ON_STACK(
Qundef, vm_call_general, {{ 0 }}, ref_cme);
4710 calling->cc= ref_cc;
4711 return vm_call_method(ec, cfp, calling);
4715 return vm_call_method_nome(ec, cfp, calling);
4721NOINLINE(
static VALUE
4729 int argc = calling->argc;
4732 if (argc > 0)
MEMMOVE(&TOPN(argc), &TOPN(argc-1),
VALUE, argc);
4735 return vm_invoke_block(ec, reg_cfp, calling, ci,
false, block_handler);
4741 RB_DEBUG_COUNTER_INC(ccf_opt_call);
4744 VALUE procval = calling->recv;
4745 return vm_invoke_block_opt_call(ec, reg_cfp, calling, ci, VM_BH_FROM_PROC(procval));
4751 RB_DEBUG_COUNTER_INC(ccf_opt_block_call);
4753 VALUE block_handler = VM_ENV_BLOCK_HANDLER(VM_CF_LEP(reg_cfp));
4756 if (BASIC_OP_UNREDEFINED_P(BOP_CALL, PROC_REDEFINED_OP_FLAG)) {
4757 return vm_invoke_block_opt_call(ec, reg_cfp, calling, ci, block_handler);
4760 calling->recv = rb_vm_bh_to_procval(ec, block_handler);
4761 calling->cc = rb_vm_search_method_slowpath(ci,
CLASS_OF(calling->recv));
4762 return vm_call_general(ec, reg_cfp, calling);
4769 VALUE recv = calling->recv;
4772 VM_ASSERT(vm_cc_cme(calling->cc)->def->type == VM_METHOD_TYPE_OPTIMIZED);
4773 VM_ASSERT(vm_cc_cme(calling->cc)->def->body.optimized.type == OPTIMIZED_METHOD_TYPE_STRUCT_AREF);
4775 const unsigned int off = vm_cc_cme(calling->cc)->def->body.optimized.index;
4776 return RSTRUCT_GET_RAW(recv,
off);
4782 RB_DEBUG_COUNTER_INC(ccf_opt_struct_aref);
4784 VALUE ret = vm_call_opt_struct_aref0(ec, calling);
4792 VALUE recv = calling->recv;
4795 VM_ASSERT(vm_cc_cme(calling->cc)->def->type == VM_METHOD_TYPE_OPTIMIZED);
4796 VM_ASSERT(vm_cc_cme(calling->cc)->def->body.optimized.type == OPTIMIZED_METHOD_TYPE_STRUCT_ASET);
4798 rb_check_frozen(recv);
4800 const unsigned int off = vm_cc_cme(calling->cc)->def->body.optimized.index;
4801 RSTRUCT_SET_RAW(recv,
off, val);
4809 RB_DEBUG_COUNTER_INC(ccf_opt_struct_aset);
4811 VALUE ret = vm_call_opt_struct_aset0(ec, calling, *(reg_cfp->sp - 1));
4819#define VM_CALL_METHOD_ATTR(var, func, nohook) \
4820 if (UNLIKELY(ruby_vm_c_events_enabled > 0)) { \
4821 EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_CALL, calling->recv, vm_cc_cme(cc)->def->original_id, \
4822 vm_ci_mid(ci), vm_cc_cme(cc)->owner, Qundef); \
4824 EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_RETURN, calling->recv, vm_cc_cme(cc)->def->original_id, \
4825 vm_ci_mid(ci), vm_cc_cme(cc)->owner, (var)); \
/*
 * NOTE(review): fragment of the OPTIMIZED-method-type dispatcher.
 * Routes each optimized method kind (send / Proc#call / block call /
 * Struct aref / Struct aset) to its handler and caches the choice.
 * Struct accessors additionally wrap the call in VM_CALL_METHOD_ATTR
 * so C_CALL/C_RETURN tracing events fire when hooks are enabled.
 */
4836 switch (vm_cc_cme(cc)->def->body.optimized.type) {
4837 case OPTIMIZED_METHOD_TYPE_SEND:
4838 CC_SET_FASTPATH(cc, vm_call_opt_send, TRUE);
4839 return vm_call_opt_send(ec, cfp, calling);
4840 case OPTIMIZED_METHOD_TYPE_CALL:
4841 CC_SET_FASTPATH(cc, vm_call_opt_call, TRUE);
4842 return vm_call_opt_call(ec, cfp, calling);
4843 case OPTIMIZED_METHOD_TYPE_BLOCK_CALL:
4844 CC_SET_FASTPATH(cc, vm_call_opt_block_call, TRUE);
4845 return vm_call_opt_block_call(ec, cfp, calling);
4846 case OPTIMIZED_METHOD_TYPE_STRUCT_AREF: {
4847 CALLER_SETUP_ARG(cfp, calling, ci, 0);
/* Fast path only when the call site passes simple args. */
4851 VM_CALL_METHOD_ATTR(v,
4852 vm_call_opt_struct_aref(ec, cfp, calling),
4853 set_vm_cc_ivar(cc); \
4854 CC_SET_FASTPATH(cc, vm_call_opt_struct_aref, (vm_ci_flag(ci) & VM_CALL_ARGS_SIMPLE)))
4857 case OPTIMIZED_METHOD_TYPE_STRUCT_ASET: {
4858 CALLER_SETUP_ARG(cfp, calling, ci, 1);
4862 VM_CALL_METHOD_ATTR(v,
4863 vm_call_opt_struct_aset(ec, cfp, calling),
4864 set_vm_cc_ivar(cc); \
4865 CC_SET_FASTPATH(cc, vm_call_opt_struct_aset, (vm_ci_flag(ci) & VM_CALL_ARGS_SIMPLE)))
4869 rb_bug(
"vm_call_method: unsupported optimized method type (%d)", vm_cc_cme(cc)->def->body.optimized.type);
/*
 * NOTE(review): fragment of the per-method-type dispatcher: one case per
 * VM_METHOD_TYPE_*, each installing a specialized handler in the call
 * cache and tail-calling it.  ATTRSET builds a stack-local, unmarkable
 * call cache when the shared cc is not GC-markable.  Extraction lost
 * the signature and several braces — structure is indicative only.
 */
4881 VM_ASSERT(! METHOD_ENTRY_INVALIDATED(cme));
4883 switch (cme->def->type) {
4884 case VM_METHOD_TYPE_ISEQ:
/* Forwardable-parameter iseqs need the dedicated fwd setup path. */
4885 if (ISEQ_BODY(def_iseq_ptr(cme->def))->param.flags.forwardable) {
4886 CC_SET_FASTPATH(cc, vm_call_iseq_fwd_setup, TRUE);
4887 return vm_call_iseq_fwd_setup(ec, cfp, calling);
4890 CC_SET_FASTPATH(cc, vm_call_iseq_setup, TRUE);
4891 return vm_call_iseq_setup(ec, cfp, calling);
4894 case VM_METHOD_TYPE_NOTIMPLEMENTED:
4895 case VM_METHOD_TYPE_CFUNC:
4896 CC_SET_FASTPATH(cc, vm_call_cfunc, TRUE);
4897 return vm_call_cfunc(ec, cfp, calling);
4899 case VM_METHOD_TYPE_ATTRSET:
4900 CALLER_SETUP_ARG(cfp, calling, ci, 1);
/* Any of these call-site flags disqualifies the attrset fast path. */
4904 const unsigned int aset_mask = (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT | VM_CALL_KWARG | VM_CALL_FORWARDING);
4906 if (vm_cc_markable(cc)) {
4907 vm_cc_attr_index_initialize(cc, INVALID_SHAPE_ID);
4908 VM_CALL_METHOD_ATTR(v,
4909 vm_call_attrset_direct(ec, cfp, cc, calling->recv),
4910 CC_SET_FASTPATH(cc, vm_call_attrset, !(vm_ci_flag(ci) & aset_mask)));
/* Non-markable cc: use an on-stack cache with an unset attr index. */
4916 VM_CALLCACHE_UNMARKABLE |
4917 VM_CALLCACHE_ON_STACK,
4923 .value = vm_pack_shape_and_index(INVALID_SHAPE_ID, ATTR_INDEX_NOT_SET),
4928 VM_CALL_METHOD_ATTR(v,
4929 vm_call_attrset_direct(ec, cfp, cc, calling->recv),
4930 CC_SET_FASTPATH(cc, vm_call_attrset, !(vm_ci_flag(ci) & aset_mask)));
4934 case VM_METHOD_TYPE_IVAR:
4935 CALLER_SETUP_ARG(cfp, calling, ci, 0);
4937 vm_cc_attr_index_initialize(cc, INVALID_SHAPE_ID);
4938 const unsigned int ivar_mask = (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT | VM_CALL_FORWARDING);
4939 VM_CALL_METHOD_ATTR(v,
4940 vm_call_ivar(ec, cfp, calling),
4941 CC_SET_FASTPATH(cc, vm_call_ivar, !(vm_ci_flag(ci) & ivar_mask)));
4944 case VM_METHOD_TYPE_MISSING:
4945 vm_cc_method_missing_reason_set(cc, 0);
4946 CC_SET_FASTPATH(cc, vm_call_method_missing, TRUE);
4947 return vm_call_method_missing(ec, cfp, calling);
4949 case VM_METHOD_TYPE_BMETHOD:
4950 CC_SET_FASTPATH(cc, vm_call_bmethod, TRUE);
4951 return vm_call_bmethod(ec, cfp, calling);
4953 case VM_METHOD_TYPE_ALIAS:
4954 CC_SET_FASTPATH(cc, vm_call_alias, TRUE);
4955 return vm_call_alias(ec, cfp, calling);
4957 case VM_METHOD_TYPE_OPTIMIZED:
4958 return vm_call_optimized(ec, cfp, calling, ci, cc);
4960 case VM_METHOD_TYPE_UNDEF:
4963 case VM_METHOD_TYPE_ZSUPER:
4964 return vm_call_zsuper(ec, cfp, calling, RCLASS_ORIGIN(vm_cc_cme(cc)->defined_class));
4966 case VM_METHOD_TYPE_REFINED:
4969 return vm_call_refined(ec, cfp, calling);
4972 rb_bug(
"vm_call_method: unsupported method type (%d)", vm_cc_cme(cc)->def->type);
4982 const int stat = ci_missing_reason(ci);
4984 if (vm_ci_mid(ci) == idMethodMissing) {
4985 if (UNLIKELY(calling->heap_argv)) {
4990 VALUE *argv = STACK_ADDR_FROM_TOP(calling->argc);
4991 vm_raise_method_missing(ec, calling->argc, argv, calling->recv, stat);
4995 return vm_call_method_missing_body(ec, cfp, calling, ci, stat);
5007 VALUE defined_class = me->defined_class;
5008 VALUE refined_class = RCLASS_REFINED_CLASS(defined_class);
5009 return NIL_P(refined_class) ? defined_class : refined_class;
/*
 * NOTE(review): fragment of the visibility-aware method dispatcher.
 * public  -> dispatch by method type;
 * private -> method_missing(MISSING_PRIVATE[|VCALL]) unless an FCALL;
 * protected -> method_missing unless the caller context permits it,
 *              otherwise re-dispatch with an on-stack call cache.
 * Missing method entry falls through to vm_call_method_nome.
 */
5018 VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));
5020 if (vm_cc_cme(cc) != NULL) {
5021 switch (METHOD_ENTRY_VISI(vm_cc_cme(cc))) {
5022 case METHOD_VISI_PUBLIC:
5023 return vm_call_method_each_type(ec, cfp, calling);
5025 case METHOD_VISI_PRIVATE:
/* Private methods require a receiverless (FCALL) call site. */
5026 if (!(vm_ci_flag(ci) & VM_CALL_FCALL)) {
5027 enum method_missing_reason stat = MISSING_PRIVATE;
5028 if (vm_ci_flag(ci) & VM_CALL_VCALL) stat |= MISSING_VCALL;
5030 vm_cc_method_missing_reason_set(cc, stat);
5031 CC_SET_FASTPATH(cc, vm_call_method_missing, TRUE);
5032 return vm_call_method_missing(ec, cfp, calling);
5034 return vm_call_method_each_type(ec, cfp, calling);
5036 case METHOD_VISI_PROTECTED:
5037 if (!(vm_ci_flag(ci) & (VM_CALL_OPT_SEND | VM_CALL_FCALL))) {
5038 VALUE defined_class = vm_defined_class_for_protected_call(vm_cc_cme(cc));
5040 vm_cc_method_missing_reason_set(cc, MISSING_PROTECTED);
5041 return vm_call_method_missing(ec, cfp, calling);
5045 VM_ASSERT(vm_cc_cme(cc) != NULL);
/* Protected calls use a temporary on-stack cc; do not pollute the
 * shared (inline) cache with a caller-dependent decision. */
5048 calling->cc = &cc_on_stack;
5049 return vm_call_method_each_type(ec, cfp, calling);
5052 return vm_call_method_each_type(ec, cfp, calling);
5055 rb_bug(
"unreachable");
5059 return vm_call_method_nome(ec, cfp, calling);
5066 RB_DEBUG_COUNTER_INC(ccf_general);
5067 return vm_call_method(ec, reg_cfp, calling);
5073 VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
5074 VM_ASSERT(cc != vm_cc_empty());
5076 *(vm_call_handler *)&cc->call_ = vm_call_general;
5082 RB_DEBUG_COUNTER_INC(ccf_super_method);
5087 if (ec == NULL) rb_bug(
"unreachable");
5090 VM_ASSERT(vm_cc_call(calling->cc) == vm_call_super_method);
5091 return vm_call_method(ec, reg_cfp, calling);
5097vm_search_normal_superclass(
VALUE klass)
5102 klass =
RBASIC(klass)->klass;
5104 klass = RCLASS_ORIGIN(klass);
5108NORETURN(
static void vm_super_outside(
void));
5111vm_super_outside(
void)
5117empty_cc_for_super(
void)
5119 return &vm_empty_cc_for_super;
5125 VALUE current_defined_class;
5126 const rb_iseq_t *iseq = CFP_ISEQ(reg_cfp);
5133 current_defined_class = vm_defined_class_for_protected_call(me);
5136 iseq != method_entry_iseqptr(me) &&
5139 RCLASS_INCLUDER(current_defined_class) : current_defined_class;
5143 "self has wrong type to call super in this context: "
5144 "%"PRIsVALUE
" (expected %"PRIsVALUE
")",
5149 if (me->def->type == VM_METHOD_TYPE_BMETHOD && (vm_ci_flag(cd->ci) & VM_CALL_ZSUPER)) {
5151 "implicit argument passing of super from method defined"
5152 " by define_method() is not supported."
5153 " Specify all arguments explicitly.");
5156 ID mid = me->def->original_id;
5158 if (!vm_ci_markable(cd->ci)) {
5159 VM_FORCE_WRITE((
const VALUE *)&cd->ci->mid, (
VALUE)mid);
5163 cd->ci = vm_ci_new_runtime(mid,
5166 vm_ci_kwarg(cd->ci));
5173 VALUE klass = vm_search_normal_superclass(me->defined_class);
5177 cc = vm_cc_new(
Qundef, NULL, vm_call_method_missing, cc_type_super);
5181 cc = vm_search_method_fastpath(reg_cfp, cd, klass);
5185 if (cached_cme == NULL) {
5187 cd->cc = empty_cc_for_super();
5189 else if (cached_cme->called_id != mid) {
5192 cc = vm_cc_new(klass, cme, vm_call_super_method, cc_type_super);
5196 cd->cc = cc = empty_cc_for_super();
5200 switch (cached_cme->def->type) {
5202 case VM_METHOD_TYPE_REFINED:
5204 case VM_METHOD_TYPE_ATTRSET:
5205 case VM_METHOD_TYPE_IVAR:
5206 vm_cc_call_set(cc, vm_call_super_method);
5214 VM_ASSERT((vm_cc_cme(cc),
true));
5222block_proc_is_lambda(
const VALUE procval)
5227 GetProcPtr(procval, proc);
5228 return proc->is_lambda;
5238 VALUE self,
int argc,
const VALUE *argv,
int kw_splat,
VALUE block_handler,
5241 int is_lambda = FALSE;
5242 VALUE val, arg, blockarg;
5244 const struct vm_ifunc *ifunc = captured->code.ifunc;
5249 else if (argc == 0) {
5256 blockarg = rb_vm_bh_to_procval(ec, block_handler);
5258 frame_flag = VM_FRAME_MAGIC_IFUNC | VM_FRAME_FLAG_CFRAME | (me ? VM_FRAME_FLAG_BMETHOD : 0);
5260 frame_flag |= VM_FRAME_FLAG_CFRAME_KW;
5263 vm_push_frame(ec, (
const rb_iseq_t *)captured->code.ifunc,
5266 VM_GUARDED_PREV_EP(captured->ep),
5268 0, ec->cfp->sp, 0, 0);
5269 val = (*ifunc->func)(arg, (
VALUE)ifunc->data, argc, argv, blockarg);
5270 rb_vm_pop_frame(ec);
5278 return vm_yield_with_cfunc(ec, captured, captured->self, argc, argv, 0, VM_BLOCK_HANDLER_NONE, NULL);
5284 return rb_sym_proc_call(
SYM2ID(symbol), argc, argv, kw_splat, rb_vm_bh_to_procval(ec, block_handler));
5293 CHECK_VM_STACK_OVERFLOW(cfp, ISEQ_BODY(iseq)->param.lead_num);
5295 for (i=0; i<
len && i<ISEQ_BODY(iseq)->param.lead_num; i++) {
5303vm_callee_setup_block_arg_arg0_check(
VALUE *argv)
5305 VALUE ary, arg0 = argv[0];
5310 VM_ASSERT(argv[0] == arg0);
5318 if (rb_simple_iseq_p(iseq)) {
5322 CALLER_SETUP_ARG(cfp, calling, ci, ISEQ_BODY(iseq)->param.lead_num);
5324 if (arg_setup_type == arg_setup_block &&
5325 calling->argc == 1 &&
5326 ISEQ_BODY(iseq)->param.flags.has_lead &&
5327 !ISEQ_BODY(iseq)->param.flags.ambiguous_param0 &&
5328 !
NIL_P(arg0 = vm_callee_setup_block_arg_arg0_check(argv))) {
5329 calling->argc = vm_callee_setup_block_arg_arg0_splat(cfp, iseq, argv, arg0);
5332 if (calling->argc != ISEQ_BODY(iseq)->param.lead_num) {
5333 if (arg_setup_type == arg_setup_block) {
5334 if (calling->argc < ISEQ_BODY(iseq)->param.lead_num) {
5336 CHECK_VM_STACK_OVERFLOW(cfp, ISEQ_BODY(iseq)->param.lead_num);
5337 for (i=calling->argc; i<ISEQ_BODY(iseq)->param.lead_num; i++) argv[i] =
Qnil;
5338 calling->argc = ISEQ_BODY(iseq)->param.lead_num;
5340 else if (calling->argc > ISEQ_BODY(iseq)->param.lead_num) {
5341 calling->argc = ISEQ_BODY(iseq)->param.lead_num;
5345 argument_arity_error(ec, iseq, NULL, calling->argc, ISEQ_BODY(iseq)->param.lead_num, ISEQ_BODY(iseq)->param.lead_num);
5352 return setup_parameters_complex(ec, iseq, calling, ci, argv, arg_setup_type);
5361 calling = &calling_entry;
5362 calling->argc = argc;
5363 calling->block_handler = block_handler;
5364 calling->kw_splat = (flags & VM_CALL_KW_SPLAT) ? 1 : 0;
5366 calling->heap_argv = 0;
5368 struct rb_callinfo dummy_ci = VM_CI_ON_STACK(0, flags, 0, 0);
5370 return vm_callee_setup_block_arg(ec, calling, &dummy_ci, iseq, argv, arg_setup_type);
5378 bool is_lambda,
VALUE block_handler)
5381 const rb_iseq_t *iseq = rb_iseq_check(captured->code.iseq);
5382 const int arg_size = ISEQ_BODY(iseq)->param.size;
5383 VALUE *
const rsp = GET_SP() - calling->argc;
5384 VALUE *
const argv = rsp;
5385 int opt_pc = vm_callee_setup_block_arg(ec, calling, ci, iseq, argv, is_lambda ? arg_setup_method : arg_setup_block);
5386 int frame_flag = VM_FRAME_MAGIC_BLOCK | (is_lambda ? VM_FRAME_FLAG_LAMBDA : 0);
5390 vm_push_frame(ec, iseq,
5393 VM_GUARDED_PREV_EP(captured->ep), 0,
5394 ISEQ_BODY(iseq)->iseq_encoded + opt_pc,
5396 ISEQ_BODY(iseq)->local_table_size - arg_size, ISEQ_BODY(iseq)->stack_max);
5404 MAYBE_UNUSED(
bool is_lambda),
VALUE block_handler)
5406 VALUE symbol = VM_BH_TO_SYMBOL(block_handler);
5407 int flags = vm_ci_flag(ci);
5409 if (UNLIKELY(!(flags & VM_CALL_ARGS_SIMPLE) &&
5410 ((calling->argc == 0) ||
5411 (calling->argc == 1 && (flags & (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT))) ||
5412 (calling->argc == 2 && (flags & VM_CALL_ARGS_SPLAT) && (flags & VM_CALL_KW_SPLAT)) ||
5413 ((flags & VM_CALL_KWARG) && (vm_ci_kwarg(ci)->keyword_len == calling->argc))))) {
5414 CALLER_SETUP_ARG(reg_cfp, calling, ci, ALLOW_HEAP_ARGV);
5416 if (UNLIKELY(calling->heap_argv)) {
5417#if VM_ARGC_STACK_MAX < 0
5419 rb_raise(rb_eArgError,
"no receiver given");
5425 reg_cfp->sp[-1] = reg_cfp->sp[-2];
5426 reg_cfp->sp[-2] = calling->recv;
5427 flags |= VM_CALL_ARGS_SPLAT;
5430 if (calling->argc < 1) {
5431 rb_raise(rb_eArgError,
"no receiver given");
5433 calling->recv = TOPN(--calling->argc);
5435 if (calling->kw_splat) {
5436 flags |= VM_CALL_KW_SPLAT;
5440 if (calling->argc < 1) {
5441 rb_raise(rb_eArgError,
"no receiver given");
5443 calling->recv = TOPN(--calling->argc);
5446 return vm_call_symbol(ec, reg_cfp, calling, ci, symbol, flags);
5452 MAYBE_UNUSED(
bool is_lambda),
VALUE block_handler)
5457 CALLER_SETUP_ARG(ec->cfp, calling, ci, ALLOW_HEAP_ARGV_KEEP_KWSPLAT);
5458 argc = calling->argc;
5459 val = vm_yield_with_cfunc(ec, captured, captured->self, CALLING_ARGC(calling), calling->heap_argv ?
RARRAY_CONST_PTR(calling->heap_argv) : STACK_ADDR_FROM_TOP(argc), calling->kw_splat, calling->block_handler, NULL);
/* Convert a Proc object into the VM's tagged block-handler form,
 * according to the kind of block the proc captures.
 * (Return type and braces lost in extraction.) */
5465vm_proc_to_block_handler(
VALUE procval)
5467 const struct rb_block *block = vm_proc_block(procval);
5469 switch (vm_block_type(block)) {
5470 case block_type_iseq:
5471 return VM_BH_FROM_ISEQ_BLOCK(&block->as.captured);
5472 case block_type_ifunc:
5473 return VM_BH_FROM_IFUNC_BLOCK(&block->as.captured);
5474 case block_type_symbol:
5475 return VM_BH_FROM_SYMBOL(block->as.symbol);
5476 case block_type_proc:
5477 return VM_BH_FROM_PROC(block->as.proc);
5479 VM_UNREACHABLE(vm_yield_with_proc);
5486 bool is_lambda,
VALUE block_handler)
5488 while (vm_block_handler_type(block_handler) == block_handler_type_proc) {
5489 VALUE proc = VM_BH_TO_PROC(block_handler);
5490 is_lambda = block_proc_is_lambda(proc);
5491 block_handler = vm_proc_to_block_handler(proc);
5494 return vm_invoke_block(ec, reg_cfp, calling, ci, is_lambda, block_handler);
5500 bool is_lambda,
VALUE block_handler)
5504 bool is_lambda,
VALUE block_handler);
/*
 * NOTE(review): fragment of the block-invocation dispatcher.  Selects
 * the invoker matching the block handler's type (iseq / ifunc / proc /
 * symbol) and tail-calls it with the same arguments.
 */
5506 switch (vm_block_handler_type(block_handler)) {
5507 case block_handler_type_iseq: func = vm_invoke_iseq_block;
break;
5508 case block_handler_type_ifunc: func = vm_invoke_ifunc_block;
break;
5509 case block_handler_type_proc: func = vm_invoke_proc_block;
break;
5510 case block_handler_type_symbol: func = vm_invoke_symbol_block;
break;
5511 default: rb_bug(
"vm_invoke_block: unreachable");
5514 return func(ec, reg_cfp, calling, ci, is_lambda, block_handler);
5518vm_make_proc_with_iseq(
const rb_iseq_t *blockiseq)
5525 rb_bug(
"vm_make_proc_with_iseq: unreachable");
5528 captured = VM_CFP_TO_CAPTURED_BLOCK(cfp);
5529 captured->code.iseq = blockiseq;
5531 return rb_vm_make_proc(ec, captured,
rb_cProc);
5535vm_once_exec(
VALUE iseq)
5542vm_once_clear(
VALUE data)
5545 is->once.running_thread = NULL;
5557 args[0] = obj; args[1] =
Qfalse;
5559 if (!UNDEF_P(r) &&
RTEST(r)) {
5571 enum defined_type
type = (
enum defined_type)op_type;
5578 return rb_gvar_defined(
SYM2ID(obj));
5580 case DEFINED_CVAR: {
5581 const rb_cref_t *cref = vm_get_cref(GET_EP());
5582 klass = vm_get_cvar_base(cref, GET_CFP(), 0);
5587 case DEFINED_CONST_FROM: {
5588 bool allow_nil =
type == DEFINED_CONST;
5590 return vm_get_ev_const(ec, klass,
SYM2ID(obj), allow_nil,
true);
5595 return rb_ec_obj_respond_to(ec, v,
SYM2ID(obj), TRUE);
5597 case DEFINED_METHOD:{
5602 switch (METHOD_ENTRY_VISI(me)) {
5603 case METHOD_VISI_PRIVATE:
5605 case METHOD_VISI_PROTECTED:
5609 case METHOD_VISI_PUBLIC:
5613 rb_bug(
"vm_defined: unreachable: %u", (
unsigned int)METHOD_ENTRY_VISI(me));
5617 return check_respond_to_missing(obj, v);
5622 if (GET_BLOCK_HANDLER() != VM_BLOCK_HANDLER_NONE) {
5626 case DEFINED_ZSUPER:
5631 VALUE klass = vm_search_normal_superclass(me->defined_class);
5632 if (!klass)
return false;
5634 ID id = me->def->original_id;
5641 return RTEST(vm_backref_defined(ec, GET_LEP(),
FIX2INT(obj)));
5643 rb_bug(
"unimplemented defined? type (VM)");
5653 return vm_defined(ec, reg_cfp, op_type, obj, v);
5657vm_get_ep(
const VALUE *
const reg_ep, rb_num_t lv)
5660 const VALUE *ep = reg_ep;
5661 for (i = 0; i < lv; i++) {
5662 ep = GET_PREV_EP(ep);
5668vm_get_special_object(
const VALUE *
const reg_ep,
5669 enum vm_special_object_type
type)
5672 case VM_SPECIAL_OBJECT_VMCORE:
5673 return rb_mRubyVMFrozenCore;
5674 case VM_SPECIAL_OBJECT_CBASE:
5675 return vm_get_cbase(reg_ep);
5676 case VM_SPECIAL_OBJECT_CONST_BASE:
5677 return vm_get_const_base(reg_ep);
5679 rb_bug(
"putspecialobject insn: unknown value_type %d",
type);
5686rb_vm_get_special_object(
const VALUE *reg_ep,
enum vm_special_object_type
type)
5688 return vm_get_special_object(reg_ep,
type);
5694 const VALUE ary2 = ary2st;
5695 VALUE tmp1 = rb_check_to_array(ary1);
5696 VALUE tmp2 = rb_check_to_array(ary2);
5717 const VALUE ary2 = ary2st;
5719 if (
NIL_P(ary2))
return ary1;
5721 VALUE tmp2 = rb_check_to_array(ary2);
5736 return vm_concat_array(ary1, ary2st);
5740rb_vm_concat_to_array(
VALUE ary1,
VALUE ary2st)
5742 return vm_concat_to_array(ary1, ary2st);
5751 VALUE tmp = rb_check_to_array(ary);
5755 else if (
RTEST(flag)) {
5768 return vm_splat_array(flag, ary);
5774 enum vm_check_match_type
type = ((int)flag) & VM_CHECKMATCH_TYPE_MASK;
5776 if (flag & VM_CHECKMATCH_ARRAY) {
5780 for (i = 0; i < n; i++) {
5782 VALUE c = check_match(ec, v, target,
type);
5791 return check_match(ec, pattern, target,
type);
5798 return vm_check_match(ec, target, pattern, flag);
5802vm_check_keyword(lindex_t bits, lindex_t idx,
const VALUE *ep)
5804 const VALUE kw_bits = *(ep - bits);
5807 unsigned int b = (
unsigned int)
FIX2ULONG(kw_bits);
5808 if ((idx < VM_KW_SPECIFIED_BITS_MAX) && (b & (0x01 << idx)))
5821 if (RUBY_DTRACE_METHOD_ENTRY_ENABLED() ||
5822 RUBY_DTRACE_METHOD_RETURN_ENABLED() ||
5823 RUBY_DTRACE_CMETHOD_ENTRY_ENABLED() ||
5824 RUBY_DTRACE_CMETHOD_RETURN_ENABLED()) {
5828 RUBY_DTRACE_METHOD_ENTRY_HOOK(ec, 0, 0);
5831 RUBY_DTRACE_CMETHOD_ENTRY_HOOK(ec, 0, 0);
5834 RUBY_DTRACE_METHOD_RETURN_HOOK(ec, 0, 0);
5837 RUBY_DTRACE_CMETHOD_RETURN_HOOK(ec, 0, 0);
5844vm_const_get_under(
ID id, rb_num_t flags,
VALUE cbase)
5849 else if (VM_DEFINECLASS_SCOPED_P(flags)) {
5850 return rb_public_const_get_at(cbase,
id);
5858vm_check_if_class(
ID id, rb_num_t flags,
VALUE super,
VALUE klass)
5863 else if (VM_DEFINECLASS_HAS_SUPERCLASS_P(flags)) {
5868 "superclass mismatch for class %"PRIsVALUE
"",
5881vm_check_if_module(
ID id,
VALUE mod)
5900vm_declare_class(
ID id, rb_num_t flags,
VALUE cbase,
VALUE super)
5903 VALUE s = VM_DEFINECLASS_HAS_SUPERCLASS_P(flags) ? super :
rb_cObject;
5910vm_declare_module(
ID id,
VALUE cbase)
5916NORETURN(
static void unmatched_redefinition(
const char *
type,
VALUE cbase,
ID id,
VALUE old));
5920 VALUE name = rb_id2str(
id);
5921 VALUE message = rb_sprintf(
"%"PRIsVALUE
" is not a %s",
5923 VALUE location = rb_const_source_location_at(cbase,
id);
5924 if (!
NIL_P(location)) {
5925 rb_str_catf(message,
"\n%"PRIsVALUE
":%"PRIsVALUE
":"
5926 " previous definition of %"PRIsVALUE
" was here",
5933vm_define_class(
ID id, rb_num_t flags,
VALUE cbase,
VALUE super)
5937 if (VM_DEFINECLASS_HAS_SUPERCLASS_P(flags) && !
RB_TYPE_P(super,
T_CLASS)) {
5939 "superclass must be an instance of Class (given an instance of %"PRIsVALUE
")",
5943 vm_check_if_namespace(cbase);
5948 if ((klass = vm_const_get_under(
id, flags, cbase)) != 0) {
5949 if (!vm_check_if_class(
id, flags, super, klass))
5950 unmatched_redefinition(
"class", cbase,
id, klass);
5954 return vm_declare_class(
id, flags, cbase, super);
5959vm_define_module(
ID id, rb_num_t flags,
VALUE cbase)
5963 vm_check_if_namespace(cbase);
5964 if ((mod = vm_const_get_under(
id, flags, cbase)) != 0) {
5965 if (!vm_check_if_module(
id, mod))
5966 unmatched_redefinition(
"module", cbase,
id, mod);
5970 return vm_declare_module(
id, cbase);
5975vm_find_or_create_class_by_id(
ID id,
5980 rb_vm_defineclass_type_t
type = VM_DEFINECLASS_TYPE(flags);
5983 case VM_DEFINECLASS_TYPE_CLASS:
5985 return vm_define_class(
id, flags, cbase, super);
5987 case VM_DEFINECLASS_TYPE_SINGLETON_CLASS:
5991 case VM_DEFINECLASS_TYPE_MODULE:
5993 return vm_define_module(
id, flags, cbase);
5996 rb_bug(
"unknown defineclass type: %d", (
int)
type);
6000static rb_method_visibility_t
6005 if (!vm_env_cref_by_cref(cfp->ep)) {
6006 return METHOD_VISI_PUBLIC;
6009 return CREF_SCOPE_VISI(vm_ec_cref(ec))->method_visi;
6018 if (!vm_env_cref_by_cref(cfp->ep)) {
6022 return CREF_SCOPE_VISI(vm_ec_cref(ec))->module_func;
6030 rb_method_visibility_t visi;
6035 visi = METHOD_VISI_PUBLIC;
6038 klass = CREF_CLASS_FOR_DEFINITION(cref);
6039 visi = vm_scope_visibility_get(ec);
6046 rb_add_method_iseq(klass,
id, (
const rb_iseq_t *)iseqval, cref, visi);
6049 !RCLASS_SINGLETON_P(klass) &&
6051 RCLASS_SET_MAX_IV_COUNT(klass, rb_estimate_iv_count(klass, (
const rb_iseq_t *)iseqval));
6054 if (!is_singleton && vm_scope_module_func_check(ec)) {
6056 rb_add_method_iseq(klass,
id, (
const rb_iseq_t *)iseqval, cref, METHOD_VISI_PUBLIC);
6065rb_vm_untag_block_handler(
VALUE block_handler)
6067 if (VM_BLOCK_HANDLER_NONE == block_handler)
return Qnil;
6069 switch (vm_block_handler_type(block_handler)) {
6070 case block_handler_type_iseq:
6071 case block_handler_type_ifunc: {
6073 return captured->code.val;
6075 case block_handler_type_proc:
6076 case block_handler_type_symbol:
6077 return block_handler;
6079 rb_bug(
"rb_vm_untag_block_handler: unreachable");
6086 return rb_vm_untag_block_handler(VM_CF_BLOCK_HANDLER(reg_cfp));
6095 VALUE block_handler = VM_CF_BLOCK_HANDLER(GET_CFP());
6097 if (block_handler == VM_BLOCK_HANDLER_NONE) {
6098 rb_vm_localjump_error(
"no block given (yield)",
Qnil, 0);
6101 return vm_invoke_block(ec, GET_CFP(), calling, ci,
false, block_handler);
6105enum method_explorer_type {
6107 mexp_search_invokeblock,
6116 VALUE block_handler,
6117 enum method_explorer_type method_explorer
6122 int argc = vm_ci_argc(ci);
6123 VALUE recv = TOPN(argc);
6125 .block_handler = block_handler,
6126 .kw_splat = IS_ARGS_KW_SPLAT(ci) > 0,
6132 switch (method_explorer) {
6133 case mexp_search_method:
6134 calling.cc = cc = vm_search_method_fastpath(reg_cfp, cd,
CLASS_OF(recv));
6135 val = vm_cc_call(cc)(ec, GET_CFP(), &calling);
6137 case mexp_search_super:
6138 calling.cc = cc = vm_search_super_method(reg_cfp, cd, recv);
6139 val = vm_cc_call(cc)(ec, GET_CFP(), &calling);
6141 case mexp_search_invokeblock:
6142 val = vm_invokeblock_i(ec, GET_CFP(), &calling);
6152 VALUE bh = vm_caller_setup_arg_block(ec, GET_CFP(), cd->ci, blockiseq,
false);
6153 VALUE val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_method);
6167 VALUE bh = vm_caller_setup_fwd_args(GET_EC(), GET_CFP(), cd, blockiseq,
false, &adjusted_cd, &adjusted_ci);
6169 VALUE val = vm_sendish(ec, GET_CFP(), &adjusted_cd.cd, bh, mexp_search_method);
6171 if (cd->cc != adjusted_cd.cd.cc && vm_cc_markable(adjusted_cd.cd.cc)) {
6172 RB_OBJ_WRITE(CFP_ISEQ(GET_CFP()), &cd->cc, adjusted_cd.cd.cc);
6183 VALUE bh = VM_BLOCK_HANDLER_NONE;
6184 VALUE val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_method);
6194 VALUE bh = vm_caller_setup_arg_block(ec, GET_CFP(), cd->ci, blockiseq,
true);
6195 VALUE val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_super);
6209 VALUE bh = vm_caller_setup_fwd_args(GET_EC(), GET_CFP(), cd, blockiseq,
true, &adjusted_cd, &adjusted_ci);
6211 VALUE val = vm_sendish(ec, GET_CFP(), &adjusted_cd.cd, bh, mexp_search_super);
6213 if (cd->cc != adjusted_cd.cd.cc && vm_cc_markable(adjusted_cd.cd.cc)) {
6214 RB_OBJ_WRITE(CFP_ISEQ(GET_CFP()), &cd->cc, adjusted_cd.cd.cc);
6225 VALUE bh = VM_BLOCK_HANDLER_NONE;
6226 VALUE val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_invokeblock);
6254 if (check_method_basic_definition(cme)) {
6263 if (check_cfunc(cme, rb_mod_to_s)) {
6269 val = rb_mod_to_s(recv);
6275 if (check_cfunc(cme, rb_nil_to_s)) {
6276 return rb_nil_to_s(recv);
6280 if (check_cfunc(cme, rb_true_to_s)) {
6281 return rb_true_to_s(recv);
6285 if (check_cfunc(cme, rb_false_to_s)) {
6286 return rb_false_to_s(recv);
6290 if (check_cfunc(cme, rb_int_to_s)) {
6291 return rb_fix_to_s(recv);
6303 return vm_objtostring(reg_cfp, recv, cd);
6307vm_opt_ary_freeze(
VALUE ary,
int bop,
ID id)
6309 if (BASIC_OP_UNREDEFINED_P(bop, ARRAY_REDEFINED_OP_FLAG)) {
6318vm_opt_hash_freeze(
VALUE hash,
int bop,
ID id)
6320 if (BASIC_OP_UNREDEFINED_P(bop, HASH_REDEFINED_OP_FLAG)) {
6329vm_opt_str_freeze(
VALUE str,
int bop,
ID id)
6331 if (BASIC_OP_UNREDEFINED_P(bop, STRING_REDEFINED_OP_FLAG)) {
6345 if (BASIC_OP_UNREDEFINED_P(BOP_INCLUDE_P, ARRAY_REDEFINED_OP_FLAG)) {
6349 VALUE args[1] = {target};
6352 RUBY_DTRACE_CREATE_HOOK(ARRAY,
RARRAY_LEN(ary));
6355 return rb_vm_call_with_refinements(ec, dupary, idIncludeP, 1, args,
RB_NO_KEYWORDS);
6362 return vm_opt_duparray_include_p(ec, ary, target);
6368 if (BASIC_OP_UNREDEFINED_P(BOP_MAX, ARRAY_REDEFINED_OP_FLAG)) {
6369 if (array_len == 0) {
6373 VALUE result = *ptr;
6374 rb_snum_t i = array_len - 1;
6376 const VALUE v = *++ptr;
6377 if (OPTIMIZED_CMP(v, result) > 0) {
6392 return vm_opt_newarray_max(ec, array_len, ptr);
6398 if (BASIC_OP_UNREDEFINED_P(BOP_MIN, ARRAY_REDEFINED_OP_FLAG)) {
6399 if (array_len == 0) {
6403 VALUE result = *ptr;
6404 rb_snum_t i = array_len - 1;
6406 const VALUE v = *++ptr;
6407 if (OPTIMIZED_CMP(v, result) < 0) {
6422 return vm_opt_newarray_min(ec, array_len, ptr);
6429 if (BASIC_OP_UNREDEFINED_P(BOP_HASH, ARRAY_REDEFINED_OP_FLAG)) {
6430 return rb_ary_hash_values(array_len, ptr);
6440 return vm_opt_newarray_hash(ec, array_len, ptr);
6449 if (BASIC_OP_UNREDEFINED_P(BOP_INCLUDE_P, ARRAY_REDEFINED_OP_FLAG)) {
6450 struct RArray fake_ary = {RBASIC_INIT};
6451 VALUE ary = rb_setup_fake_ary(&fake_ary,
ptr, array_len);
6455 VALUE args[1] = {target};
6463 return vm_opt_newarray_include_p(ec, array_len,
ptr, target);
6469 if (BASIC_OP_UNREDEFINED_P(BOP_PACK, ARRAY_REDEFINED_OP_FLAG)) {
6470 struct RArray fake_ary = {RBASIC_INIT};
6471 VALUE ary = rb_setup_fake_ary(&fake_ary,
ptr, array_len);
6472 return rb_ec_pack_ary(ec,
ary, fmt, (UNDEF_P(buffer) ?
Qnil : buffer));
6482 if (!UNDEF_P(buffer)) {
6483 args[1] = rb_hash_new_with_size(1);
6484 rb_hash_aset(args[1],
ID2SYM(idBuffer), buffer);
6489 return rb_vm_call_with_refinements(ec,
rb_ary_new4(array_len,
ptr), idPack, argc, args, kw_splat);
6496 return vm_opt_newarray_pack_buffer(ec, array_len,
ptr, fmt, buffer);
6502 return vm_opt_newarray_pack_buffer(ec, array_len,
ptr, fmt,
Qundef);
6508vm_track_constant_cache(
ID id,
void *ic)
6511 struct rb_id_table *const_cache = &vm->constant_cache;
6512 VALUE lookup_result;
6515 if (rb_id_table_lookup(const_cache,
id, &lookup_result)) {
6519 ics = set_init_numtable();
6520 rb_id_table_insert(const_cache,
id, (
VALUE)ics);
6535 vm->inserting_constant_cache_id = id;
6537 set_insert(ics, (st_data_t)ic);
6539 vm->inserting_constant_cache_id = (
ID)0;
6546 for (
int i = 0; segments[i]; i++) {
6547 ID id = segments[i];
6548 if (
id == idNULL)
continue;
6549 vm_track_constant_cache(
id, ic);
6558 if ((flags & IMEMO_CONST_CACHE_SHAREABLE) || rb_ractor_main_p()) {
6559 VM_ASSERT(ractor_incidental_shareable_p(flags & IMEMO_CONST_CACHE_SHAREABLE, value));
6561 return (ic_cref == NULL ||
6562 ic_cref == vm_get_cref(reg_ep));
6570 VM_ASSERT(IMEMO_TYPE_P(ice, imemo_constcache));
6571 return vm_inlined_ic_hit_p(ice->flags, ice->value, ice->ic_cref, reg_ep);
6576rb_vm_ic_hit_p(
IC ic,
const VALUE *reg_ep)
6578 return ic->entry && vm_ic_hit_p(ic->entry, reg_ep);
6584 if (ruby_vm_const_missing_count > 0) {
6585 ruby_vm_const_missing_count = 0;
6592 ice->ic_cref = vm_get_const_key_cref(reg_ep);
6596 ice->flags |= IMEMO_CONST_CACHE_SHAREABLE;
6600 unsigned pos = (unsigned)(pc - ISEQ_BODY(iseq)->iseq_encoded);
6601 rb_yjit_constant_ic_update(iseq, ic, pos);
6611 if (ice && vm_ic_hit_p(ice, GET_EP())) {
6614 VM_ASSERT(val == vm_get_ev_const_chain(ec, segments));
6617 ruby_vm_constant_cache_misses++;
6618 val = vm_get_ev_const_chain(ec, segments);
6619 vm_ic_track_const_chain(GET_CFP(), ic, segments);
6622 vm_ic_update(CFP_ISEQ(GET_CFP()), ic, val, GET_EP(), CFP_PC(GET_CFP()) - 2);
6634 if (is->once.running_thread == RUNNING_THREAD_ONCE_DONE) {
6635 return is->once.value;
6637 else if (is->once.running_thread == NULL) {
6639 is->once.running_thread = th;
6644 RB_OBJ_SET_SHAREABLE(val);
6650 is->once.running_thread = RUNNING_THREAD_ONCE_DONE;
6653 else if (is->once.running_thread == th) {
6655 return vm_once_exec((
VALUE)iseq);
6659 RUBY_VM_CHECK_INTS(ec);
6666vm_case_dispatch(CDHASH hash, OFFSET else_offset,
VALUE key)
6668 switch (OBJ_BUILTIN_TYPE(key)) {
6674 if (BASIC_OP_UNREDEFINED_P(BOP_EQQ,
6675 SYMBOL_REDEFINED_OP_FLAG |
6676 INTEGER_REDEFINED_OP_FLAG |
6677 FLOAT_REDEFINED_OP_FLAG |
6678 NIL_REDEFINED_OP_FLAG |
6679 TRUE_REDEFINED_OP_FLAG |
6680 FALSE_REDEFINED_OP_FLAG |
6681 STRING_REDEFINED_OP_FLAG)) {
6685 if (!isinf(kval) && modf(kval, &kval) == 0.0) {
6689 if (rb_hash_stlike_lookup(hash, key, &val)) {
6709 const ptrdiff_t nsp = VM_SP_CNT(ec, cfp->sp);
6710 const ptrdiff_t nbp = VM_SP_CNT(ec, bp);
6711 static const char stack_consistency_error[] =
6712 "Stack consistency error (sp: %"PRIdPTRDIFF
", bp: %"PRIdPTRDIFF
")";
6713#if defined RUBY_DEVEL
6714 VALUE mesg = rb_sprintf(stack_consistency_error, nsp, nbp);
6719 rb_bug(stack_consistency_error, nsp, nbp);
6726 if (FIXNUM_2_P(recv, obj) &&
6727 BASIC_OP_UNREDEFINED_P(BOP_PLUS, INTEGER_REDEFINED_OP_FLAG)) {
6728 return rb_fix_plus_fix(recv, obj);
6730 else if (FLONUM_2_P(recv, obj) &&
6731 BASIC_OP_UNREDEFINED_P(BOP_PLUS, FLOAT_REDEFINED_OP_FLAG)) {
6739 BASIC_OP_UNREDEFINED_P(BOP_PLUS, FLOAT_REDEFINED_OP_FLAG)) {
6744 BASIC_OP_UNREDEFINED_P(BOP_PLUS, STRING_REDEFINED_OP_FLAG)) {
6745 return rb_str_opt_plus(recv, obj);
6749 BASIC_OP_UNREDEFINED_P(BOP_PLUS, ARRAY_REDEFINED_OP_FLAG)) {
6760 if (FIXNUM_2_P(recv, obj) &&
6761 BASIC_OP_UNREDEFINED_P(BOP_MINUS, INTEGER_REDEFINED_OP_FLAG)) {
6762 return rb_fix_minus_fix(recv, obj);
6764 else if (FLONUM_2_P(recv, obj) &&
6765 BASIC_OP_UNREDEFINED_P(BOP_MINUS, FLOAT_REDEFINED_OP_FLAG)) {
6773 BASIC_OP_UNREDEFINED_P(BOP_MINUS, FLOAT_REDEFINED_OP_FLAG)) {
6784 if (FIXNUM_2_P(recv, obj) &&
6785 BASIC_OP_UNREDEFINED_P(BOP_MULT, INTEGER_REDEFINED_OP_FLAG)) {
6786 return rb_fix_mul_fix(recv, obj);
6788 else if (FLONUM_2_P(recv, obj) &&
6789 BASIC_OP_UNREDEFINED_P(BOP_MULT, FLOAT_REDEFINED_OP_FLAG)) {
6797 BASIC_OP_UNREDEFINED_P(BOP_MULT, FLOAT_REDEFINED_OP_FLAG)) {
6808 if (FIXNUM_2_P(recv, obj) &&
6809 BASIC_OP_UNREDEFINED_P(BOP_DIV, INTEGER_REDEFINED_OP_FLAG)) {
6810 return (
FIX2LONG(obj) == 0) ?
Qundef : rb_fix_div_fix(recv, obj);
6812 else if (FLONUM_2_P(recv, obj) &&
6813 BASIC_OP_UNREDEFINED_P(BOP_DIV, FLOAT_REDEFINED_OP_FLAG)) {
6814 return rb_flo_div_flo(recv, obj);
6821 BASIC_OP_UNREDEFINED_P(BOP_DIV, FLOAT_REDEFINED_OP_FLAG)) {
6822 return rb_flo_div_flo(recv, obj);
6832 if (FIXNUM_2_P(recv, obj) &&
6833 BASIC_OP_UNREDEFINED_P(BOP_MOD, INTEGER_REDEFINED_OP_FLAG)) {
6834 return (
FIX2LONG(obj) == 0) ?
Qundef : rb_fix_mod_fix(recv, obj);
6836 else if (FLONUM_2_P(recv, obj) &&
6837 BASIC_OP_UNREDEFINED_P(BOP_MOD, FLOAT_REDEFINED_OP_FLAG)) {
6845 BASIC_OP_UNREDEFINED_P(BOP_MOD, FLOAT_REDEFINED_OP_FLAG)) {
6856 if (vm_method_cfunc_is(reg_cfp, cd, recv, rb_obj_not_equal)) {
6857 VALUE val = opt_equality(reg_cfp, recv, obj, cd_eq);
6859 if (!UNDEF_P(val)) {
6860 return RBOOL(!
RTEST(val));
6870 if (FIXNUM_2_P(recv, obj) &&
6871 BASIC_OP_UNREDEFINED_P(BOP_LT, INTEGER_REDEFINED_OP_FLAG)) {
6874 else if (FLONUM_2_P(recv, obj) &&
6875 BASIC_OP_UNREDEFINED_P(BOP_LT, FLOAT_REDEFINED_OP_FLAG)) {
6883 BASIC_OP_UNREDEFINED_P(BOP_LT, FLOAT_REDEFINED_OP_FLAG)) {
6894 if (FIXNUM_2_P(recv, obj) &&
6895 BASIC_OP_UNREDEFINED_P(BOP_LE, INTEGER_REDEFINED_OP_FLAG)) {
6898 else if (FLONUM_2_P(recv, obj) &&
6899 BASIC_OP_UNREDEFINED_P(BOP_LE, FLOAT_REDEFINED_OP_FLAG)) {
6907 BASIC_OP_UNREDEFINED_P(BOP_LE, FLOAT_REDEFINED_OP_FLAG)) {
6918 if (FIXNUM_2_P(recv, obj) &&
6919 BASIC_OP_UNREDEFINED_P(BOP_GT, INTEGER_REDEFINED_OP_FLAG)) {
6922 else if (FLONUM_2_P(recv, obj) &&
6923 BASIC_OP_UNREDEFINED_P(BOP_GT, FLOAT_REDEFINED_OP_FLAG)) {
6931 BASIC_OP_UNREDEFINED_P(BOP_GT, FLOAT_REDEFINED_OP_FLAG)) {
6942 if (FIXNUM_2_P(recv, obj) &&
6943 BASIC_OP_UNREDEFINED_P(BOP_GE, INTEGER_REDEFINED_OP_FLAG)) {
6946 else if (FLONUM_2_P(recv, obj) &&
6947 BASIC_OP_UNREDEFINED_P(BOP_GE, FLOAT_REDEFINED_OP_FLAG)) {
6955 BASIC_OP_UNREDEFINED_P(BOP_GE, FLOAT_REDEFINED_OP_FLAG)) {
6971 BASIC_OP_UNREDEFINED_P(BOP_LTLT, STRING_REDEFINED_OP_FLAG)) {
6980 BASIC_OP_UNREDEFINED_P(BOP_LTLT, ARRAY_REDEFINED_OP_FLAG)) {
6998 BASIC_OP_UNREDEFINED_P(BOP_AND, INTEGER_REDEFINED_OP_FLAG)) {
7009 if (FIXNUM_2_P(recv, obj) &&
7010 BASIC_OP_UNREDEFINED_P(BOP_OR, INTEGER_REDEFINED_OP_FLAG)) {
7022 if (FIXNUM_2_P(recv, obj) &&
7023 BASIC_OP_UNREDEFINED_P(BOP_AREF, INTEGER_REDEFINED_OP_FLAG)) {
7024 return rb_fix_aref(recv, obj);
7029 BASIC_OP_UNREDEFINED_P(BOP_AREF, ARRAY_REDEFINED_OP_FLAG)) {
7031 return rb_ary_entry_internal(recv,
FIX2LONG(obj));
7034 return rb_ary_aref1(recv, obj);
7038 BASIC_OP_UNREDEFINED_P(BOP_AREF, HASH_REDEFINED_OP_FLAG)) {
7039 return rb_hash_aref(recv, obj);
7053 BASIC_OP_UNREDEFINED_P(BOP_ASET, ARRAY_REDEFINED_OP_FLAG) &&
7059 BASIC_OP_UNREDEFINED_P(BOP_ASET, HASH_REDEFINED_OP_FLAG)) {
7060 rb_hash_aset(recv, obj, set);
7069vm_opt_length(
VALUE recv,
int bop)
7075 BASIC_OP_UNREDEFINED_P(bop, STRING_REDEFINED_OP_FLAG)) {
7076 if (bop == BOP_EMPTY_P) {
7077 return LONG2NUM(RSTRING_LEN(recv));
7084 BASIC_OP_UNREDEFINED_P(bop, ARRAY_REDEFINED_OP_FLAG)) {
7088 BASIC_OP_UNREDEFINED_P(bop, HASH_REDEFINED_OP_FLAG)) {
7097vm_opt_empty_p(
VALUE recv)
7099 switch (vm_opt_length(recv, BOP_EMPTY_P)) {
7112 BASIC_OP_UNREDEFINED_P(BOP_NIL_P, NIL_REDEFINED_OP_FLAG)) {
7115 else if (vm_method_cfunc_is(reg_cfp, cd, recv, rb_false)) {
7131 case RSHIFT(~0UL, 1):
7134 return rb_uint2big(1UL << (SIZEOF_LONG * CHAR_BIT - 2));
7152vm_opt_succ(
VALUE recv)
7155 BASIC_OP_UNREDEFINED_P(BOP_SUCC, INTEGER_REDEFINED_OP_FLAG)) {
7156 return fix_succ(recv);
7162 BASIC_OP_UNREDEFINED_P(BOP_SUCC, STRING_REDEFINED_OP_FLAG)) {
7173 if (vm_method_cfunc_is(reg_cfp, cd, recv, rb_obj_not)) {
7174 return RBOOL(!
RTEST(recv));
7189 BASIC_OP_UNREDEFINED_P(BOP_MATCH, STRING_REDEFINED_OP_FLAG)) {
7193 BASIC_OP_UNREDEFINED_P(BOP_MATCH, REGEXP_REDEFINED_OP_FLAG)) {
7211 VALUE self = GET_SELF();
7213 VM_ASSERT(rb_popcount64((uint64_t)event) == 1);
7215 if (local_hooks) local_hooks->running++;
7217 if (event & global_hooks->events) {
7220 vm_dtrace(event, ec);
7221 rb_exec_event_hook_orig(ec, global_hooks, event, self, 0, 0, 0 , val, 0);
7225 if (local_hooks) local_hooks->running--;
7226 if (local_hooks != NULL) {
7227 if (event & local_hooks->events) {
7230 rb_exec_event_hook_orig(ec, local_hooks, event, self, 0, 0, 0 , val, 0);
7236#define VM_TRACE_HOOK(target_event, val) do { \
7237 if ((pc_events & (target_event)) & enabled_flags) { \
7238 vm_trace_hook(ec, reg_cfp, pc, pc_events, (target_event), global_hooks, local_hooks, (val)); \
7245 VM_ASSERT(VM_FRAME_RUBYFRAME_P(cfp));
7246 VM_ASSERT(ISEQ_BODY(CFP_ISEQ(cfp))->
type == ISEQ_TYPE_RESCUE);
7247 return cfp->ep[VM_ENV_INDEX_LAST_LVAR];
7253 const VALUE *pc = reg_cfp->pc;
7255 rb_event_flag_t enabled_flags = r->pub.hooks.events & ISEQ_TRACE_EVENTS;
7258 if (enabled_flags == 0 && rb_ractor_targeted_hooks_cnt(r) == 0) {
7262 const rb_iseq_t *iseq = CFP_ISEQ(reg_cfp);
7263 size_t pos = pc - ISEQ_BODY(iseq)->iseq_encoded;
7265 unsigned int local_hooks_cnt = iseq->aux.exec.local_hooks_cnt;
7267 if (RB_UNLIKELY(local_hooks_cnt > 0)) {
7269 if (st_lookup(rb_ractor_targeted_hooks(r), (st_data_t)iseq, &val)) {
7273 rb_event_flag_t iseq_local_events = local_hooks != NULL ? local_hooks->events : 0;
7277 const bool bmethod_frame = VM_FRAME_BMETHOD_P(reg_cfp);
7278 enabled_flags |= iseq_local_events;
7280 VM_ASSERT((iseq_local_events & ~ISEQ_TRACE_EVENTS) == 0);
7282 if (bmethod_frame) {
7284 VM_ASSERT(me->def->type == VM_METHOD_TYPE_BMETHOD);
7285 unsigned int bmethod_hooks_cnt = me->def->body.bmethod.local_hooks_cnt;
7286 if (RB_UNLIKELY(bmethod_hooks_cnt > 0)) {
7288 if (st_lookup(rb_ractor_targeted_hooks(r), (st_data_t)me->def, &val)) {
7291 if (bmethod_local_hooks) {
7292 bmethod_local_events = bmethod_local_hooks->events;
7297 if ((pc_events & enabled_flags) == 0 && !bmethod_frame) {
7301 rb_iseq_trace_set(iseq, vm_event_flags & ISEQ_TRACE_EVENTS);
7309 else if (ec->trace_arg != NULL) {
7317 rb_event_flag_t bmethod_events = ractor_events | bmethod_local_events;
7320 ruby_debug_printf(
"vm_trace>>%4d (%4x) - %s:%d %s\n",
7323 RSTRING_PTR(rb_iseq_path(iseq)),
7324 (
int)rb_iseq_line_no(iseq, pos),
7325 RSTRING_PTR(rb_iseq_label(iseq)));
7327 VM_ASSERT(reg_cfp->pc == pc);
7328 VM_ASSERT(pc_events != 0);
7338 VM_TRACE_HOOK(RUBY_EVENT_COVERAGE_LINE,
Qundef);
7339 VM_TRACE_HOOK(RUBY_EVENT_COVERAGE_BRANCH,
Qundef);
7350#if VM_CHECK_MODE > 0
7351NORETURN( NOINLINE( COLDFUNC
7352void rb_vm_canary_is_found_dead(
enum ruby_vminsn_type i,
VALUE c)));
7355Init_vm_stack_canary(
void)
7358 int n = ruby_fill_random_bytes(&vm_stack_canary,
sizeof vm_stack_canary,
false);
7359 vm_stack_canary |= 0x01;
7361 vm_stack_canary_was_born =
true;
7366rb_vm_canary_is_found_dead(
enum ruby_vminsn_type i,
VALUE c)
7370 const char *insn = rb_insns_name(i);
7374 rb_bug(
"dead canary found at %s: %s", insn, str);
7378void Init_vm_stack_canary(
void) { }
7410 return (*(rb_invoke_funcptr0_t)funcptr)(ec, self);
7417 return (*(rb_invoke_funcptr1_t)funcptr)(ec, self, argv[0]);
7424 return (*(rb_invoke_funcptr2_t)funcptr)(ec, self, argv[0], argv[1]);
7431 return (*(rb_invoke_funcptr3_t)funcptr)(ec, self, argv[0], argv[1], argv[2]);
7438 return (*(rb_invoke_funcptr4_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3]);
7445 return (*(rb_invoke_funcptr5_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4]);
7452 return (*(rb_invoke_funcptr6_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
7459 return (*(rb_invoke_funcptr7_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
7466 return (*(rb_invoke_funcptr8_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
7472 typedef VALUE (*rb_invoke_funcptr9_t)(
rb_execution_context_t *ec,
VALUE self,
VALUE v1,
VALUE v2,
VALUE v3,
VALUE v4,
VALUE v5,
VALUE v6,
VALUE v7,
VALUE v8,
VALUE v9);
7473 return (*(rb_invoke_funcptr9_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
7479 typedef VALUE (*rb_invoke_funcptr10_t)(
rb_execution_context_t *ec,
VALUE self,
VALUE v1,
VALUE v2,
VALUE v3,
VALUE v4,
VALUE v5,
VALUE v6,
VALUE v7,
VALUE v8,
VALUE v9,
VALUE v10);
7480 return (*(rb_invoke_funcptr10_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
7486 typedef VALUE (*rb_invoke_funcptr11_t)(
rb_execution_context_t *ec,
VALUE self,
VALUE v1,
VALUE v2,
VALUE v3,
VALUE v4,
VALUE v5,
VALUE v6,
VALUE v7,
VALUE v8,
VALUE v9,
VALUE v10,
VALUE v11);
7487 return (*(rb_invoke_funcptr11_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
7493 typedef VALUE (*rb_invoke_funcptr12_t)(
rb_execution_context_t *ec,
VALUE self,
VALUE v1,
VALUE v2,
VALUE v3,
VALUE v4,
VALUE v5,
VALUE v6,
VALUE v7,
VALUE v8,
VALUE v9,
VALUE v10,
VALUE v11,
VALUE v12);
7494 return (*(rb_invoke_funcptr12_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
7500 typedef VALUE (*rb_invoke_funcptr13_t)(
rb_execution_context_t *ec,
VALUE self,
VALUE v1,
VALUE v2,
VALUE v3,
VALUE v4,
VALUE v5,
VALUE v6,
VALUE v7,
VALUE v8,
VALUE v9,
VALUE v10,
VALUE v11,
VALUE v12,
VALUE v13);
7501 return (*(rb_invoke_funcptr13_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
7507 typedef VALUE (*rb_invoke_funcptr14_t)(
rb_execution_context_t *ec,
VALUE self,
VALUE v1,
VALUE v2,
VALUE v3,
VALUE v4,
VALUE v5,
VALUE v6,
VALUE v7,
VALUE v8,
VALUE v9,
VALUE v10,
VALUE v11,
VALUE v12,
VALUE v13,
VALUE v14);
7508 return (*(rb_invoke_funcptr14_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
7514 typedef VALUE (*rb_invoke_funcptr15_t)(
rb_execution_context_t *ec,
VALUE self,
VALUE v1,
VALUE v2,
VALUE v3,
VALUE v4,
VALUE v5,
VALUE v6,
VALUE v7,
VALUE v8,
VALUE v9,
VALUE v10,
VALUE v11,
VALUE v12,
VALUE v13,
VALUE v14,
VALUE v15);
7515 return (*(rb_invoke_funcptr15_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
7520static builtin_invoker
7521lookup_builtin_invoker(
int argc)
7523 static const builtin_invoker invokers[] = {
7542 return invokers[argc];
7548 const bool canary_p = ISEQ_BODY(CFP_ISEQ(reg_cfp))->builtin_attrs & BUILTIN_ATTR_LEAF;
7549 SETUP_CANARY(canary_p);
7550 rb_insn_func_t func_ptr = (rb_insn_func_t)(uintptr_t)bf->func_ptr;
7551 VALUE ret = (*lookup_builtin_invoker(bf->argc))(ec, reg_cfp->self, argv, func_ptr);
7552 CHECK_CANARY(canary_p, BIN(invokebuiltin));
7559 return invoke_bf(ec, cfp, bf, argv);
7566 fputs(
"vm_invoke_builtin_delegate: passing -> ", stderr);
7567 for (
int i=0; i<bf->argc; i++) {
7568 ruby_debug_printf(
":%s ", rb_id2name(ISEQ_BODY(CFP_ISEQ(cfp))->local_table[i+start_index]));
7570 ruby_debug_printf(
"\n" "%s %s(%d):%p\n", RUBY_FUNCTION_NAME_STRING, bf->name, bf->argc,
7571 (
void *)(uintptr_t)bf->func_ptr);
7574 if (bf->argc == 0) {
7575 return invoke_bf(ec, cfp, bf, NULL);
7578 const VALUE *argv = cfp->ep - ISEQ_BODY(CFP_ISEQ(cfp))->local_table_size - VM_ENV_DATA_SIZE + 1 + start_index;
7579 return invoke_bf(ec, cfp, bf, argv);
7589 return cfp->ep[index];
#define RUBY_ASSERT(...)
Asserts that the given expression is truthy if and only if RUBY_DEBUG is truthy.
#define RUBY_EVENT_END
Encountered an end of a class clause.
#define RUBY_EVENT_C_CALL
A method, written in C, is called.
#define RUBY_EVENT_B_RETURN
Encountered a next statement.
#define RUBY_EVENT_CLASS
Encountered a new class.
#define RUBY_EVENT_LINE
Encountered a new line.
#define RUBY_EVENT_RETURN
Encountered a return statement.
#define RUBY_EVENT_C_RETURN
Return from a method, written in C.
#define RUBY_EVENT_B_CALL
Encountered an yield statement.
uint32_t rb_event_flag_t
Represents event(s).
#define RUBY_EVENT_CALL
A method, written in Ruby, is called.
#define RUBY_EVENT_RESCUE
Encountered a rescue statement.
static bool RB_FL_ABLE(VALUE obj)
Checks if the object is flaggable.
VALUE rb_singleton_class(VALUE obj)
Finds or creates the singleton class of the passed object.
VALUE rb_module_new(void)
Creates a new, anonymous module.
VALUE rb_class_inherited(VALUE super, VALUE klass)
Calls Class::inherited.
VALUE rb_define_class_id(ID id, VALUE super)
This is a very badly designed API that creates an anonymous class.
#define TYPE(_)
Old name of rb_type.
#define RFLOAT_VALUE
Old name of rb_float_value.
#define T_STRING
Old name of RUBY_T_STRING.
#define Qundef
Old name of RUBY_Qundef.
#define INT2FIX
Old name of RB_INT2FIX.
#define T_NIL
Old name of RUBY_T_NIL.
#define T_FLOAT
Old name of RUBY_T_FLOAT.
#define T_IMEMO
Old name of RUBY_T_IMEMO.
#define ID2SYM
Old name of RB_ID2SYM.
#define T_BIGNUM
Old name of RUBY_T_BIGNUM.
#define SPECIAL_CONST_P
Old name of RB_SPECIAL_CONST_P.
#define T_STRUCT
Old name of RUBY_T_STRUCT.
#define T_FIXNUM
Old name of RUBY_T_FIXNUM.
#define SYM2ID
Old name of RB_SYM2ID.
#define CLASS_OF
Old name of rb_class_of.
#define rb_ary_new4
Old name of rb_ary_new_from_values.
#define FIXABLE
Old name of RB_FIXABLE.
#define LONG2FIX
Old name of RB_INT2FIX.
#define FIX2INT
Old name of RB_FIX2INT.
#define T_MODULE
Old name of RUBY_T_MODULE.
#define STATIC_SYM_P
Old name of RB_STATIC_SYM_P.
#define ASSUME
Old name of RBIMPL_ASSUME.
#define FIX2ULONG
Old name of RB_FIX2ULONG.
#define T_TRUE
Old name of RUBY_T_TRUE.
#define T_ICLASS
Old name of RUBY_T_ICLASS.
#define T_HASH
Old name of RUBY_T_HASH.
#define FL_TEST_RAW
Old name of RB_FL_TEST_RAW.
#define rb_ary_new3
Old name of rb_ary_new_from_args.
#define LONG2NUM
Old name of RB_LONG2NUM.
#define rb_exc_new3
Old name of rb_exc_new_str.
#define T_FALSE
Old name of RUBY_T_FALSE.
#define Qtrue
Old name of RUBY_Qtrue.
#define Qnil
Old name of RUBY_Qnil.
#define Qfalse
Old name of RUBY_Qfalse.
#define FIX2LONG
Old name of RB_FIX2LONG.
#define T_ARRAY
Old name of RUBY_T_ARRAY.
#define T_OBJECT
Old name of RUBY_T_OBJECT.
#define NIL_P
Old name of RB_NIL_P.
#define T_SYMBOL
Old name of RUBY_T_SYMBOL.
#define DBL2NUM
Old name of rb_float_new.
#define T_CLASS
Old name of RUBY_T_CLASS.
#define BUILTIN_TYPE
Old name of RB_BUILTIN_TYPE.
#define FL_TEST
Old name of RB_FL_TEST.
#define FIXNUM_P
Old name of RB_FIXNUM_P.
#define FL_USHIFT
Old name of RUBY_FL_USHIFT.
#define FL_SET_RAW
Old name of RB_FL_SET_RAW.
#define SYMBOL_P
Old name of RB_SYMBOL_P.
void rb_notimplement(void)
void rb_exc_raise(VALUE mesg)
Raises an exception in the current thread.
#define ruby_verbose
This variable controls whether the interpreter is in debug mode.
VALUE rb_eTypeError
TypeError exception.
VALUE rb_eFatal
fatal exception.
VALUE rb_eNoMethodError
NoMethodError exception.
void rb_exc_fatal(VALUE mesg)
Raises a fatal error in the current thread.
VALUE rb_eRuntimeError
RuntimeError exception.
void rb_warn(const char *fmt,...)
Identical to rb_warning(), except it reports unless $VERBOSE is nil.
void rb_error_frozen_object(VALUE frozen_obj)
Identical to rb_error_frozen(), except it takes arbitrary Ruby object instead of C's string.
VALUE rb_exc_new_str(VALUE etype, VALUE str)
Identical to rb_exc_new_cstr(), except it takes a Ruby's string instead of C's.
@ RB_WARN_CATEGORY_STRICT_UNUSED_BLOCK
Warning is for checking unused block strictly.
VALUE rb_cClass
Class class.
VALUE rb_cArray
Array class.
VALUE rb_cObject
Object class.
VALUE rb_obj_alloc(VALUE klass)
Allocates an instance of the given class.
VALUE rb_cRegexp
Regexp class.
VALUE rb_obj_frozen_p(VALUE obj)
Just calls RB_OBJ_FROZEN() inside.
VALUE rb_cHash
Hash class.
VALUE rb_obj_class(VALUE obj)
Queries the class of an object.
VALUE rb_inspect(VALUE obj)
Generates a human-readable textual representation of the given object.
VALUE rb_cBasicObject
BasicObject class.
VALUE rb_cModule
Module class.
VALUE rb_class_real(VALUE klass)
Finds a "real" class.
VALUE rb_obj_is_kind_of(VALUE obj, VALUE klass)
Queries if the given object is an instance (of possibly descendants) of the given class.
VALUE rb_cFloat
Float class.
VALUE rb_cProc
Proc class.
VALUE rb_cString
String class.
#define RB_OBJ_WRITTEN(old, oldv, young)
Identical to RB_OBJ_WRITE(), except it doesn't write any values, but only a WB declaration.
#define RB_OBJ_WRITE(old, slot, young)
Declaration of a "back" pointer.
VALUE rb_ary_concat(VALUE lhs, VALUE rhs)
Destructively appends the contents of latter into the end of former.
VALUE rb_ary_shift(VALUE ary)
Destructively deletes an element from the beginning of the passed array and returns what was deleted.
VALUE rb_ary_resurrect(VALUE ary)
I guess there is no use case of this function in extension libraries, but this is a routine identical...
VALUE rb_ary_dup(VALUE ary)
Duplicates an array.
VALUE rb_ary_includes(VALUE ary, VALUE elem)
Queries if the passed array has the passed entry.
VALUE rb_ary_plus(VALUE lhs, VALUE rhs)
Creates a new array, concatenating the former to the latter.
VALUE rb_ary_cat(VALUE ary, const VALUE *train, long len)
Destructively appends multiple elements at the end of the array.
VALUE rb_check_array_type(VALUE obj)
Try converting an object to its array representation using its to_ary method, if any.
VALUE rb_ary_new(void)
Allocates a new, empty array.
VALUE rb_ary_pop(VALUE ary)
Destructively deletes an element from the end of the passed array and returns what was deleted.
VALUE rb_ary_hidden_new(long capa)
Allocates a hidden (no class) empty array.
VALUE rb_ary_push(VALUE ary, VALUE elem)
Special case of rb_ary_cat() that it adds only one element.
VALUE rb_ary_entry(VALUE ary, long off)
Queries an element of an array.
void rb_ary_store(VALUE ary, long key, VALUE val)
Destructively stores the passed value to the passed array's passed index.
#define UNLIMITED_ARGUMENTS
This macro is used in conjunction with rb_check_arity().
static int rb_check_arity(int argc, int min, int max)
Ensures that the passed integer is in the passed range.
VALUE rb_proc_call_with_block(VALUE recv, int argc, const VALUE *argv, VALUE proc)
Identical to rb_proc_call(), except you can additionally pass another proc object,...
VALUE rb_reg_last_match(VALUE md)
This just returns the argument, stringified.
VALUE rb_reg_match(VALUE re, VALUE str)
This is the match operator.
VALUE rb_reg_nth_match(int n, VALUE md)
Queries the nth captured substring.
VALUE rb_reg_match_post(VALUE md)
The portion of the original string after the given match.
VALUE rb_reg_nth_defined(int n, VALUE md)
Identical to rb_reg_nth_match(), except it just returns Boolean.
VALUE rb_reg_match_pre(VALUE md)
The portion of the original string before the given match.
VALUE rb_reg_match_last(VALUE md)
The portion of the original string that captured at the very last.
VALUE rb_str_append(VALUE dst, VALUE src)
Identical to rb_str_buf_append(), except it converts the right hand side before concatenating.
VALUE rb_str_succ(VALUE orig)
Searches for the "successor" of a string.
VALUE rb_str_buf_append(VALUE dst, VALUE src)
Identical to rb_str_cat_cstr(), except it takes Ruby's string instead of C's.
VALUE rb_str_concat(VALUE dst, VALUE src)
Identical to rb_str_append(), except it also accepts an integer as a codepoint.
#define rb_str_cat_cstr(buf, str)
Identical to rb_str_cat(), except it assumes the passed pointer is a pointer to a C string.
VALUE rb_str_length(VALUE)
Identical to rb_str_strlen(), except it returns the value in rb_cInteger.
VALUE rb_str_intern(VALUE str)
Identical to rb_to_symbol(), except it assumes the receiver being an instance of RString.
void rb_thread_schedule(void)
Tries to switch to another thread.
VALUE rb_const_get(VALUE space, ID name)
Identical to rb_const_defined(), except it returns the actual defined value.
VALUE rb_ivar_set(VALUE obj, ID name, VALUE val)
Identical to rb_iv_set(), except it accepts the name as an ID instead of a C string.
void rb_cvar_set(VALUE klass, ID name, VALUE val)
Assigns a value to a class variable.
VALUE rb_cvar_find(VALUE klass, ID name, VALUE *front)
Identical to rb_cvar_get(), except it takes additional "front" pointer.
VALUE rb_ivar_get(VALUE obj, ID name)
Identical to rb_iv_get(), except it accepts the name as an ID instead of a C string.
void rb_const_set(VALUE space, ID name, VALUE val)
Names a constant.
VALUE rb_autoload_load(VALUE space, ID name)
Kicks the autoload procedure as if it was "touched".
VALUE rb_mod_name(VALUE mod)
Queries the name of a module.
VALUE rb_const_get_at(VALUE space, ID name)
Identical to rb_const_defined_at(), except it returns the actual defined value.
void rb_set_class_path_string(VALUE klass, VALUE space, VALUE name)
Identical to rb_set_class_path(), except it accepts the name as Ruby's string instead of C's.
VALUE rb_ivar_defined(VALUE obj, ID name)
Queries if the instance variable is defined at the object.
int rb_const_defined_at(VALUE space, ID name)
Identical to rb_const_defined(), except it doesn't look for parent classes.
VALUE rb_cvar_defined(VALUE klass, ID name)
Queries if the given class has the given class variable.
VALUE rb_class_path(VALUE mod)
Identical to rb_mod_name(), except it returns #<Class: ...> style inspection for anonymous modules.
int rb_const_defined(VALUE space, ID name)
Queries if the constant is defined at the namespace.
VALUE rb_check_funcall(VALUE recv, ID mid, int argc, const VALUE *argv)
Identical to rb_funcallv(), except it returns RUBY_Qundef instead of raising rb_eNoMethodError.
rb_alloc_func_t rb_get_alloc_func(VALUE klass)
Queries the allocator function of a class.
int rb_method_boundp(VALUE klass, ID id, int ex)
Queries if the klass has this method.
ID rb_check_id(volatile VALUE *namep)
Detects if the given name is already interned or not.
VALUE rb_sym2str(VALUE symbol)
Obtain a frozen string representation of a symbol (not including the leading colon).
int off
Offset inside of ptr.
int len
Length of the buffer.
static bool rb_ractor_shareable_p(VALUE obj)
Queries if multiple Ractors can share the passed object or not.
#define RB_OBJ_SHAREABLE_P(obj)
Queries if the passed object has previously classified as shareable or not.
#define MEMCPY(p1, p2, type, n)
Handy macro to call memcpy.
#define ALLOCA_N(type, n)
#define RB_GC_GUARD(v)
Prevents premature destruction of local objects.
#define MEMMOVE(p1, p2, type, n)
Handy macro to call memmove.
VALUE type(ANYARGS)
ANYARGS-ed function type.
VALUE rb_ensure(type *q, VALUE w, type *e, VALUE r)
An equivalent of ensure clause.
#define RARRAY_LEN
Just another name of rb_array_len.
static int RARRAY_LENINT(VALUE ary)
Identical to rb_array_len(), except it differs for the return type.
static VALUE * RARRAY_PTR(VALUE ary)
Wild use of a C pointer.
#define RARRAY_AREF(a, i)
#define RARRAY_CONST_PTR
Just another name of rb_array_const_ptr.
static VALUE RBASIC_CLASS(VALUE obj)
Queries the class of an object.
#define RBASIC(obj)
Convenient casting macro.
#define RCLASS_SUPER
Just another name of rb_class_get_superclass.
#define RHASH_SIZE(h)
Queries the size of the hash.
#define RHASH_EMPTY_P(h)
Checks if the hash is empty.
static VALUE * ROBJECT_FIELDS(VALUE obj)
Queries the instance variables.
#define StringValueCStr(v)
Identical to StringValuePtr, except it additionally checks for the contents for viability as a C string.
#define RB_PASS_KEYWORDS
Pass keywords, final argument must be a hash of keywords.
#define RB_NO_KEYWORDS
Do not pass keywords.
static bool RB_SPECIAL_CONST_P(VALUE obj)
Checks if the given object is of enum ruby_special_consts.
#define RTEST
This is an old name of RB_TEST.
#define ANYARGS
Functions declared using this macro take arbitrary arguments, including void.
const VALUE ary[1]
Embedded elements.
const VALUE * ptr
Pointer to the C array that holds the elements of the array.
const ID * segments
A null-terminated list of ids, used to represent a constant's path. idNULL is used to represent the `::` (toplevel) prefix.
rb_cref_t * cref
class reference, should be marked
const rb_iseq_t * iseqptr
iseq pointer, should be separated from iseqval
IFUNC (Internal FUNCtion)
const VALUE cref_or_me
class reference or rb_method_entry_t
intptr_t SIGNED_VALUE
A signed integer type that has the same width with VALUE.
uintptr_t ID
Type that represents a Ruby identifier such as a variable name.
#define SIZEOF_VALUE
Identical to sizeof(VALUE), except it is a macro that can also be used inside of preprocessor directives.
uintptr_t VALUE
Type that represents a Ruby object.
static enum ruby_value_type RB_BUILTIN_TYPE(VALUE obj)
Queries the type of the object.
static bool RB_FLOAT_TYPE_P(VALUE obj)
Queries if the object is an instance of rb_cFloat.
static bool RB_TYPE_P(VALUE obj, enum ruby_value_type t)
Queries if the given object is of given type.