/* Ruby 4.1.0dev (2026-03-01 revision d68e4be1873e364c5ee24ed112bce4bc86e3a406)
 * vm_method.c (d68e4be1873e364c5ee24ed112bce4bc86e3a406) */
1/*
2 * This file is included by vm.c
3 */
4
5#include "id_table.h"
6#include "yjit.h"
7
8#define METHOD_DEBUG 0
9
10static int vm_redefinition_check_flag(VALUE klass);
11static void rb_vm_check_redefinition_opt_method(const rb_method_entry_t *me, VALUE klass);
12static inline rb_method_entry_t *lookup_method_table(VALUE klass, ID id);
13
14#define object_id idObject_id
15#define added idMethod_added
16#define singleton_added idSingleton_method_added
17#define removed idMethod_removed
18#define singleton_removed idSingleton_method_removed
19#define undefined idMethod_undefined
20#define singleton_undefined idSingleton_method_undefined
21
22#define ruby_running (GET_VM()->running)
23/* int ruby_running = 0; */
24
/* GC mark iterator for one slot of a class' cc (call cache) table.
 * If the slot's cme has been invalidated, invalidate every still-live cc it
 * holds, free the ccs, and delete the slot; otherwise mark the cme and each
 * cc as movable so they survive GC (and may be relocated by compaction). */
static enum rb_id_table_iterator_result
mark_cc_entry_i(VALUE ccs_ptr, void *data)
{
    struct rb_class_cc_entries *ccs = (struct rb_class_cc_entries *)ccs_ptr;

    VM_ASSERT(vm_ccs_p(ccs));

    if (METHOD_ENTRY_INVALIDATED(ccs->cme)) {
        /* Before detaching the CCs from this class, we need to invalidate the cc
         * since we will no longer be marking the cme on their behalf.
         */
        for (int i = 0; i < ccs->len; i++) {
            const struct rb_callcache *cc = ccs->entries[i].cc;
            if (cc->klass == Qundef) continue; // already invalidated
            VM_ASSERT(cc->klass == Qundef || vm_cc_check_cme(cc, ccs->cme));
            VM_ASSERT(!vm_cc_super_p(cc) && !vm_cc_refinement_p(cc));
            vm_cc_invalidate(cc);
        }
        ruby_sized_xfree(ccs, vm_ccs_alloc_size(ccs->capa));
        return ID_TABLE_DELETE;
    }
    else {
        rb_gc_mark_movable((VALUE)ccs->cme);

        for (int i = 0; i < ccs->len; i++) {
            const struct rb_callcache *cc = ccs->entries[i].cc;
            VM_ASSERT(cc->klass == Qundef || vm_cc_check_cme(cc, ccs->cme));

            rb_gc_mark_movable((VALUE)cc);
        }
        return ID_TABLE_CONTINUE;
    }
}
58
59static void
60vm_cc_table_mark(void *data)
61{
62 struct rb_id_table *tbl = (struct rb_id_table *)data;
63 if (tbl) {
64 rb_id_table_foreach_values(tbl, mark_cc_entry_i, NULL);
65 }
66}
67
68static enum rb_id_table_iterator_result
69cc_table_free_i(VALUE ccs_ptr, void *data)
70{
71 struct rb_class_cc_entries *ccs = (struct rb_class_cc_entries *)ccs_ptr;
72 VM_ASSERT(vm_ccs_p(ccs));
73
74 ruby_sized_xfree(ccs, vm_ccs_alloc_size(ccs->capa));
75
76 return ID_TABLE_CONTINUE;
77}
78
79static void
80vm_cc_table_free(void *data)
81{
82 struct rb_id_table *tbl = (struct rb_id_table *)data;
83
84 rb_id_table_foreach_values(tbl, cc_table_free_i, NULL);
85 rb_managed_id_table_type.function.dfree(data);
86}
87
88static enum rb_id_table_iterator_result
89cc_table_memsize_i(VALUE ccs_ptr, void *data_ptr)
90{
91 size_t *total_size = data_ptr;
92 struct rb_class_cc_entries *ccs = (struct rb_class_cc_entries *)ccs_ptr;
93 *total_size += sizeof(*ccs);
94 *total_size += sizeof(ccs->entries[0]) * ccs->capa;
95 return ID_TABLE_CONTINUE;
96}
97
98static size_t
99vm_cc_table_memsize(const void *data)
100{
101 size_t memsize = rb_managed_id_table_type.function.dsize(data);
102 struct rb_id_table *tbl = (struct rb_id_table *)data;
103 rb_id_table_foreach_values(tbl, cc_table_memsize_i, &memsize);
104 return memsize;
105}
106
107static enum rb_id_table_iterator_result
108compact_cc_entry_i(VALUE ccs_ptr, void *data)
109{
110 struct rb_class_cc_entries *ccs = (struct rb_class_cc_entries *)ccs_ptr;
111
112 ccs->cme = (const struct rb_callable_method_entry_struct *)rb_gc_location((VALUE)ccs->cme);
113 VM_ASSERT(vm_ccs_p(ccs));
114
115 for (int i=0; i<ccs->len; i++) {
116 ccs->entries[i].cc = (const struct rb_callcache *)rb_gc_location((VALUE)ccs->entries[i].cc);
117 }
118
119 return ID_TABLE_CONTINUE;
120}
121
122static void
123vm_cc_table_compact(void *data)
124{
125 struct rb_id_table *tbl = (struct rb_id_table *)data;
126 rb_id_table_foreach_values(tbl, compact_cc_entry_i, NULL);
127}
128
/* TypedData type for a class' cc (call cache) table. Extends the managed
 * id table type (see .parent) with hooks that also manage the heap-allocated
 * rb_class_cc_entries each slot points to. */
static const rb_data_type_t cc_table_type = {
    .wrap_struct_name = "VM/cc_table",
    .function = {
        .dmark = vm_cc_table_mark,
        .dfree = vm_cc_table_free,
        .dsize = vm_cc_table_memsize,
        .dcompact = vm_cc_table_compact,
    },
    .parent = &rb_managed_id_table_type,
    .flags = RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED | RUBY_TYPED_EMBEDDABLE,
};
140
/* Create a new, empty cc table object with room for `capa` entries. */
VALUE
rb_vm_cc_table_create(size_t capa)
{
    return rb_managed_id_table_create(&cc_table_type, capa);
}
146
/* Copy one ccs from the old cc table into `data` (the new table VALUE),
 * skipping entries whose cme is already invalidated. Note the ordering:
 * the zeroed allocation is inserted into the new table before memcpy so
 * the table never holds an uninitialized pointer if insertion triggers GC. */
static enum rb_id_table_iterator_result
vm_cc_table_dup_i(ID key, VALUE old_ccs_ptr, void *data)
{
    VALUE new_table = (VALUE)data;
    struct rb_class_cc_entries *old_ccs = (struct rb_class_cc_entries *)old_ccs_ptr;

    if (METHOD_ENTRY_INVALIDATED(old_ccs->cme)) {
        // Invalidated CME. This entry will be removed from the old table on
        // the next GC mark, so it's unsafe (and undesirable) to copy
        return ID_TABLE_CONTINUE;
    }

    size_t memsize = vm_ccs_alloc_size(old_ccs->capa);
    struct rb_class_cc_entries *new_ccs = ruby_xcalloc(1, memsize);
    rb_managed_id_table_insert(new_table, key, (VALUE)new_ccs);

    // We hold the VM lock, so invalidation should not have happened between
    // our earlier invalidation check and now.
    VM_ASSERT(!METHOD_ENTRY_INVALIDATED(old_ccs->cme));

    memcpy(new_ccs, old_ccs, memsize);

#if VM_CHECK_MODE > 0
    new_ccs->debug_sig = ~(VALUE)new_ccs;
#endif

    /* Write barriers: the new table now references the cme and every cc. */
    RB_OBJ_WRITTEN(new_table, Qundef, (VALUE)new_ccs->cme);
    for (int index = 0; index < new_ccs->len; index++) {
        RB_OBJ_WRITTEN(new_table, Qundef, new_ccs->entries[index].cc);
    }
    return ID_TABLE_CONTINUE;
}
179
/* Duplicate a cc table, copying only slots with still-valid cmes.
 * Caller must hold the VM lock (see the race note in vm_cc_table_dup_i). */
VALUE
rb_vm_cc_table_dup(VALUE old_table)
{
    ASSERT_vm_locking();
    VALUE new_table = rb_vm_cc_table_create(rb_managed_id_table_size(old_table));
    rb_managed_id_table_foreach(old_table, vm_cc_table_dup_i, (void *)new_table);
    return new_table;
}
188
189static void
190vm_ccs_invalidate(struct rb_class_cc_entries *ccs)
191{
192 for (int i=0; i<ccs->len; i++) {
193 const struct rb_callcache *cc = ccs->entries[i].cc;
194 VM_ASSERT(!vm_cc_super_p(cc) && !vm_cc_refinement_p(cc));
195 vm_cc_invalidate(cc);
196 }
197}
198
/* Invalidate all caches in the ccs, then free its allocation. */
static void
rb_vm_ccs_invalidate_and_free(struct rb_class_cc_entries *ccs)
{
    RB_DEBUG_COUNTER_INC(ccs_free);
    vm_ccs_invalidate(ccs);
    ruby_sized_xfree(ccs, vm_ccs_alloc_size(ccs->capa));
}
206
207void
208rb_vm_cc_table_delete(VALUE table, ID mid)
209{
210 VALUE ccs_obj;
211 if (rb_managed_id_table_lookup(table, mid, &ccs_obj)) {
212 struct rb_class_cc_entries *ccs = (struct rb_class_cc_entries *)ccs_obj;
213 rb_managed_id_table_delete(table, mid);
214 rb_vm_ccs_invalidate_and_free(ccs);
215 }
216}
217
218static enum rb_id_table_iterator_result
219vm_ccs_dump_i(ID mid, VALUE val, void *data)
220{
221 const struct rb_class_cc_entries *ccs = (struct rb_class_cc_entries *)val;
222 fprintf(stderr, " | %s (len:%d) ", rb_id2name(mid), ccs->len);
223 rp(ccs->cme);
224
225 for (int i=0; i<ccs->len; i++) {
226 rp_m( " | \t", ccs->entries[i].cc);
227 }
228
229 return ID_TABLE_CONTINUE;
230}
231
232static void
233vm_ccs_dump(VALUE klass, ID target_mid)
234{
235 VALUE cc_tbl = RCLASS_WRITABLE_CC_TBL(klass);
236 if (cc_tbl) {
237 VALUE ccs;
238 if (target_mid) {
239 if (rb_managed_id_table_lookup(cc_tbl, target_mid, &ccs)) {
240 fprintf(stderr, " [CCTB] %p\n", (void *)cc_tbl);
241 vm_ccs_dump_i(target_mid, ccs, NULL);
242 }
243 }
244 else {
245 fprintf(stderr, " [CCTB] %p\n", (void *)cc_tbl);
246 rb_managed_id_table_foreach(cc_tbl, vm_ccs_dump_i, (void *)target_mid);
247 }
248 }
249}
250
251static enum rb_id_table_iterator_result
252vm_cme_dump_i(ID mid, VALUE val, void *data)
253{
254 ID target_mid = (ID)data;
255 if (target_mid == 0 || mid == target_mid) {
256 rp_m(" > ", val);
257 }
258 return ID_TABLE_CONTINUE;
259}
260
/* Debug helper: walk the ancestry chain of `klass` printing, for each level,
 * the method table, the callable-method table, and the cc table.
 * target_mid == 0 dumps everything; otherwise only that method id. */
static VALUE
vm_mtbl_dump(VALUE klass, ID target_mid)
{
    fprintf(stderr, "# vm_mtbl\n");
    while (klass) {
        rp_m(" -> ", klass);
        VALUE me;

        if (RCLASS_M_TBL(klass)) {
            if (target_mid != 0) {
                if (rb_id_table_lookup(RCLASS_M_TBL(klass), target_mid, &me)) {
                    rp_m("  [MTBL] ", me);
                }
            }
            else {
                fprintf(stderr, "  ## RCLASS_M_TBL (%p)\n", (void *)RCLASS_M_TBL(klass));
                rb_id_table_foreach(RCLASS_M_TBL(klass), vm_cme_dump_i, NULL);
            }
        }
        else {
            fprintf(stderr, " MTBL: NULL\n");
        }
        if (RCLASS_WRITABLE_CALLABLE_M_TBL(klass)) {
            if (target_mid != 0) {
                if (rb_id_table_lookup(RCLASS_WRITABLE_CALLABLE_M_TBL(klass), target_mid, &me)) {
                    rp_m("  [CM**] ", me);
                }
            }
            else {
                fprintf(stderr, "  ## RCLASS_CALLABLE_M_TBL\n");
                rb_id_table_foreach(RCLASS_WRITABLE_CALLABLE_M_TBL(klass), vm_cme_dump_i, NULL);
            }
        }
        if (RCLASS_WRITABLE_CC_TBL(klass)) {
            vm_ccs_dump(klass, target_mid);
        }
        klass = RCLASS_SUPER(klass);
    }
    return Qnil;
}
301
/* Public debug entry point: print `msg` then dump the method tables of
 * `klass` and its ancestors (see vm_mtbl_dump). */
void
rb_vm_mtbl_dump(const char *msg, VALUE klass, ID target_mid)
{
    fprintf(stderr, "[%s] ", msg);
    vm_mtbl_dump(klass, target_mid);
}
308
/* Mark a callable method entry as invalidated and notify the JITs so any
 * code compiled against it is deoptimized. */
static inline void
vm_cme_invalidate(rb_callable_method_entry_t *cme)
{
    VM_ASSERT(IMEMO_TYPE_P(cme, imemo_ment), "cme: %d", imemo_type((VALUE)cme));
    VM_ASSERT(callable_method_entry_p(cme));
    METHOD_ENTRY_INVALIDATED_SET(cme);
    RB_DEBUG_COUNTER_INC(cc_cme_invalidate);

    rb_yjit_cme_invalidate(cme);
    rb_zjit_cme_invalidate(cme);
}
320
/* Iterator over a set of inline constant caches (ICs): clear each cache
 * entry so the next constant lookup misses and re-resolves. */
static int
rb_clear_constant_cache_for_id_i(st_data_t ic, st_data_t arg)
{
    ((IC) ic)->entry = NULL;
    return ST_CONTINUE;
}
327
328void
330{
331 VALUE lookup_result;
332 rb_vm_t *vm = GET_VM();
333
334 if (rb_id_table_lookup(vm->constant_cache, id, &lookup_result)) {
335 set_table *ics = (set_table *)lookup_result;
336 set_table_foreach(ics, rb_clear_constant_cache_for_id_i, (st_data_t) NULL);
337 ruby_vm_constant_cache_invalidations += ics->num_entries;
338 }
339
340 rb_yjit_constant_state_changed(id);
341 rb_zjit_constant_state_changed(id);
342}
343
/* Drop and invalidate the negative-lookup ("method missing") cache entry
 * for `mid`, if one exists in the VM-wide negative cme table. */
static void
invalidate_negative_cache(ID mid)
{
    VALUE cme;
    rb_vm_t *vm = GET_VM();

    if (rb_id_table_lookup(vm->negative_cme_table, mid, &cme)) {
        rb_id_table_delete(vm->negative_cme_table, mid);
        vm_cme_invalidate((rb_callable_method_entry_t *)cme);
        RB_DEBUG_COUNTER_INC(cc_invalidate_negative);
    }
}
356
357const rb_method_entry_t * rb_method_entry_clone(const rb_method_entry_t *src_me);
358static const rb_callable_method_entry_t *complemented_callable_method_entry(VALUE klass, ID id);
359static const rb_callable_method_entry_t *lookup_overloaded_cme(const rb_callable_method_entry_t *cme);
360
/* Invalidate and remove the cc-table entry for `mid` in `tbl` (a cc table
 * VALUE, may be 0). A cme with a nil owner is a negative-cache entry, so
 * the VM-wide negative cache is cleared for `mid` too. */
static void
invalidate_method_cache_in_cc_table(VALUE tbl, ID mid)
{
    VALUE ccs_data;
    if (tbl && rb_managed_id_table_lookup(tbl, mid, &ccs_data)) {
        struct rb_class_cc_entries *ccs = (struct rb_class_cc_entries *)ccs_data;
        rb_yjit_cme_invalidate((rb_callable_method_entry_t *)ccs->cme);
        rb_zjit_cme_invalidate((rb_callable_method_entry_t *)ccs->cme);
        if (NIL_P(ccs->cme->owner)) invalidate_negative_cache(mid);
        rb_vm_ccs_invalidate_and_free(ccs);
        rb_managed_id_table_delete(tbl, mid);
        RB_DEBUG_COUNTER_INC(cc_invalidate_leaf_ccs);
    }
}
375
/* Invalidate (in the JITs) and remove the callable method entry for `mid`
 * from a callable method table (`tbl` may be NULL). */
static void
invalidate_callable_method_entry_in_callable_m_table(struct rb_id_table *tbl, ID mid)
{
    VALUE cme;
    if (tbl && rb_id_table_lookup(tbl, mid, &cme)) {
        rb_yjit_cme_invalidate((rb_callable_method_entry_t *)cme);
        rb_zjit_cme_invalidate((rb_callable_method_entry_t *)cme);
        rb_id_table_delete(tbl, mid);
        RB_DEBUG_COUNTER_INC(cc_invalidate_leaf_callable);
    }
}
387
389 VALUE klass;
390 ID mid;
391 const rb_method_entry_t *cme;
392 const rb_method_entry_t *newer;
393};
394
395static void
396invalidate_callable_method_entry_in_every_m_table_i(rb_classext_t *ext, bool is_prime, VALUE box_value, void *data)
397{
398 st_data_t me;
400 struct rb_id_table *tbl = RCLASSEXT_M_TBL(ext);
401
402 if (rb_id_table_lookup(tbl, arg->mid, &me) && arg->cme == (const rb_method_entry_t *)me) {
403 rb_method_table_insert(arg->klass, tbl, arg->mid, arg->newer);
404 }
405}
406
407static void
408invalidate_callable_method_entry_in_every_m_table(VALUE klass, ID mid, const rb_callable_method_entry_t *cme)
409{
410 // The argument cme must be invalidated later in the caller side
411 const rb_method_entry_t *newer = rb_method_entry_clone((const rb_method_entry_t *)cme);
413 .klass = klass,
414 .mid = mid,
415 .cme = (const rb_method_entry_t *) cme,
416 .newer = newer,
417 };
418 rb_class_classext_foreach(klass, invalidate_callable_method_entry_in_every_m_table_i, (void *)&arg);
419}
420
/* Same as invalidate_callable_method_entry_in_callable_m_table, but bumps
 * the "tree" debug counter — used on the complemented (defined_class) table
 * when a class with subclasses is invalidated. */
static void
invalidate_complemented_method_entry_in_callable_m_table(struct rb_id_table *tbl, ID mid)
{
    VALUE cme;
    if (tbl && rb_id_table_lookup(tbl, mid, &cme)) {
        rb_yjit_cme_invalidate((rb_callable_method_entry_t *)cme);
        rb_zjit_cme_invalidate((rb_callable_method_entry_t *)cme);
        rb_id_table_delete(tbl, mid);
        RB_DEBUG_COUNTER_INC(cc_invalidate_tree_callable);
    }
}
432
/* Invalidate every cached lookup of `mid` reachable from `klass`.
 * Fast path (no subclasses): surgically drop the cc-table and callable
 * method table entries of this class only. Slow path (has subclasses):
 * invalidate the cme itself, which transitively invalidates every cc that
 * references it anywhere in the hierarchy. Runs under the VM lock with a
 * barrier, since caches are read lock-free by executing ractors. */
static void
clear_method_cache_by_id_in_class(VALUE klass, ID mid)
{
    VM_ASSERT_TYPE2(klass, T_CLASS, T_ICLASS);
    if (rb_objspace_garbage_object_p(klass)) return;

    RB_VM_LOCKING() {
        rb_vm_barrier();

        if (LIKELY(RCLASS_SUBCLASSES_FIRST(klass) == NULL)) {
            // no subclasses
            // check only current class

            // invalidate CCs
            VALUE cc_tbl = RCLASS_WRITABLE_CC_TBL(klass);
            invalidate_method_cache_in_cc_table(cc_tbl, mid);
            if (RCLASS_CC_TBL_NOT_PRIME_P(klass, cc_tbl)) {
                invalidate_method_cache_in_cc_table(RCLASS_PRIME_CC_TBL(klass), mid);
            }

            // remove from callable_m_tbl, if exists
            struct rb_id_table *cm_tbl = RCLASS_WRITABLE_CALLABLE_M_TBL(klass);
            invalidate_callable_method_entry_in_callable_m_table(cm_tbl, mid);
            if (RCLASS_CALLABLE_M_TBL_NOT_PRIME_P(klass, cm_tbl)) {
                invalidate_callable_method_entry_in_callable_m_table(RCLASS_PRIME_CALLABLE_M_TBL(klass), mid);
            }

            RB_DEBUG_COUNTER_INC(cc_invalidate_leaf);
        }
        else {
            const rb_callable_method_entry_t *cme = complemented_callable_method_entry(klass, mid);

            if (cme) {
                // invalidate cme if found to invalidate the inline method cache.
                if (METHOD_ENTRY_CACHED(cme)) {
                    if (METHOD_ENTRY_COMPLEMENTED(cme)) {
                        // do nothing
                    }
                    else {
                        // invalidate cc by invalidating cc->cme
                        VALUE owner = cme->owner;
                        VM_ASSERT_TYPE(owner, T_CLASS);
                        VALUE klass_housing_cme;
                        if (cme->def->type == VM_METHOD_TYPE_REFINED && !cme->def->body.refined.orig_me) {
                            klass_housing_cme = owner;
                        }
                        else {
                            klass_housing_cme = RCLASS_ORIGIN(owner);
                        }

                        // replace the cme that will be invalid in the all classexts
                        invalidate_callable_method_entry_in_every_m_table(klass_housing_cme, mid, cme);
                    }

                    vm_cme_invalidate((rb_callable_method_entry_t *)cme);
                    RB_DEBUG_COUNTER_INC(cc_invalidate_tree_cme);

                    // In case of refinement ME, also invalidate the wrapped ME that
                    // could be cached at some callsite and is unreachable from any
                    // RCLASS_WRITABLE_CC_TBL.
                    if (cme->def->type == VM_METHOD_TYPE_REFINED && cme->def->body.refined.orig_me) {
                        vm_cme_invalidate((rb_callable_method_entry_t *)cme->def->body.refined.orig_me);
                    }

                    if (cme->def->iseq_overload) {
                        rb_callable_method_entry_t *monly_cme = (rb_callable_method_entry_t *)lookup_overloaded_cme(cme);
                        if (monly_cme) {
                            vm_cme_invalidate(monly_cme);
                        }
                    }
                }

                // invalidate complement tbl
                if (METHOD_ENTRY_COMPLEMENTED(cme)) {
                    VALUE defined_class = cme->defined_class;
                    struct rb_id_table *cm_tbl = RCLASS_WRITABLE_CALLABLE_M_TBL(defined_class);
                    invalidate_complemented_method_entry_in_callable_m_table(cm_tbl, mid);
                    if (RCLASS_CALLABLE_M_TBL_NOT_PRIME_P(defined_class, cm_tbl)) {
                        struct rb_id_table *prime_cm_table = RCLASS_PRIME_CALLABLE_M_TBL(defined_class);
                        invalidate_complemented_method_entry_in_callable_m_table(prime_cm_table, mid);
                    }
                }

                RB_DEBUG_COUNTER_INC(cc_invalidate_tree);
            }
            else {
                invalidate_negative_cache(mid);
            }
        }

        // clear the global call-cache table as well
        rb_gccct_clear_table(Qnil);
    }
}
526
/* rb_class_foreach_subclass callback: clear caches of `mid` (packed in `d`)
 * for an included-class (iclass) node. */
static void
clear_iclass_method_cache_by_id(VALUE iclass, VALUE d)
{
    VM_ASSERT_TYPE(iclass, T_ICLASS);
    ID mid = (ID)d;
    clear_method_cache_by_id_in_class(iclass, mid);
}
534
/* Like clear_iclass_method_cache_by_id, but tolerant: the refinement
 * subclass walk can yield non-iclass nodes, which are skipped. */
static void
clear_iclass_method_cache_by_id_for_refinements(VALUE klass, VALUE d)
{
    if (RB_TYPE_P(klass, T_ICLASS)) {
        ID mid = (ID)d;
        clear_method_cache_by_id_in_class(klass, mid);
    }
}
543
/* Public entry point: invalidate all caches for `mid` on a class or module.
 * For a module, every subclass (iclass) that includes it is cleared; for a
 * refinement module, the refined class and its subclasses are cleared too,
 * plus the global refinement cc set. */
void
rb_clear_method_cache(VALUE klass_or_module, ID mid)
{
    if (RB_TYPE_P(klass_or_module, T_MODULE)) {
        VALUE module = klass_or_module; // alias

        if (FL_TEST(module, RMODULE_IS_REFINEMENT)) {
            VALUE refined_class = rb_refinement_module_get_refined_class(module);
            rb_clear_method_cache(refined_class, mid);
            rb_class_foreach_subclass(refined_class, clear_iclass_method_cache_by_id_for_refinements, mid);
            rb_clear_all_refinement_method_cache();
        }
        rb_class_foreach_subclass(module, clear_iclass_method_cache_by_id, mid);
    }
    else {
        clear_method_cache_by_id_in_class(klass_or_module, mid);
    }
}
562
/* Iterator: invalidate one cme from an iclass' callable method table and
 * delete its slot. */
static enum rb_id_table_iterator_result
invalidate_method_entry_in_iclass_callable_m_tbl(VALUE cme, void *data)
{
    vm_cme_invalidate((rb_callable_method_entry_t *)cme);
    return ID_TABLE_DELETE;
}
569
/* Iterator: invalidate the cme of one ccs in an iclass' cc table, free the
 * ccs, and delete its slot. */
static enum rb_id_table_iterator_result
invalidate_ccs_in_iclass_cc_tbl(VALUE value, void *data)
{
    struct rb_class_cc_entries *ccs = (struct rb_class_cc_entries *)value;
    vm_cme_invalidate((rb_callable_method_entry_t *)ccs->cme);
    ruby_sized_xfree(ccs, vm_ccs_alloc_size(ccs->capa));
    return ID_TABLE_DELETE;
}
578
/* Wipe an iclass' callable method table (`cm_tbl`, may be NULL) and cc
 * table (`cc_tbl`, may be 0), invalidating every entry. */
void
rb_invalidate_method_caches(struct rb_id_table *cm_tbl, VALUE cc_tbl)
{
    if (cm_tbl) {
        rb_id_table_foreach_values(cm_tbl, invalidate_method_entry_in_iclass_callable_m_tbl, NULL);
    }
    if (cc_tbl) {
        rb_managed_id_table_foreach_values(cc_tbl, invalidate_ccs_in_iclass_cc_tbl, NULL);
    }
}
589
590static st_index_t
591vm_ci_hash(VALUE v)
592{
593 const struct rb_callinfo *ci = (const struct rb_callinfo *)v;
594 st_index_t h;
595 h = rb_hash_start(ci->mid);
596 h = rb_hash_uint(h, ci->flag);
597 h = rb_hash_uint(h, ci->argc);
598 if (ci->kwarg) {
599 for (int i = 0; i < ci->kwarg->keyword_len; i++) {
600 h = rb_hash_uint(h, ci->kwarg->keywords[i]);
601 }
602 }
603 return h;
604}
605
606static int
607vm_ci_hash_cmp(VALUE v1, VALUE v2)
608{
609 const struct rb_callinfo *ci1 = (const struct rb_callinfo *)v1;
610 const struct rb_callinfo *ci2 = (const struct rb_callinfo *)v2;
611 if (ci1->mid != ci2->mid) return 1;
612 if (ci1->flag != ci2->flag) return 1;
613 if (ci1->argc != ci2->argc) return 1;
614 if (ci1->kwarg != NULL) {
615 VM_ASSERT(ci2->kwarg != NULL); // implied by matching flags
616
617 if (ci1->kwarg->keyword_len != ci2->kwarg->keyword_len)
618 return 1;
619
620 for (int i = 0; i < ci1->kwarg->keyword_len; i++) {
621 if (ci1->kwarg->keywords[i] != ci2->kwarg->keywords[i]) {
622 return 1;
623 }
624 }
625 }
626 else {
627 VM_ASSERT(ci2->kwarg == NULL); // implied by matching flags
628 }
629 return 0;
630}
631
/* st hash type for the VM-wide callinfo interning table (vm->ci_table). */
static const struct st_hash_type vm_ci_hashtype = {
    vm_ci_hash_cmp,
    vm_ci_hash
};
636
/* st_update callback for ci interning. On a hit: if the stored ci is a
 * garbage object (collected but not yet removed), delete the slot and leave
 * *ret NULL so the caller retries; otherwise return the interned ci. On a
 * miss: install the candidate ci as both key and value. */
static int
ci_lookup_i(st_data_t *key, st_data_t *value, st_data_t data, int existing)
{
    const struct rb_callinfo *ci = (const struct rb_callinfo *)*key;
    st_data_t *ret = (st_data_t *)data;

    if (existing) {
        if (rb_objspace_garbage_object_p((VALUE)ci)) {
            *ret = (st_data_t)NULL;
            return ST_DELETE;
        }
        else {
            *ret = *key;
            return ST_STOP;
        }
    }
    else {
        *key = *value = *ret = (st_data_t)ci;
        return ST_CONTINUE;
    }
}
658
/* Intern a callinfo: build a candidate ci, then find-or-insert it in the
 * VM-wide ci table under the VM lock. The st_update loop retries when the
 * callback reports a stale (garbage) entry that it deleted. The kwarg
 * refcount is bumped up-front; the candidate ci holds that reference
 * whether or not it wins the interning race. */
const struct rb_callinfo *
rb_vm_ci_lookup(ID mid, unsigned int flag, unsigned int argc, const struct rb_callinfo_kwarg *kwarg)
{
    rb_vm_t *vm = GET_VM();
    const struct rb_callinfo *ci = NULL;

    if (kwarg) {
        ((struct rb_callinfo_kwarg *)kwarg)->references++;
    }

    struct rb_callinfo *new_ci = SHAREABLE_IMEMO_NEW(struct rb_callinfo, imemo_callinfo, (VALUE)kwarg);
    new_ci->mid = mid;
    new_ci->flag = flag;
    new_ci->argc = argc;

    RB_VM_LOCKING() {
        st_table *ci_table = vm->ci_table;
        VM_ASSERT(ci_table);

        do {
            st_update(ci_table, (st_data_t)new_ci, ci_lookup_i, (st_data_t)&ci);
        } while (ci == NULL);
    }

    VM_ASSERT(ci);

    return ci;
}
687
/* Remove an interned callinfo from the VM-wide ci table (called when the
 * ci imemo is freed). Caller must hold the VM lock. */
void
rb_vm_ci_free(const struct rb_callinfo *ci)
{
    ASSERT_vm_locking();

    rb_vm_t *vm = GET_VM();

    st_data_t key = (st_data_t)ci;
    st_delete(vm->ci_table, &key, NULL);
}
698
700 VALUE *entries;
701 size_t len;
702 size_t capa;
703};
704
/* dfree hook: release the entry array (the ccs themselves are GC-managed). */
static void
cc_refinement_set_free(void *ptr)
{
    struct cc_refinement_entries *e = ptr;
    xfree(e->entries);
}
711
/* dsize hook: report the allocated capacity of the entry array. */
static size_t
cc_refinement_set_memsize(const void *ptr)
{
    const struct cc_refinement_entries *e = ptr;
    return e->capa * sizeof(VALUE);
}
718
719static void
720cc_refinement_set_compact(void *ptr)
721{
722 struct cc_refinement_entries *e = ptr;
723 for (size_t i = 0; i < e->len; i++) {
724 e->entries[i] = rb_gc_location(e->entries[i]);
725 }
726}
727
/* Weak-reference hook: compact the array in place, keeping only entries the
 * GC reports as still alive (classic read/write two-finger filter). */
static void
cc_refinement_set_handle_weak_references(void *ptr)
{
    struct cc_refinement_entries *e = ptr;
    size_t write = 0;
    for (size_t read = 0; read < e->len; read++) {
        if (rb_gc_handle_weak_references_alive_p(e->entries[read])) {
            e->entries[write++] = e->entries[read];
        }
    }
    e->len = write;
}
740
/* TypedData type for the VM-wide refinement cc set. No dmark: entries are
 * weak references, pruned by the handle_weak_references hook instead. */
static const rb_data_type_t cc_refinement_set_type = {
    "VM/cc_refinement_set",
    {
        NULL,
        cc_refinement_set_free,
        cc_refinement_set_memsize,
        cc_refinement_set_compact,
        cc_refinement_set_handle_weak_references,
    },
    0, 0, RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED | RUBY_TYPED_EMBEDDABLE
};
752
/* Allocate the (empty) refinement cc set object and register it with the GC
 * as a holder of weak references. */
VALUE
rb_cc_refinement_set_create(void)
{
    struct cc_refinement_entries *e;
    VALUE obj = TypedData_Make_Struct(0, struct cc_refinement_entries, &cc_refinement_set_type, e);

    e->entries = NULL;
    e->len = 0;
    e->capa = 0;

    rb_gc_declare_weak_references(obj);

    return obj;
}
767
/* Record a refinement cc in the VM-wide set (growing the array by doubling,
 * starting at 16) so rb_clear_all_refinement_method_cache can find it.
 * Takes the VM lock to serialize mutation of the shared set. */
void
rb_vm_insert_cc_refinement(const struct rb_callcache *cc)
{
    rb_vm_t *vm = GET_VM();
    RB_VM_LOCK_ENTER();
    {
        struct cc_refinement_entries *e = RTYPEDDATA_GET_DATA(vm->cc_refinement_set);
        if (e->len == e->capa) {
            size_t new_capa = e->capa == 0 ? 16 : e->capa * 2;
            SIZED_REALLOC_N(e->entries, VALUE, new_capa, e->capa);
            e->capa = new_capa;
        }
        e->entries[e->len++] = (VALUE)cc;

        // We never mark the cc, but we need to issue a writebarrier so that
        // the refinement set can be added to the remembered set
        RB_OBJ_WRITTEN(vm->cc_refinement_set, Qundef, (VALUE)cc);
    }
    RB_VM_LOCK_LEAVE();
}
788
/* Invalidate every refinement call cache in the VM-wide set and empty the
 * set, then tell YJIT to drop its method-lookup assumptions. Dead entries
 * cannot appear here: the weak-reference hook prunes them before this runs. */
void
rb_clear_all_refinement_method_cache(void)
{
    rb_vm_t *vm = GET_VM();

    RB_VM_LOCK_ENTER();
    {
        struct cc_refinement_entries *e = RTYPEDDATA_GET_DATA(vm->cc_refinement_set);
        for (size_t i = 0; i < e->len; i++) {
            VALUE v = e->entries[i];

            // All objects should be live as weak references are pruned in
            // cc_refinement_set_handle_weak_references
            VM_ASSERT(rb_gc_pointer_to_heap_p(v));
            VM_ASSERT(!rb_objspace_garbage_object_p(v));

            const struct rb_callcache *cc = (const struct rb_callcache *)v;
            VM_ASSERT(vm_cc_refinement_p(cc));

            if (vm_cc_valid(cc)) {
                vm_cc_invalidate(cc);
            }
        }
        e->len = 0;
    }
    RB_VM_LOCK_LEAVE();

    rb_yjit_invalidate_all_method_lookup_assumptions();
}
818
/* Insert `me` into a method table under the VM lock, detecting whether the
 * table is an iclass-shared m_tbl (which changes the write-barrier owner,
 * see rb_method_table_insert0). */
void
rb_method_table_insert(VALUE klass, struct rb_id_table *table, ID method_id, const rb_method_entry_t *me)
{
    RB_VM_LOCKING() {
        rb_method_table_insert0(klass, table, method_id, me, RB_TYPE_P(klass, T_ICLASS) && !RICLASS_OWNS_M_TBL_P(klass));
    }
}
826
/* Insert `me` into `table` and issue the write barrier against the object
 * that actually owns the table: for an iclass sharing its origin module's
 * m_tbl, that is the module (RBASIC klass), not the iclass itself. */
void
rb_method_table_insert0(VALUE klass, struct rb_id_table *table, ID method_id, const rb_method_entry_t *me, bool iclass_shared_mtbl)
{
    VALUE table_owner = klass;
    if (iclass_shared_mtbl) {
        table_owner = RBASIC(table_owner)->klass;
    }
    VM_ASSERT_TYPE3(table_owner, T_CLASS, T_ICLASS, T_MODULE);
    rb_id_table_insert(table, method_id, (VALUE)me);
    RB_OBJ_WRITTEN(table_owner, Qundef, (VALUE)me);
}
838
839// rb_f_notimplement has an extra trailing argument to distinguish it from other methods
840// at compile-time to override arity to be -1. But the trailing argument introduces a
841// signature mismatch between caller and callee, so rb_define_method family inserts a
842// method entry with rb_f_notimplement_internal, which has canonical arity=-1 signature,
843// instead of rb_f_notimplement.
844NORETURN(static VALUE rb_f_notimplement_internal(int argc, const VALUE *argv, VALUE obj));
845
846static VALUE
847rb_f_notimplement_internal(int argc, const VALUE *argv, VALUE obj)
848{
850
852}
853
/* Public not-implemented stub. The extra trailing `marker` argument exists
 * only to give it a distinct compile-time signature (see the comment above
 * rb_f_notimplement_internal); it is never read. No return is needed since
 * the internal function is declared NORETURN. */
VALUE
rb_f_notimplement(int argc, const VALUE *argv, VALUE obj, VALUE marker)
{
    rb_f_notimplement_internal(argc, argv, obj);
}
859
/* Register `id` as a NOTIMPLEMENTED method on `mod`. The `(void *)1` opts
 * value appears to be a non-NULL sentinel so rb_method_definition_set takes
 * its opts branch — NOTE(review): confirm against rb_add_method's handling. */
static void
rb_define_notimplement_method_id(VALUE mod, ID id, rb_method_visibility_t visi)
{
    rb_add_method(mod, id, VM_METHOD_TYPE_NOTIMPLEMENTED, (void *)1, visi);
}
865
866void
867rb_add_method_cfunc(VALUE klass, ID mid, VALUE (*func)(ANYARGS), int argc, rb_method_visibility_t visi)
868{
869 if (argc < -2 || 15 < argc) rb_raise(rb_eArgError, "arity out of range: %d for -2..15", argc);
870 if (func != (VALUE(*)(ANYARGS))rb_f_notimplement) {
872 opt.func = func;
873 opt.argc = argc;
874 rb_add_method(klass, mid, VM_METHOD_TYPE_CFUNC, &opt, visi);
875 }
876 else {
877 rb_define_notimplement_method_id(klass, mid, visi);
878 }
879}
880
881void
882rb_add_method_optimized(VALUE klass, ID mid, enum method_optimized_type opt_type, unsigned int index, rb_method_visibility_t visi)
883{
885 .type = opt_type,
886 .index = index,
887 };
888 rb_add_method(klass, mid, VM_METHOD_TYPE_OPTIMIZED, &opt, visi);
889}
890
/* Drop one reference from a method definition; free it when the count hits
 * zero. Uses an atomic fetch-sub so concurrent ractors can release safely.
 * NULL `def` is tolerated (fresh method entries may not have one yet). */
static void
method_definition_release(rb_method_definition_t *def)
{
    if (def != NULL) {
        const unsigned int reference_count_was = RUBY_ATOMIC_FETCH_SUB(def->reference_count, 1);

        RUBY_ASSERT_ALWAYS(reference_count_was != 0);

        if (reference_count_was == 1) {
            if (METHOD_DEBUG) fprintf(stderr, "-%p-%s:1->0 (remove)\n", (void *)def,
                                      rb_id2name(def->original_id));
            SIZED_FREE(def);
        }
        else {
            if (METHOD_DEBUG) fprintf(stderr, "-%p-%s:%d->%d (dec)\n", (void *)def, rb_id2name(def->original_id),
                                      reference_count_was, reference_count_was - 1);
        }
    }
}
910
/* Public wrapper around the static release helper. */
void
rb_method_definition_release(rb_method_definition_t *def)
{
    method_definition_release(def);
}
916
917static void delete_overloaded_cme(const rb_callable_method_entry_t *cme);
918
/* Called when a method entry dies: remove it from the VM's overloaded-cme
 * table if it registered a mandatory-only overload there. */
void
rb_free_method_entry_vm_weak_references(const rb_method_entry_t *me)
{
    if (me->def && me->def->iseq_overload) {
        delete_overloaded_cme((const rb_callable_method_entry_t *)me);
    }
}
926
/* Free a method entry: notify ZJIT about cached entries, then release the
 * entry's reference on its shared definition. */
void
rb_free_method_entry(const rb_method_entry_t *me)
{
#if USE_ZJIT
    if (METHOD_ENTRY_CACHED(me)) {
        rb_zjit_cme_free((const rb_callable_method_entry_t *)me);
    }
#endif

#if USE_YJIT
    // YJIT rb_yjit_root_mark() roots CMEs in `Invariants`,
    // to remove from `Invariants` here.
#endif

    method_definition_release(me->def);
}
943
944static inline rb_method_entry_t *search_method(VALUE klass, ID id, VALUE *defined_class_ptr);
945extern int rb_method_definition_eq(const rb_method_definition_t *d1, const rb_method_definition_t *d2);
946
/* Select the cfunc invoker trampoline for a given arity (-2..15). When the
 * current thread's extension config is not marked ractor-safe, the plain
 * invokers are used; otherwise the ractor_safe_ variants. Arities outside
 * the table are a bug (callers validate the range first). */
static VALUE
(*call_cfunc_invoker_func(int argc))(VALUE recv, int argc, const VALUE *, VALUE (*func)(ANYARGS))
{
    if (!GET_THREAD()->ext_config.ractor_safe) {
        switch (argc) {
          case -2: return &call_cfunc_m2;
          case -1: return &call_cfunc_m1;
          case 0: return &call_cfunc_0;
          case 1: return &call_cfunc_1;
          case 2: return &call_cfunc_2;
          case 3: return &call_cfunc_3;
          case 4: return &call_cfunc_4;
          case 5: return &call_cfunc_5;
          case 6: return &call_cfunc_6;
          case 7: return &call_cfunc_7;
          case 8: return &call_cfunc_8;
          case 9: return &call_cfunc_9;
          case 10: return &call_cfunc_10;
          case 11: return &call_cfunc_11;
          case 12: return &call_cfunc_12;
          case 13: return &call_cfunc_13;
          case 14: return &call_cfunc_14;
          case 15: return &call_cfunc_15;
          default:
            rb_bug("unsupported length: %d", argc);
        }
    }
    else {
        switch (argc) {
          case -2: return &ractor_safe_call_cfunc_m2;
          case -1: return &ractor_safe_call_cfunc_m1;
          case 0: return &ractor_safe_call_cfunc_0;
          case 1: return &ractor_safe_call_cfunc_1;
          case 2: return &ractor_safe_call_cfunc_2;
          case 3: return &ractor_safe_call_cfunc_3;
          case 4: return &ractor_safe_call_cfunc_4;
          case 5: return &ractor_safe_call_cfunc_5;
          case 6: return &ractor_safe_call_cfunc_6;
          case 7: return &ractor_safe_call_cfunc_7;
          case 8: return &ractor_safe_call_cfunc_8;
          case 9: return &ractor_safe_call_cfunc_9;
          case 10: return &ractor_safe_call_cfunc_10;
          case 11: return &ractor_safe_call_cfunc_11;
          case 12: return &ractor_safe_call_cfunc_12;
          case 13: return &ractor_safe_call_cfunc_13;
          case 14: return &ractor_safe_call_cfunc_14;
          case 15: return &ractor_safe_call_cfunc_15;
          default:
            rb_bug("unsupported length: %d", argc);
        }
    }
}
999
/* Fill in a cfunc method body: target function, arity, and the matching
 * invoker trampoline for that arity. */
static void
setup_method_cfunc_struct(rb_method_cfunc_t *cfunc, VALUE (*func)(ANYARGS), int argc)
{
    cfunc->func = func;
    cfunc->argc = argc;
    cfunc->invoker = call_cfunc_invoker_func(argc);
}
1007
1008
1010method_definition_addref(rb_method_definition_t *def, bool complemented)
1011{
1012 unsigned int reference_count_was = RUBY_ATOMIC_FETCH_ADD(def->reference_count, 1);
1013 if (!complemented && reference_count_was > 0) {
1014 /* TODO: A Ractor can reach this via UnboundMethod#bind */
1015 def->aliased = true;
1016 }
1017 if (METHOD_DEBUG) fprintf(stderr, "+%p-%s:%d->%d\n", (void *)def, rb_id2name(def->original_id), reference_count_was, reference_count_was+1);
1018
1019 return def;
1020}
1021
/* Public wrapper: take a (non-complemented) reference on `def`. */
void
rb_method_definition_addref(rb_method_definition_t *def)
{
    method_definition_addref(def, false);
}
1027
/* Attach `def` to method entry `me` (releasing any previous definition and
 * taking a reference on the new one), then initialize the type-specific
 * body from `opts` when provided. Each branch must issue write barriers
 * (RB_OBJ_WRITE) for any VALUE it stores into the definition. */
void
rb_method_definition_set(const rb_method_entry_t *me, rb_method_definition_t *def, void *opts)
{
    method_definition_release(me->def);
    /* me->def is declared const; cast away to install the new definition. */
    *(rb_method_definition_t **)&me->def = method_definition_addref(def, METHOD_ENTRY_COMPLEMENTED(me));

    /* During boot, record entries that may be redefinition-checked later. */
    if (!ruby_running) add_opt_method_entry(me);

    if (opts != NULL) {
        switch (def->type) {
          case VM_METHOD_TYPE_ISEQ:
            {
                rb_method_iseq_t *iseq_body = (rb_method_iseq_t *)opts;
                const rb_iseq_t *iseq = iseq_body->iseqptr;
                rb_cref_t *method_cref, *cref = iseq_body->cref;

                /* setup iseq first (before invoking GC) */
                RB_OBJ_WRITE(me, &def->body.iseq.iseqptr, iseq);

                // Methods defined in `with_jit` should be considered METHOD_ENTRY_BASIC
                if (rb_iseq_attr_p(iseq, BUILTIN_ATTR_C_TRACE)) {
                    METHOD_ENTRY_BASIC_SET((rb_method_entry_t *)me, TRUE);
                }

                if (ISEQ_BODY(iseq)->mandatory_only_iseq) def->iseq_overload = 1;

                if (0) vm_cref_dump("rb_method_definition_create", cref);

                if (cref) {
                    method_cref = cref;
                }
                else {
                    method_cref = vm_cref_new_toplevel(GET_EC()); /* TODO: can we reuse? */
                }

                RB_OBJ_WRITE(me, &def->body.iseq.cref, method_cref);
                return;
            }
          case VM_METHOD_TYPE_CFUNC:
            {
                rb_method_cfunc_t *cfunc = (rb_method_cfunc_t *)opts;
                setup_method_cfunc_struct(UNALIGNED_MEMBER_PTR(def, body.cfunc), cfunc->func, cfunc->argc);
                return;
            }
          case VM_METHOD_TYPE_ATTRSET:
          case VM_METHOD_TYPE_IVAR:
            {
                const rb_execution_context_t *ec = GET_EC();
                rb_control_frame_t *cfp;
                int line;

                /* opts is the attribute's ID packed as a pointer. */
                def->body.attr.id = (ID)(VALUE)opts;

                /* Record the defining source location (for Method#source_location)
                 * from the nearest Ruby-level frame, when one exists. */
                cfp = rb_vm_get_ruby_level_next_cfp(ec, ec->cfp);

                if (cfp && (line = rb_vm_get_sourceline(cfp))) {
                    VALUE location = rb_ary_new3(2, rb_iseq_path(cfp->iseq), INT2FIX(line));
                    rb_ary_freeze(location);
                    RB_OBJ_SET_SHAREABLE(location);
                    RB_OBJ_WRITE(me, &def->body.attr.location, location);
                }
                else {
                    VM_ASSERT(def->body.attr.location == 0);
                }
                return;
            }
          case VM_METHOD_TYPE_BMETHOD:
            RB_OBJ_WRITE(me, &def->body.bmethod.proc, (VALUE)opts);
            def->body.bmethod.defined_ractor_id = rb_ec_ractor_id(GET_EC());
            return;
          case VM_METHOD_TYPE_NOTIMPLEMENTED:
            setup_method_cfunc_struct(UNALIGNED_MEMBER_PTR(def, body.cfunc), (VALUE(*)(ANYARGS))rb_f_notimplement_internal, -1);
            return;
          case VM_METHOD_TYPE_OPTIMIZED:
            def->body.optimized = *(rb_method_optimized_t *)opts;
            return;
          case VM_METHOD_TYPE_REFINED:
            {
                RB_OBJ_WRITE(me, &def->body.refined.orig_me, (rb_method_entry_t *)opts);
                return;
            }
          case VM_METHOD_TYPE_ALIAS:
            RB_OBJ_WRITE(me, &def->body.alias.original_me, (rb_method_entry_t *)opts);
            return;
          case VM_METHOD_TYPE_ZSUPER:
          case VM_METHOD_TYPE_UNDEF:
          case VM_METHOD_TYPE_MISSING:
            return;
        }
    }
}
1119
static void
method_definition_reset(const rb_method_entry_t *me)
{
    rb_method_definition_t *def = me->def;

    /* Re-fire GC write barriers for every object reference held by `def`,
     * needed after attaching an existing definition to `me`. */
    switch (def->type) {
      case VM_METHOD_TYPE_ISEQ:
        RB_OBJ_WRITTEN(me, Qundef, def->body.iseq.iseqptr);
        RB_OBJ_WRITTEN(me, Qundef, def->body.iseq.cref);
        break;
      case VM_METHOD_TYPE_ATTRSET:
      case VM_METHOD_TYPE_IVAR:
        RB_OBJ_WRITTEN(me, Qundef, def->body.attr.location);
        break;
      case VM_METHOD_TYPE_BMETHOD:
        RB_OBJ_WRITTEN(me, Qundef, def->body.bmethod.proc);
        break;
      case VM_METHOD_TYPE_REFINED:
        RB_OBJ_WRITTEN(me, Qundef, def->body.refined.orig_me);
        break;
      case VM_METHOD_TYPE_ALIAS:
        RB_OBJ_WRITTEN(me, Qundef, def->body.alias.original_me);
        break;
      case VM_METHOD_TYPE_CFUNC:
      case VM_METHOD_TYPE_ZSUPER:
      case VM_METHOD_TYPE_MISSING:
      case VM_METHOD_TYPE_OPTIMIZED:
      case VM_METHOD_TYPE_UNDEF:
      case VM_METHOD_TYPE_NOTIMPLEMENTED:
        /* These types hold no VALUE references. */
        break;
    }
}
1152
1153static rb_atomic_t method_serial = 1;
1154
rb_method_definition_create(rb_method_type_t type, ID mid)
{
    /* Create a new definition of `type` for original method name `mid`.
     * NOTE(review): the allocation of `def` is not visible in this chunk;
     * presumably a zero-cleared rb_method_definition_t — verify upstream. */
    def->type = type;
    def->original_id = mid;
    /* A VM-unique serial; used to distinguish definitions (e.g. in caches). */
    def->method_serial = (uintptr_t)RUBY_ATOMIC_FETCH_ADD(method_serial, 1);
    def->box = rb_current_box();
    return def;
}
1166
static rb_method_entry_t *
rb_method_entry_alloc(ID called_id, VALUE owner, VALUE defined_class, rb_method_definition_t *def, bool complement)
{
    /* Allocate a fresh method-entry imemo. Takes a reference on `def` when
     * given; `defined_class` may be 0/Qnil for negative-cache entries. */
    if (def) method_definition_addref(def, complement);
    if (RTEST(defined_class)) {
        // not negative cache
        VM_ASSERT_TYPE2(defined_class, T_CLASS, T_ICLASS);
    }
    rb_method_entry_t *me = SHAREABLE_IMEMO_NEW(rb_method_entry_t, imemo_ment, defined_class);

    // mark_and_move_method_entry pins itself when it is in the overloaded_cme table
    rb_gc_register_pinning_obj((VALUE)me);

    /* me->def is const in the struct type; initialize through a cast. */
    *((rb_method_definition_t **)&me->def) = def;
    me->called_id = called_id;
    me->owner = owner;

    return me;
}
1186
1187static VALUE
1188filter_defined_class(VALUE klass)
1189{
1190 switch (BUILTIN_TYPE(klass)) {
1191 case T_CLASS:
1192 return klass;
1193 case T_MODULE:
1194 return 0;
1195 case T_ICLASS:
1196 break;
1197 default:
1198 break;
1199 }
1200 rb_bug("filter_defined_class: %s", rb_obj_info(klass));
1201}
1202
rb_method_entry_create(ID called_id, VALUE klass, rb_method_visibility_t visi, rb_method_definition_t *def)
{
    /* Create a method entry owned by `klass`. `def` may be NULL and be
     * attached later via rb_method_definition_set(). */
    rb_method_entry_t *me = rb_method_entry_alloc(called_id, klass, filter_defined_class(klass), def, false);
    /* Third flag presumably the BASIC bit, set only during boot — confirm
     * against METHOD_ENTRY_FLAGS_SET. */
    METHOD_ENTRY_FLAGS_SET(me, visi, ruby_running ? FALSE : TRUE);
    if (def != NULL) method_definition_reset(me);
    return me;
}
1211
// Return a cloned ME that's not invalidated (MEs are disposable for caching).
// For refined entries, the inner orig_me and the REFINED definition are
// cloned too, so writes to the copy never touch the shared original.
const rb_method_entry_t *
rb_method_entry_clone(const rb_method_entry_t *src_me)
{
    rb_method_entry_t *me = rb_method_entry_alloc(src_me->called_id, src_me->owner, src_me->defined_class, src_me->def, METHOD_ENTRY_COMPLEMENTED(src_me));

    METHOD_ENTRY_FLAGS_COPY(me, src_me);

    // Also clone inner ME in case of refinement ME
    if (src_me->def &&
        src_me->def->type == VM_METHOD_TYPE_REFINED &&
        src_me->def->body.refined.orig_me) {
        const rb_method_entry_t *orig_me = src_me->def->body.refined.orig_me;
        VM_ASSERT(orig_me->def->type != VM_METHOD_TYPE_REFINED);

        rb_method_entry_t *orig_clone = rb_method_entry_alloc(orig_me->called_id,
            orig_me->owner, orig_me->defined_class, orig_me->def, METHOD_ENTRY_COMPLEMENTED(orig_me));
        METHOD_ENTRY_FLAGS_COPY(orig_clone, orig_me);

        // Clone definition, since writing a VALUE to a shared definition
        // can create reference edges we can't run WBs for.
        rb_method_definition_t *clone_def =
            rb_method_definition_create(VM_METHOD_TYPE_REFINED, src_me->called_id);
        rb_method_definition_set(me, clone_def, orig_clone);
    }
    return me;
}
1239
rb_method_entry_complement_defined_class(const rb_method_entry_t *src_me, ID called_id, VALUE defined_class)
{
    /* Build a "complemented" callable entry: a copy of the module-defined
     * `src_me` bound to the concrete `defined_class`. A refined placeholder
     * gets its orig_me cloned and rebound as well. */
    rb_method_definition_t *def = src_me->def;
    const rb_method_entry_t *refined_orig_me = NULL;

    if (!src_me->defined_class &&
        def->type == VM_METHOD_TYPE_REFINED &&
        def->body.refined.orig_me) {
        const rb_method_entry_t *orig_me =
            rb_method_entry_clone(def->body.refined.orig_me);
        RB_OBJ_WRITE((VALUE)orig_me, &orig_me->defined_class, defined_class);
        refined_orig_me = orig_me;
        def = NULL; /* force a fresh REFINED definition below */
    }

    me = rb_method_entry_alloc(called_id, src_me->owner, defined_class, def, true);
    METHOD_ENTRY_FLAGS_COPY(me, src_me);
    METHOD_ENTRY_COMPLEMENTED_SET(me);
    if (!def) {
        def = rb_method_definition_create(VM_METHOD_TYPE_REFINED, called_id);
        rb_method_definition_set(me, def, (void *)refined_orig_me);
    }

    VM_ASSERT_TYPE(me->owner, T_MODULE);

    return (rb_callable_method_entry_t *)me;
}
1269
void
rb_method_entry_copy(rb_method_entry_t *dst, const rb_method_entry_t *src)
{
    /* Overwrite `dst` in place so it describes the same method as `src`:
     * swap in src's definition (with refcounting) and copy identity/flags. */
    method_definition_release(dst->def);
    *(rb_method_definition_t **)&dst->def = method_definition_addref(src->def, METHOD_ENTRY_COMPLEMENTED(src));
    method_definition_reset(dst); /* re-fire write barriers against dst */
    dst->called_id = src->called_id;
    RB_OBJ_WRITE((VALUE)dst, &dst->owner, src->owner);
    RB_OBJ_WRITE((VALUE)dst, &dst->defined_class, src->defined_class);
    METHOD_ENTRY_FLAGS_COPY(dst, src);
}
1281
static void
make_method_entry_refined(VALUE owner, rb_method_entry_t *me)
{
    /* Convert `me` into a VM_METHOD_TYPE_REFINED placeholder whose orig_me
     * is a copy of the current entry; no-op if already refined. */
    if (me->def->type == VM_METHOD_TYPE_REFINED) {
        return;
    }
    else {

        rb_vm_check_redefinition_opt_method(me, me->owner);

        struct rb_method_entry_struct *orig_me =
            rb_method_entry_alloc(me->called_id,
                                  me->owner,
                                  me->defined_class,
                                  me->def,
                                  true);
        METHOD_ENTRY_FLAGS_COPY(orig_me, me);

        def = rb_method_definition_create(VM_METHOD_TYPE_REFINED, me->called_id);
        rb_method_definition_set(me, def, orig_me);
        /* The placeholder is looked up as public; the original visibility
         * was copied onto orig_me above. */
        METHOD_ENTRY_VISI_SET(me, METHOD_VISI_PUBLIC);
    }
}
1306
1307static inline rb_method_entry_t *
1308lookup_method_table(VALUE klass, ID id)
1309{
1310 st_data_t body;
1311 struct rb_id_table *m_tbl = RCLASS_M_TBL(klass);
1312
1313 if (rb_id_table_lookup(m_tbl, id, &body)) {
1314 return (rb_method_entry_t *) body;
1315 }
1316 else {
1317 return 0;
1318 }
1319}
1320
1321void
1322rb_add_refined_method_entry(VALUE refined_class, ID mid)
1323{
1324 rb_method_entry_t *me = lookup_method_table(refined_class, mid);
1325
1326 if (me) {
1327 make_method_entry_refined(refined_class, me);
1328 rb_clear_method_cache(refined_class, mid);
1329 }
1330 else {
1331 rb_add_method(refined_class, mid, VM_METHOD_TYPE_REFINED, 0, METHOD_VISI_PUBLIC);
1332 }
1333}
1334
1335static void
1336check_override_opt_method_i(VALUE klass, VALUE arg)
1337{
1338 ID mid = (ID)arg;
1339 const rb_method_entry_t *me, *newme;
1340
1341 if (vm_redefinition_check_flag(klass)) {
1342 me = lookup_method_table(RCLASS_ORIGIN(klass), mid);
1343 if (me) {
1344 newme = rb_method_entry(klass, mid);
1345 if (newme != me) rb_vm_check_redefinition_opt_method(me, me->owner);
1346 }
1347 }
1348 rb_class_foreach_subclass(klass, check_override_opt_method_i, (VALUE)mid);
1349}
1350
1351static void
1352check_override_opt_method(VALUE klass, VALUE mid)
1353{
1354 if (rb_vm_check_optimizable_mid(mid)) {
1355 check_override_opt_method_i(klass, mid);
1356 }
1357}
1358
1359static inline rb_method_entry_t* search_method0(VALUE klass, ID id, VALUE *defined_class_ptr, bool skip_refined);
1360/*
1361 * klass->method_table[mid] = method_entry(defined_class, visi, def)
1362 *
 * If def is given (!= NULL), then just use it and ignore original_id and opts.
1364 * If not given, then make a new def with original_id and opts.
1365 */
static rb_method_entry_t *
rb_method_entry_make(VALUE klass, ID mid, VALUE defined_class, rb_method_visibility_t visi,
                     rb_method_type_t type, rb_method_definition_t *def, ID original_id, void *opts)
{
    struct rb_id_table *mtbl;
    st_data_t data;
    int make_refined = 0;
    VALUE orig_klass;

    if (NIL_P(klass)) {
        klass = rb_cObject;
    }
    orig_klass = klass;

    /* Certain core hook methods are forced private when defined normally. */
    if (!RCLASS_SINGLETON_P(klass) &&
        type != VM_METHOD_TYPE_NOTIMPLEMENTED &&
        type != VM_METHOD_TYPE_ZSUPER) {
        switch (mid) {
          case idInitialize:
          case idInitialize_copy:
          case idInitialize_clone:
          case idInitialize_dup:
          case idRespond_to_missing:
            visi = METHOD_VISI_PRIVATE;
        }
    }

    if (type != VM_METHOD_TYPE_REFINED) {
        rb_class_modify_check(klass);
    }

    /* Defining inside a refinement module also installs a refined placeholder
     * on the class being refined. */
    if (RB_TYPE_P(klass, T_MODULE) && FL_TEST(klass, RMODULE_IS_REFINEMENT)) {
        VALUE refined_class = rb_refinement_module_get_refined_class(klass);
        bool search_superclass = type == VM_METHOD_TYPE_ZSUPER && !lookup_method_table(refined_class, mid);
        rb_add_refined_method_entry(refined_class, mid);
        if (search_superclass) {
            rb_method_entry_t *me = lookup_method_table(refined_class, mid);
            me->def->body.refined.orig_me = search_method0(refined_class, mid, NULL, true);
        }
    }
    if (type == VM_METHOD_TYPE_REFINED) {
        rb_method_entry_t *old_me = lookup_method_table(RCLASS_ORIGIN(klass), mid);
        if (old_me) rb_vm_check_redefinition_opt_method(old_me, klass);
    }
    else {
        /* Non-refined definitions go into the origin class (skips any
         * prepended iclasses); flush the visible class's cache too. */
        klass = RCLASS_ORIGIN(klass);
        if (klass != orig_klass) {
            rb_clear_method_cache(orig_klass, mid);
        }
    }
    mtbl = RCLASS_WRITABLE_M_TBL(klass);

    /* check re-definition */
    if (rb_id_table_lookup(mtbl, mid, &data)) {
        rb_method_entry_t *old_me = (rb_method_entry_t *)data;
        rb_method_definition_t *old_def = old_me->def;

        if (rb_method_definition_eq(old_def, def)) return old_me;
        rb_vm_check_redefinition_opt_method(old_me, klass);

        if (old_def->type == VM_METHOD_TYPE_REFINED) make_refined = 1;

        /* Verbose-mode warning about discarding a previous definition. */
        if (RTEST(ruby_verbose) &&
            type != VM_METHOD_TYPE_UNDEF &&
            (old_def->aliased == false) &&
            (!old_def->no_redef_warning) &&
            !make_refined &&
            old_def->type != VM_METHOD_TYPE_UNDEF &&
            old_def->type != VM_METHOD_TYPE_ZSUPER &&
            old_def->type != VM_METHOD_TYPE_ALIAS) {
            const rb_iseq_t *iseq = 0;

            switch (old_def->type) {
              case VM_METHOD_TYPE_ISEQ:
                iseq = def_iseq_ptr(old_def);
                break;
              case VM_METHOD_TYPE_BMETHOD:
                iseq = rb_proc_get_iseq(old_def->body.bmethod.proc, 0);
                break;
              default:
                break;
            }
            if (iseq) {
                rb_warning(
                    "method redefined; discarding old %"PRIsVALUE"\n%s:%d: warning: previous definition of %"PRIsVALUE" was here",
                    rb_id2str(mid),
                    RSTRING_PTR(rb_iseq_path(iseq)),
                    ISEQ_BODY(iseq)->location.first_lineno,
                    rb_id2str(old_def->original_id)
                );
            }
            else {
                rb_warning("method redefined; discarding old %"PRIsVALUE, rb_id2str(mid));
            }
        }
    }

    /* create method entry */
    me = rb_method_entry_create(mid, defined_class, visi, NULL);
    if (def == NULL) {
        def = rb_method_definition_create(type, original_id);
    }
    rb_method_definition_set(me, def, opts);

    rb_clear_method_cache(klass, mid);

    /* check mid */
    if (klass == rb_cObject) {
        switch (mid) {
          case idInitialize:
          case idRespond_to_missing:
          case idMethodMissing:
          case idRespond_to:
            rb_warn("redefining Object#%s may cause infinite loop", rb_id2name(mid));
        }
    }
    /* check mid */
    if (mid == object_id || mid == id__id__ || mid == id__send__) {
        if (type != VM_METHOD_TYPE_CFUNC && search_method(klass, mid, 0)) {
            rb_warn("redefining '%s' may cause serious problems", rb_id2name(mid));
        }
    }

    /* Redefining over a refined placeholder keeps the entry refined. */
    if (make_refined) {
        make_method_entry_refined(klass, me);
    }

    rb_method_table_insert(klass, mtbl, mid, me);

    VM_ASSERT(me->def != NULL);

    /* check optimized method override by a prepended module */
    if (RB_TYPE_P(orig_klass, T_MODULE)) {
        check_override_opt_method(klass, (VALUE)mid);
    }

    return me;
}
1505
1506static st_table *
1507overloaded_cme_table(void)
1508{
1509 VM_ASSERT(GET_VM()->overloaded_cme_table != NULL);
1510 return GET_VM()->overloaded_cme_table;
1511}
1512
1513#if VM_CHECK_MODE > 0
static int
vm_dump_overloaded_cme_table(st_data_t key, st_data_t val, st_data_t dmy)
{
    /* st_foreach callback: print one overloaded_cme_table pair (debug only). */
    fprintf(stderr, "key: "); rp(key);
    fprintf(stderr, "val: "); rp(val);
    return ST_CONTINUE;
}
1521
void
rb_vm_dump_overloaded_cme_table(void)
{
    /* Debug helper (VM_CHECK_MODE only): dump the whole overloaded_cme_table. */
    fprintf(stderr, "== rb_vm_dump_overloaded_cme_table\n");
    st_foreach(overloaded_cme_table(), vm_dump_overloaded_cme_table, 0);
}
1528#endif
1529
1530static int
1531lookup_overloaded_cme_i(st_data_t *key, st_data_t *value, st_data_t data, int existing)
1532{
1533 if (existing) {
1534 const rb_callable_method_entry_t *cme = (const rb_callable_method_entry_t *)*key;
1535 const rb_callable_method_entry_t *monly_cme = (const rb_callable_method_entry_t *)*value;
1536 const rb_callable_method_entry_t **ptr = (const rb_callable_method_entry_t **)data;
1537
1538 if (rb_objspace_garbage_object_p((VALUE)cme) ||
1539 rb_objspace_garbage_object_p((VALUE)monly_cme)) {
1540 *ptr = NULL;
1541 return ST_DELETE;
1542 }
1543 else {
1544 *ptr = monly_cme;
1545 }
1546 }
1547
1548 return ST_STOP;
1549}
1550
1551static const rb_callable_method_entry_t *
1552lookup_overloaded_cme(const rb_callable_method_entry_t *cme)
1553{
1554 ASSERT_vm_locking();
1555
1556 const rb_callable_method_entry_t *monly_cme = NULL;
1557 st_update(overloaded_cme_table(), (st_data_t)cme, lookup_overloaded_cme_i, (st_data_t)&monly_cme);
1558 return monly_cme;
1559}
1560
1561#if VM_CHECK_MODE > 0
rb_vm_lookup_overloaded_cme(const rb_callable_method_entry_t *cme)
{
    /* Debug-only (VM_CHECK_MODE) public wrapper around lookup_overloaded_cme(). */
    return lookup_overloaded_cme(cme);
}
1567#endif
1568
1569static void
1570delete_overloaded_cme(const rb_callable_method_entry_t *cme)
1571{
1572 st_data_t cme_data = (st_data_t)cme;
1573 ASSERT_vm_locking();
1574 st_delete(overloaded_cme_table(), &cme_data, NULL);
1575}
1576
static const rb_callable_method_entry_t *
get_overloaded_cme(const rb_callable_method_entry_t *cme)
{
    /* Return (creating and caching on demand) a CME that wraps `cme`'s
     * mandatory-only iseq variant. */
    const rb_callable_method_entry_t *monly_cme = lookup_overloaded_cme(cme);

    if (monly_cme && !METHOD_ENTRY_INVALIDATED(monly_cme)) {
        return monly_cme;
    }
    else {
        // create
        rb_method_definition_t *def = rb_method_definition_create(VM_METHOD_TYPE_ISEQ, cme->def->original_id);
        rb_method_entry_t *me = rb_method_entry_alloc(cme->called_id,
                                                      cme->owner,
                                                      cme->defined_class,
                                                      def,
                                                      false);

        /* Same cref as the original, but the body is the mandatory-only iseq. */
        RB_OBJ_WRITE(me, &def->body.iseq.cref, cme->def->body.iseq.cref);
        RB_OBJ_WRITE(me, &def->body.iseq.iseqptr, ISEQ_BODY(cme->def->body.iseq.iseqptr)->mandatory_only_iseq);

        ASSERT_vm_locking();
        st_insert(overloaded_cme_table(), (st_data_t)cme, (st_data_t)me);

        METHOD_ENTRY_VISI_SET(me, METHOD_ENTRY_VISI(cme));
        return (rb_callable_method_entry_t *)me;
    }
}
1604
rb_check_overloaded_cme(const rb_callable_method_entry_t *cme, const struct rb_callinfo * const ci)
{
    /* If this call site can use the mandatory-only (no optional/kw handling)
     * variant of an overloaded iseq method, swap in that CME. */
    if (UNLIKELY(cme->def->iseq_overload) &&
        (vm_ci_flag(ci) & (VM_CALL_ARGS_SIMPLE)) &&
        (!(vm_ci_flag(ci) & VM_CALL_FORWARDING)) &&
        (int)vm_ci_argc(ci) == ISEQ_BODY(method_entry_iseqptr(cme))->param.lead_num) {
        VM_ASSERT(cme->def->type == VM_METHOD_TYPE_ISEQ, "type: %d", cme->def->type); // iseq_overload is marked only on ISEQ methods

        cme = get_overloaded_cme(cme);

        VM_ASSERT(cme != NULL);
        METHOD_ENTRY_CACHED_SET((struct rb_callable_method_entry_struct *)cme);
    }

    return cme;
}
1622
/* Fire a `hook` callback (e.g. method_added/removed/undefined) for `mid` on
 * `klass`. For singleton classes, the corresponding singleton_* hook is fired
 * on the attached object instead. */
#define CALL_METHOD_HOOK(klass, hook, mid) do {             \
        const VALUE arg = ID2SYM(mid);                      \
        VALUE recv_class = (klass);                         \
        ID hook_id = (hook);                                \
        if (RCLASS_SINGLETON_P((klass))) {                  \
            recv_class = RCLASS_ATTACHED_OBJECT((klass));   \
            hook_id = singleton_##hook;                     \
        }                                                   \
        rb_funcallv(recv_class, hook_id, 1, &arg);          \
    } while (0)
1633
1634static void
1635method_added(VALUE klass, ID mid)
1636{
1637 if (ruby_running) {
1638 CALL_METHOD_HOOK(klass, added, mid);
1639 }
1640}
1641
void
rb_add_method(VALUE klass, ID mid, rb_method_type_t type, void *opts, rb_method_visibility_t visi)
{
    /* Define klass#mid of the given `type`; `opts` is the type-specific
     * payload (see rb_method_definition_set). Fires the method_added hook
     * except for undef/refined placeholders. */
    RB_VM_LOCKING() {
        rb_method_entry_make(klass, mid, klass, visi, type, NULL, mid, opts);
    }

    if (type != VM_METHOD_TYPE_UNDEF && type != VM_METHOD_TYPE_REFINED) {
        method_added(klass, mid);
    }
}
1653
1654void
1655rb_add_method_iseq(VALUE klass, ID mid, const rb_iseq_t *iseq, rb_cref_t *cref, rb_method_visibility_t visi)
1656{
1657 struct { /* should be same fields with rb_method_iseq_struct */
1658 const rb_iseq_t *iseqptr;
1659 rb_cref_t *cref;
1660 } iseq_body;
1661
1662 iseq_body.iseqptr = iseq;
1663 iseq_body.cref = cref;
1664
1665 rb_add_method(klass, mid, VM_METHOD_TYPE_ISEQ, &iseq_body, visi);
1666}
1667
static rb_method_entry_t *
method_entry_set(VALUE klass, ID mid, const rb_method_entry_t *me,
                 rb_method_visibility_t visi, VALUE defined_class)
{
    /* Install an existing entry's definition under klass#mid (used for
     * alias/visibility style operations), then fire the method_added hook. */
    rb_method_entry_t *newme;
    RB_VM_LOCKING() {
        newme = rb_method_entry_make(klass, mid, defined_class, visi,
                                     me->def->type, me->def, 0, NULL);
        if (newme == me) {
            /* Same definition already present: just refresh visibility and
             * suppress future redefinition warnings. */
            me->def->no_redef_warning = TRUE;
            METHOD_ENTRY_FLAGS_SET(newme, visi, FALSE);
        }
    }

    method_added(klass, mid);
    return newme;
}
1685
rb_method_entry_set(VALUE klass, ID mid, const rb_method_entry_t *me, rb_method_visibility_t visi)
{
    /* Public wrapper: the defined_class defaults to `klass` itself. */
    return method_entry_set(klass, mid, me, visi, klass);
}
1691
1692#define UNDEF_ALLOC_FUNC ((rb_alloc_func_t)-1)
1693
void
rb_define_alloc_func(VALUE klass, VALUE (*func)(VALUE))
{
    /* Register `func` as the allocator for `klass`. Singleton classes cannot
     * be instantiated, so they are rejected. */
    Check_Type(klass, T_CLASS);
    if (RCLASS_SINGLETON_P(klass)) {
        rb_raise(rb_eTypeError, "can't define an allocator for a singleton class");
    }
    RCLASS_SET_ALLOCATOR(klass, func);
}
1703
1704void
1706{
1707 rb_define_alloc_func(klass, UNDEF_ALLOC_FUNC);
1708}
1709
1712{
1713 RBIMPL_ASSERT_TYPE(klass, T_CLASS);
1714
1715 rb_alloc_func_t allocator = RCLASS_ALLOCATOR(klass);
1716 if (allocator == UNDEF_ALLOC_FUNC) return 0;
1717 if (allocator) return allocator;
1718
1719 VALUE *superclasses = RCLASS_SUPERCLASSES(klass);
1720 size_t depth = RCLASS_SUPERCLASS_DEPTH(klass);
1721
1722 for (size_t i = depth; i > 0; i--) {
1723 klass = superclasses[i - 1];
1724 RBIMPL_ASSERT_TYPE(klass, T_CLASS);
1725
1726 allocator = RCLASS_ALLOCATOR(klass);
1727 if (allocator == UNDEF_ALLOC_FUNC) break;
1728 if (allocator) return allocator;
1729 }
1730 return 0;
1731}
1732
const rb_method_entry_t *
rb_method_entry_at(VALUE klass, ID id)
{
    /* Direct (non-inherited) lookup in klass's own method table. */
    return lookup_method_table(klass, id);
}
1738
static inline rb_method_entry_t*
search_method0(VALUE klass, ID id, VALUE *defined_class_ptr, bool skip_refined)
{
    /* Walk the superclass chain looking for `id`. With `skip_refined`, a
     * refined placeholder without an orig_me is passed over. On return,
     * *defined_class_ptr is the class whose table held the entry (or 0). */
    rb_method_entry_t *me = NULL;

    RB_DEBUG_COUNTER_INC(mc_search);

    for (; klass; klass = RCLASS_SUPER(klass)) {
        RB_DEBUG_COUNTER_INC(mc_search_super);
        if ((me = lookup_method_table(klass, id)) != 0) {
            if (!skip_refined || me->def->type != VM_METHOD_TYPE_REFINED ||
                me->def->body.refined.orig_me) {
                break;
            }
        }
    }

    if (defined_class_ptr) *defined_class_ptr = klass;

    if (me == NULL) RB_DEBUG_COUNTER_INC(mc_search_notfound);

    VM_ASSERT(me == NULL || !METHOD_ENTRY_INVALIDATED(me),
              "invalid me, mid:%s, klass:%s(%s)",
              rb_id2name(id),
              RTEST(rb_mod_name(klass)) ? RSTRING_PTR(rb_mod_name(klass)) : "anonymous",
              rb_obj_info(klass));
    return me;
}
1767
static inline rb_method_entry_t*
search_method(VALUE klass, ID id, VALUE *defined_class_ptr)
{
    /* Inheritance-aware lookup; does not skip refined placeholders. */
    return search_method0(klass, id, defined_class_ptr, false);
}
1773
1774static rb_method_entry_t *
1775search_method_protect(VALUE klass, ID id, VALUE *defined_class_ptr)
1776{
1777 rb_method_entry_t *me = search_method(klass, id, defined_class_ptr);
1778
1779 if (!UNDEFINED_METHOD_ENTRY_P(me)) {
1780 return me;
1781 }
1782 else {
1783 return NULL;
1784 }
1785}
1786
const rb_method_entry_t *
rb_method_entry(VALUE klass, ID id)
{
    /* Inherited lookup; NULL for missing or undef'd methods. */
    return search_method_protect(klass, id, NULL);
}
1792
static inline const rb_callable_method_entry_t *
prepare_callable_method_entry(VALUE defined_class, ID id, const rb_method_entry_t * const me, int create)
{
    /* Turn `me` into a callable method entry (one with a defined_class).
     * Module-defined entries (defined_class == 0) need a "complemented"
     * copy, cached per-class in the callable_m_tbl. When `create` is false,
     * return NULL instead of materializing a missing complement. */
    struct rb_id_table *mtbl;
    const rb_callable_method_entry_t *cme;
    VALUE cme_data;
    int cme_found = 0;

    if (me) {
        if (me->defined_class == 0) {
            RB_DEBUG_COUNTER_INC(mc_cme_complement);
            VM_ASSERT_TYPE2(defined_class, T_ICLASS, T_MODULE);

            mtbl = RCLASS_WRITABLE_CALLABLE_M_TBL(defined_class);
            if (mtbl && rb_id_table_lookup(mtbl, id, &cme_data)) {
                cme = (rb_callable_method_entry_t *)cme_data;
                cme_found = 1;
            }
            if (cme_found) {
                RB_DEBUG_COUNTER_INC(mc_cme_complement_hit);
                VM_ASSERT(callable_method_entry_p(cme));
                VM_ASSERT(!METHOD_ENTRY_INVALIDATED(cme));
            }
            else if (create) {
                /* Build the complemented entry and cache it for next time. */
                if (!mtbl) {
                    mtbl = rb_id_table_create(0);
                    RCLASS_WRITE_CALLABLE_M_TBL(defined_class, mtbl);
                }
                cme = rb_method_entry_complement_defined_class(me, me->called_id, defined_class);
                rb_id_table_insert(mtbl, id, (VALUE)cme);
                RB_OBJ_WRITTEN(defined_class, Qundef, (VALUE)cme);
                VM_ASSERT(callable_method_entry_p(cme));
            }
            else {
                return NULL;
            }
        }
        else {
            /* Already has a defined_class: callable as-is. */
            cme = (const rb_callable_method_entry_t *)me;
            VM_ASSERT(callable_method_entry_p(cme));
            VM_ASSERT(!METHOD_ENTRY_INVALIDATED(cme));
        }
        return cme;
    }
    else {
        return NULL;
    }
}
1841
static const rb_callable_method_entry_t *
complemented_callable_method_entry(VALUE klass, ID id)
{
    /* Look up klass#id and return its callable form only if it is already
     * callable or a complemented copy already exists (create == FALSE). */
    VALUE defined_class;
    rb_method_entry_t *me = search_method(klass, id, &defined_class);
    return prepare_callable_method_entry(defined_class, id, me, FALSE);
}
1849
static const rb_callable_method_entry_t *
cached_callable_method_entry(VALUE klass, ID mid)
{
    /* Consult klass's call-cache table for a still-valid CME; purge the
     * entry if it has been invalidated. */
    ASSERT_vm_locking();

    VALUE cc_tbl = RCLASS_WRITABLE_CC_TBL(klass);
    VALUE ccs_data;

    if (cc_tbl && rb_managed_id_table_lookup(cc_tbl, mid, &ccs_data)) {
        struct rb_class_cc_entries *ccs = (struct rb_class_cc_entries *)ccs_data;
        VM_ASSERT(vm_ccs_p(ccs));

        if (LIKELY(!METHOD_ENTRY_INVALIDATED(ccs->cme))) {
            VM_ASSERT(ccs->cme->called_id == mid);
            RB_DEBUG_COUNTER_INC(ccs_found);
            return ccs->cme;
        }
        else {
            /* Stale entry: drop and free it. NOTE(review): the barrier
             * presumably synchronizes other ractors before the mutation —
             * confirm against rb_vm_barrier's contract. */
            rb_vm_barrier();

            rb_managed_id_table_delete(cc_tbl, mid);
            rb_vm_ccs_invalidate_and_free(ccs);
        }
    }

    RB_DEBUG_COUNTER_INC(ccs_not_found);
    return NULL;
}
1878
static void
cache_callable_method_entry(VALUE klass, ID mid, const rb_callable_method_entry_t *cme)
{
    /* Record `cme` in klass's call-cache table under `mid`, creating the
     * table on first use. */
    ASSERT_vm_locking();
    VM_ASSERT(cme != NULL);

    VALUE cc_tbl = RCLASS_WRITABLE_CC_TBL(klass);
    VALUE ccs_data;

    if (!cc_tbl) {
        cc_tbl = rb_vm_cc_table_create(2);
        RCLASS_WRITE_CC_TBL(klass, cc_tbl);
    }

    if (rb_managed_id_table_lookup(cc_tbl, mid, &ccs_data)) {
#if VM_CHECK_MODE > 0
        struct rb_class_cc_entries *ccs = (struct rb_class_cc_entries *)ccs_data;
        VM_ASSERT(ccs->cme == cme);
#endif
    }
    else {
        if (rb_multi_ractor_p()) {
            /* Copy-on-write: build a new table and publish it atomically so
             * the lock-free fast path never observes a partial update. */
            VALUE new_cc_tbl = rb_vm_cc_table_dup(cc_tbl);
            vm_ccs_create(klass, new_cc_tbl, mid, cme);
            RB_OBJ_ATOMIC_WRITE(klass, &RCLASSEXT_CC_TBL(RCLASS_EXT_WRITABLE(klass)), new_cc_tbl);
        }
        else {
            vm_ccs_create(klass, cc_tbl, mid, cme);
        }
    }
}
1910
static const rb_callable_method_entry_t *
negative_cme(ID mid)
{
    /* Return the shared negative-cache CME for `mid` (an entry with no
     * definition), creating and memoizing it in the VM-wide table. */
    rb_vm_t *vm = GET_VM();
    const rb_callable_method_entry_t *cme;
    VALUE cme_data;

    if (rb_id_table_lookup(vm->negative_cme_table, mid, &cme_data)) {
        cme = (rb_callable_method_entry_t *)cme_data;
    }
    else {
        cme = (rb_callable_method_entry_t *)rb_method_entry_alloc(mid, Qnil, Qnil, NULL, false);
        rb_id_table_insert(vm->negative_cme_table, mid, (VALUE)cme);
    }

    VM_ASSERT(cme != NULL);
    return cme;
}
1929
static const rb_callable_method_entry_t *
callable_method_entry_or_negative(VALUE klass, ID mid, VALUE *defined_class_ptr)
{
    /* Resolve klass#mid to a callable CME, returning the shared negative
     * sentinel when the method does not exist. Results are cached in klass's
     * cc_tbl; the fast path reads the cache without taking the VM lock. */
    const rb_callable_method_entry_t *cme;

    VM_ASSERT_TYPE2(klass, T_CLASS, T_ICLASS);

    /* Fast path: lock-free read from cache */
    VALUE cc_tbl = RUBY_ATOMIC_VALUE_LOAD(RCLASS_WRITABLE_CC_TBL(klass));
    if (cc_tbl) {
        VALUE ccs_data;
        if (rb_managed_id_table_lookup(cc_tbl, mid, &ccs_data)) {
            struct rb_class_cc_entries *ccs = (struct rb_class_cc_entries *)ccs_data;
            VM_ASSERT(vm_ccs_p(ccs));

            if (LIKELY(!METHOD_ENTRY_INVALIDATED(ccs->cme))) {
                VM_ASSERT(ccs->cme->called_id == mid);
                if (defined_class_ptr != NULL) *defined_class_ptr = ccs->cme->defined_class;
                RB_DEBUG_COUNTER_INC(ccs_found);
                return ccs->cme;
            }
        }
    }

    /* Slow path: need to lock and potentially populate cache */
    RB_VM_LOCKING() {
        cme = cached_callable_method_entry(klass, mid);

        if (cme) {
            if (defined_class_ptr != NULL) *defined_class_ptr = cme->defined_class;
        }
        else {
            VALUE defined_class;
            rb_method_entry_t *me = search_method(klass, mid, &defined_class);
            if (defined_class_ptr) *defined_class_ptr = defined_class;

            if (me != NULL) {
                cme = prepare_callable_method_entry(defined_class, mid, me, TRUE);
            }
            else {
                cme = negative_cme(mid);
            }

            cache_callable_method_entry(klass, mid, cme);
        }
    }

    return cme;
}
1979
// This is exposed for YJIT so that we can make assumptions that methods are
// not defined. A negative result is a sentinel CME with no definition.
rb_callable_method_entry_or_negative(VALUE klass, ID mid)
{
    return callable_method_entry_or_negative(klass, mid, NULL);
}
1987
1988static const rb_callable_method_entry_t *
1989callable_method_entry(VALUE klass, ID mid, VALUE *defined_class_ptr)
1990{
1991 const rb_callable_method_entry_t *cme;
1992 cme = callable_method_entry_or_negative(klass, mid, defined_class_ptr);
1993 return !UNDEFINED_METHOD_ENTRY_P(cme) ? cme : NULL;
1994}
1995
rb_callable_method_entry(VALUE klass, ID mid)
{
    /* Public callable lookup; NULL when klass#mid is not defined. */
    return callable_method_entry(klass, mid, NULL);
}
2001
2002static const rb_method_entry_t *resolve_refined_method(VALUE refinements, const rb_method_entry_t *me, VALUE *defined_class_ptr);
2003
static const rb_method_entry_t *
method_entry_resolve_refinement(VALUE klass, ID id, int with_refinement, VALUE *defined_class_ptr)
{
    /* Search klass#id and, if the result is a refined placeholder, resolve
     * it to a real implementation — honoring the current cref's refinements
     * only when `with_refinement` is true. */
    const rb_method_entry_t *me = search_method_protect(klass, id, defined_class_ptr);

    if (me) {
        if (me->def->type == VM_METHOD_TYPE_REFINED) {
            if (with_refinement) {
                const rb_cref_t *cref = rb_vm_cref();
                VALUE refinements = cref ? CREF_REFINEMENTS(cref) : Qnil;
                me = resolve_refined_method(refinements, me, defined_class_ptr);
            }
            else {
                me = resolve_refined_method(Qnil, me, defined_class_ptr);
            }

            if (UNDEFINED_METHOD_ENTRY_P(me)) me = NULL;
        }
    }

    return me;
}
2026
const rb_method_entry_t *
rb_method_entry_with_refinements(VALUE klass, ID id, VALUE *defined_class_ptr)
{
    /* Lookup that resolves refined placeholders using the current cref. */
    return method_entry_resolve_refinement(klass, id, TRUE, defined_class_ptr);
}
2032
static const rb_callable_method_entry_t *
callable_method_entry_refinements0(VALUE klass, ID id, VALUE *defined_class_ptr, bool with_refinements,
                                   const rb_callable_method_entry_t *cme)
{
    /* Common tail for refinement-aware callable lookups: non-refined CMEs
     * pass through; refined ones are re-resolved and made callable. */
    if (cme == NULL || LIKELY(cme->def->type != VM_METHOD_TYPE_REFINED)) {
        return cme;
    }
    else {
        VALUE defined_class, *dcp = defined_class_ptr ? defined_class_ptr : &defined_class;
        const rb_method_entry_t *me = method_entry_resolve_refinement(klass, id, with_refinements, dcp);
        return prepare_callable_method_entry(*dcp, id, me, TRUE);
    }
}
2046
static const rb_callable_method_entry_t *
callable_method_entry_refinements(VALUE klass, ID id, VALUE *defined_class_ptr, bool with_refinements)
{
    /* Callable lookup followed by refinement resolution of the result. */
    const rb_callable_method_entry_t *cme = callable_method_entry(klass, id, defined_class_ptr);
    return callable_method_entry_refinements0(klass, id, defined_class_ptr, with_refinements, cme);
}
2053
rb_callable_method_entry_with_refinements(VALUE klass, ID id, VALUE *defined_class_ptr)
{
    /* Callable lookup honoring the current cref's refinements. */
    return callable_method_entry_refinements(klass, id, defined_class_ptr, true);
}
2059
static const rb_callable_method_entry_t *
callable_method_entry_without_refinements(VALUE klass, ID id, VALUE *defined_class_ptr)
{
    /* Callable lookup that resolves refined placeholders to their originals. */
    return callable_method_entry_refinements(klass, id, defined_class_ptr, false);
}
2065
const rb_method_entry_t *
rb_method_entry_without_refinements(VALUE klass, ID id, VALUE *defined_class_ptr)
{
    /* Lookup ignoring active refinements (placeholders resolve to originals). */
    return method_entry_resolve_refinement(klass, id, FALSE, defined_class_ptr);
}
2071
rb_callable_method_entry_without_refinements(VALUE klass, ID id, VALUE *defined_class_ptr)
{
    /* Callable variant of rb_method_entry_without_refinements(). */
    VALUE defined_class, *dcp = defined_class_ptr ? defined_class_ptr : &defined_class;
    const rb_method_entry_t *me = method_entry_resolve_refinement(klass, id, FALSE, dcp);
    return prepare_callable_method_entry(*dcp, id, me, TRUE);
}
2079
static const rb_method_entry_t *
resolve_refined_method(VALUE refinements, const rb_method_entry_t *me, VALUE *defined_class_ptr)
{
    /* Follow a chain of refined placeholders until reaching a real method:
     * prefer an active refinement's implementation, then the original
     * (pre-refinement) entry, then continue up the superclass chain. */
    while (me && me->def->type == VM_METHOD_TYPE_REFINED) {
        VALUE refinement;
        const rb_method_entry_t *tmp_me;
        VALUE super;

        refinement = find_refinement(refinements, me->owner);
        if (!NIL_P(refinement)) {
            tmp_me = search_method_protect(refinement, me->called_id, defined_class_ptr);

            if (tmp_me && tmp_me->def->type != VM_METHOD_TYPE_REFINED) {
                return tmp_me;
            }
        }

        /* Fall back to the method that was shadowed by the refinement. */
        tmp_me = me->def->body.refined.orig_me;
        if (tmp_me) {
            if (!tmp_me->defined_class) {
                VM_ASSERT_TYPE(tmp_me->owner, T_MODULE);
            }
            else if (defined_class_ptr) {
                *defined_class_ptr = tmp_me->defined_class;
            }
            return tmp_me;
        }

        super = RCLASS_SUPER(me->owner);
        if (!super) {
            return 0;
        }

        me = search_method_protect(super, me->called_id, defined_class_ptr);
    }
    return me;
}
2117
2118const rb_method_entry_t *
2119rb_resolve_refined_method(VALUE refinements, const rb_method_entry_t *me)
2120{
2121 return resolve_refined_method(refinements, me, NULL);
2122}
2123
2125rb_resolve_refined_method_callable(VALUE refinements, const rb_callable_method_entry_t *me)
2126{
2127 VALUE defined_class = me->defined_class;
2128 const rb_method_entry_t *resolved_me = resolve_refined_method(refinements, (const rb_method_entry_t *)me, &defined_class);
2129
2130 if (resolved_me && resolved_me->defined_class == 0) {
2131 return rb_method_entry_complement_defined_class(resolved_me, me->called_id, defined_class);
2132 }
2133 else {
2134 return (const rb_callable_method_entry_t *)resolved_me;
2135 }
2136}
2137
/* Implementation of Module#remove_method for one method id.  Removes `mid`
 * from klass's own method table (via its origin, to see through prepended
 * modules), raising NameError when the method is not defined directly
 * there.  Flushes method caches, re-checks redefinition of optimized
 * methods, and fires the method_removed hook on the original receiver. */
static void
remove_method(VALUE klass, ID mid)
{
    VALUE data;
    rb_method_entry_t *me = 0;
    VALUE self = klass;

    rb_class_modify_check(klass);
    klass = RCLASS_ORIGIN(klass);
    if (mid == object_id || mid == id__id__ || mid == id__send__ || mid == idInitialize) {
        rb_warn("removing '%s' may cause serious problems", rb_id2name(mid));
    }

    /* The entry must exist in this class's own table and be a real, defined
     * method: not missing, not an undef placeholder, not an undefined
     * refinement stub. */
    if (!rb_id_table_lookup(RCLASS_M_TBL(klass), mid, &data) ||
        !(me = (rb_method_entry_t *)data) ||
        (!me->def || me->def->type == VM_METHOD_TYPE_UNDEF) ||
        UNDEFINED_REFINED_METHOD_P(me->def)) {
        rb_name_err_raise("method '%1$s' not defined in %2$s",
                          klass, ID2SYM(mid));
    }

    /* Invalidate caches for both the visible class and its origin. */
    if (klass != self) {
        rb_clear_method_cache(self, mid);
    }
    rb_clear_method_cache(klass, mid);
    rb_id_table_delete(RCLASS_WRITABLE_M_TBL(klass), mid);

    rb_vm_check_redefinition_opt_method(me, klass);

    /* Removing a refined method leaves a fresh refined stub behind so
     * refinement dispatch keeps working. */
    if (me->def->type == VM_METHOD_TYPE_REFINED) {
        rb_add_refined_method_entry(klass, mid);
    }

    CALL_METHOD_HOOK(self, removed, mid);
}
2173
2174void
2176{
2177 remove_method(klass, mid);
2178}
2179
2180void
2181rb_remove_method(VALUE klass, const char *name)
2182{
2183 remove_method(klass, rb_intern(name));
2184}
2185
2186/*
2187 * call-seq:
2188 * remove_method(symbol) -> self
2189 * remove_method(string) -> self
2190 *
2191 * Removes the method identified by _symbol_ from the current
2192 * class. For an example, see Module#undef_method.
2193 * String arguments are converted to symbols.
2194 */
2195
2196static VALUE
2197rb_mod_remove_method(int argc, VALUE *argv, VALUE mod)
2198{
2199 int i;
2200
2201 for (i = 0; i < argc; i++) {
2202 VALUE v = argv[i];
2203 ID id = rb_check_id(&v);
2204 if (!id) {
2205 rb_name_err_raise("method '%1$s' not defined in %2$s",
2206 mod, v);
2207 }
2208 remove_method(mod, id);
2209 }
2210 return mod;
2211}
2212
2213static void
2214rb_export_method(VALUE klass, ID name, rb_method_visibility_t visi)
2215{
2217 VALUE defined_class;
2218 VALUE origin_class = RCLASS_ORIGIN(klass);
2219
2220 me = search_method0(origin_class, name, &defined_class, true);
2221
2222 if (!me && RB_TYPE_P(klass, T_MODULE)) {
2223 me = search_method(rb_cObject, name, &defined_class);
2224 }
2225
2226 if (UNDEFINED_METHOD_ENTRY_P(me) ||
2227 UNDEFINED_REFINED_METHOD_P(me->def)) {
2228 rb_print_undef(klass, name, METHOD_VISI_UNDEF);
2229 }
2230
2231 if (METHOD_ENTRY_VISI(me) != visi) {
2232 rb_vm_check_redefinition_opt_method(me, klass);
2233
2234 if (klass == defined_class || origin_class == defined_class) {
2235 if (me->def->type == VM_METHOD_TYPE_REFINED) {
2236 // Refinement method entries should always be public because the refinement
2237 // search is always performed.
2238 if (me->def->body.refined.orig_me) {
2239 METHOD_ENTRY_VISI_SET((rb_method_entry_t *)me->def->body.refined.orig_me, visi);
2240 }
2241 }
2242 else {
2243 METHOD_ENTRY_VISI_SET(me, visi);
2244 }
2245 rb_clear_method_cache(klass, name);
2246 }
2247 else {
2248 rb_add_method(klass, name, VM_METHOD_TYPE_ZSUPER, 0, visi);
2249 }
2250 }
2251}
2252
/* Flags for method_boundp()'s `ex` bitmask. */
#define BOUND_PRIVATE 0x01
#define BOUND_RESPONDS 0x02

/* Is method `id` bound on `klass`?  Returns 1 (bound), 0 (not bound), or 2.
 * `ex` selects the lookup and the visibility filter:
 *   - BOUND_RESPONDS: look up honoring refinements (respond_to? semantics);
 *     a NOTIMPLEMENTED method reports 2 instead of 1 so the caller can tell
 *     "defined but not implemented" apart.
 *   - any bit other than BOUND_RESPONDS (e.g. BOUND_PRIVATE): private
 *     methods do not count as bound, and protected ones also do not when
 *     BOUND_RESPONDS is set as well. */
static int
method_boundp(VALUE klass, ID id, int ex)
{
    const rb_callable_method_entry_t *cme;

    VM_ASSERT_TYPE2(klass, T_CLASS, T_ICLASS);

    if (ex & BOUND_RESPONDS) {
        cme = rb_callable_method_entry_with_refinements(klass, id, NULL);
    }
    else {
        cme = callable_method_entry_without_refinements(klass, id, NULL);
    }

    if (cme != NULL) {
        if (ex & ~BOUND_RESPONDS) {
            /* visibility filtering only when a non-RESPONDS bit is set */
            switch (METHOD_ENTRY_VISI(cme)) {
              case METHOD_VISI_PRIVATE:
                return 0;
              case METHOD_VISI_PROTECTED:
                if (ex & BOUND_RESPONDS) return 0;
                /* fallthrough: protected counts unless RESPONDS is set */
              default:
                break;
            }
        }

        if (cme->def->type == VM_METHOD_TYPE_NOTIMPLEMENTED) {
            if (ex & BOUND_RESPONDS) return 2;
            return 0;
        }
        return 1;
    }
    return 0;
}
2290
2291// deprecated
2292int
2293rb_method_boundp(VALUE klass, ID id, int ex)
2294{
2295 return method_boundp(klass, id, ex);
2296}
2297
/* Set the default method visibility recorded on the current lexical scope
 * (cref).  `module_func` is TRUE while `module_function` is in effect.
 * The cast strips const from the cref's embedded scope_visi so it can be
 * updated in place. */
static void
vm_cref_set_visibility(rb_method_visibility_t method_visi, int module_func)
{
    rb_scope_visibility_t *scope_visi = (rb_scope_visibility_t *)&rb_vm_cref()->scope_visi;
    scope_visi->method_visi = method_visi;
    scope_visi->module_func = module_func;
}
2305
2306void
2307rb_scope_visibility_set(rb_method_visibility_t visi)
2308{
2309 vm_cref_set_visibility(visi, FALSE);
2310}
2311
/* Warn when public/protected/private/module_function is called with no
 * arguments from inside a method body: the visibility change applies to
 * the enclosing scope, which is rarely what the caller intended.  The
 * caller's frame is cfp+1 (one up from the current C frame). */
static void
scope_visibility_check(void)
{
    /* Check for public/protected/private/module_function called inside a method */
    rb_control_frame_t *cfp = GET_EC()->cfp+1;
    if (cfp && cfp->iseq && ISEQ_BODY(cfp->iseq)->type == ISEQ_TYPE_METHOD) {
        rb_warn("calling %s without arguments inside a method may not have the intended effect",
                rb_id2name(rb_frame_this_func()));
    }
}
2322
/* Implementation of bare `module_function`: warn if used inside a method
 * body, then switch the scope default to private + module_function. */
static void
rb_scope_module_func_set(void)
{
    scope_visibility_check();
    vm_cref_set_visibility(METHOD_VISI_PRIVATE, TRUE);
}
2329
const rb_cref_t *rb_vm_cref_in_context(VALUE self, VALUE cbase);
/* Back end of attr_reader/attr_writer/attr_accessor: define reader and/or
 * writer methods for attribute `id` on `klass`, both backed by the @<id>
 * instance variable.  When `ex` is nonzero and a cref is available, the
 * visibility of the current lexical scope applies; otherwise the accessors
 * are public. */
void
rb_attr(VALUE klass, ID id, int read, int write, int ex)
{
    ID attriv;
    rb_method_visibility_t visi;
    const rb_execution_context_t *ec = GET_EC();
    const rb_cref_t *cref = rb_vm_cref_in_context(klass, klass);

    if (!ex || !cref) {
        visi = METHOD_VISI_PUBLIC;
    }
    else {
        switch (vm_scope_visibility_get(ec)) {
          case METHOD_VISI_PRIVATE:
            if (vm_scope_module_func_check(ec)) {
                /* attr inside module_function is almost certainly a mistake */
                rb_warning("attribute accessor as module_function");
            }
            visi = METHOD_VISI_PRIVATE;
            break;
          case METHOD_VISI_PROTECTED:
            visi = METHOD_VISI_PROTECTED;
            break;
          default:
            visi = METHOD_VISI_PUBLIC;
            break;
        }
    }

    /* both accessors share the @<id> instance variable */
    attriv = rb_intern_str(rb_sprintf("@%"PRIsVALUE, rb_id2str(id)));
    if (read) {
        rb_add_method(klass, id, VM_METHOD_TYPE_IVAR, (void *)attriv, visi);
    }
    if (write) {
        rb_add_method(klass, rb_id_attrset(id), VM_METHOD_TYPE_ATTRSET, (void *)attriv, visi);
    }
}
2367
2368void
2370{
2371 const rb_method_entry_t *me;
2372
2373 if (NIL_P(klass)) {
2374 rb_raise(rb_eTypeError, "no class to undef method");
2375 }
2376 rb_class_modify_check(klass);
2377 if (id == object_id || id == id__id__ || id == id__send__ || id == idInitialize) {
2378 rb_warn("undefining '%s' may cause serious problems", rb_id2name(id));
2379 }
2380
2381 me = search_method(klass, id, 0);
2382 if (me && me->def->type == VM_METHOD_TYPE_REFINED) {
2383 me = rb_resolve_refined_method(Qnil, me);
2384 }
2385
2386 if (UNDEFINED_METHOD_ENTRY_P(me) ||
2387 UNDEFINED_REFINED_METHOD_P(me->def)) {
2388 rb_method_name_error(klass, rb_id2str(id));
2389 }
2390
2391 rb_add_method(klass, id, VM_METHOD_TYPE_UNDEF, 0, METHOD_VISI_PUBLIC);
2392
2393 CALL_METHOD_HOOK(klass, undefined, id);
2394}
2395
2396/*
2397 * call-seq:
2398 * undef_method(symbol) -> self
2399 * undef_method(string) -> self
2400 *
2401 * Prevents the current class from responding to calls to the named
2402 * method. Contrast this with <code>remove_method</code>, which deletes
2403 * the method from the particular class; Ruby will still search
2404 * superclasses and mixed-in modules for a possible receiver.
2405 * String arguments are converted to symbols.
2406 *
2407 * class Parent
2408 * def hello
2409 * puts "In parent"
2410 * end
2411 * end
2412 * class Child < Parent
2413 * def hello
2414 * puts "In child"
2415 * end
2416 * end
2417 *
2418 *
2419 * c = Child.new
2420 * c.hello
2421 *
2422 *
2423 * class Child
2424 * remove_method :hello # remove from child, still in parent
2425 * end
2426 * c.hello
2427 *
2428 *
2429 * class Child
2430 * undef_method :hello # prevent any calls to 'hello'
2431 * end
2432 * c.hello
2433 *
2434 * <em>produces:</em>
2435 *
2436 * In child
2437 * In parent
2438 * prog.rb:23: undefined method 'hello' for #<Child:0x401b3bb4> (NoMethodError)
2439 */
2440
2441static VALUE
2442rb_mod_undef_method(int argc, VALUE *argv, VALUE mod)
2443{
2444 int i;
2445 for (i = 0; i < argc; i++) {
2446 VALUE v = argv[i];
2447 ID id = rb_check_id(&v);
2448 if (!id) {
2449 rb_method_name_error(mod, v);
2450 }
2451 rb_undef(mod, id);
2452 }
2453 return mod;
2454}
2455
/* Shared back end of Module#{,public_,private_,protected_}method_defined?.
 * argv is (mid [, inherit=true]).  Returns the visibility of the found
 * method, or METHOD_VISI_UNDEF when:
 *   - mid is not an existing symbol/ID,
 *   - no method is found (refinements are never consulted),
 *   - the method is NOTIMPLEMENTED, or
 *   - inherit is false and the method is not owned by `mod` itself. */
static rb_method_visibility_t
check_definition_visibility(VALUE mod, int argc, VALUE *argv)
{
    const rb_method_entry_t *me;
    VALUE mid, include_super, lookup_mod = mod;
    int inc_super;
    ID id;

    rb_scan_args(argc, argv, "11", &mid, &include_super);
    id = rb_check_id(&mid);
    if (!id) return METHOD_VISI_UNDEF;  /* never interned: cannot be defined */

    if (argc == 1) {
        inc_super = 1;
    }
    else {
        inc_super = RTEST(include_super);
        if (!inc_super) {
            /* skip prepended modules when restricting the lookup to `mod` */
            lookup_mod = RCLASS_ORIGIN(mod);
        }
    }

    me = rb_method_entry_without_refinements(lookup_mod, id, NULL);
    if (me) {
        if (me->def->type == VM_METHOD_TYPE_NOTIMPLEMENTED) return METHOD_VISI_UNDEF;
        if (!inc_super && me->owner != mod) return METHOD_VISI_UNDEF;
        return METHOD_ENTRY_VISI(me);
    }
    return METHOD_VISI_UNDEF;
}
2486
2487/*
2488 * call-seq:
2489 * mod.method_defined?(symbol, inherit=true) -> true or false
2490 * mod.method_defined?(string, inherit=true) -> true or false
2491 *
2492 * Returns +true+ if the named method is defined by
2493 * _mod_. If _inherit_ is set, the lookup will also search _mod_'s
2494 * ancestors. Public and protected methods are matched.
2495 * String arguments are converted to symbols.
2496 *
2497 * module A
2498 * def method1() end
2499 * def protected_method1() end
2500 * protected :protected_method1
2501 * end
2502 * class B
2503 * def method2() end
2504 * def private_method2() end
2505 * private :private_method2
2506 * end
2507 * class C < B
2508 * include A
2509 * def method3() end
2510 * end
2511 *
2512 * A.method_defined? :method1 #=> true
2513 * C.method_defined? "method1" #=> true
2514 * C.method_defined? "method2" #=> true
2515 * C.method_defined? "method2", true #=> true
2516 * C.method_defined? "method2", false #=> false
2517 * C.method_defined? "method3" #=> true
2518 * C.method_defined? "protected_method1" #=> true
2519 * C.method_defined? "method4" #=> false
2520 * C.method_defined? "private_method2" #=> false
2521 */
2522
2523static VALUE
2524rb_mod_method_defined(int argc, VALUE *argv, VALUE mod)
2525{
2526 rb_method_visibility_t visi = check_definition_visibility(mod, argc, argv);
2527 return RBOOL(visi == METHOD_VISI_PUBLIC || visi == METHOD_VISI_PROTECTED);
2528}
2529
2530static VALUE
2531check_definition(VALUE mod, int argc, VALUE *argv, rb_method_visibility_t visi)
2532{
2533 return RBOOL(check_definition_visibility(mod, argc, argv) == visi);
2534}
2535
2536/*
2537 * call-seq:
2538 * mod.public_method_defined?(symbol, inherit=true) -> true or false
2539 * mod.public_method_defined?(string, inherit=true) -> true or false
2540 *
2541 * Returns +true+ if the named public method is defined by
2542 * _mod_. If _inherit_ is set, the lookup will also search _mod_'s
2543 * ancestors.
2544 * String arguments are converted to symbols.
2545 *
2546 * module A
2547 * def method1() end
2548 * end
2549 * class B
2550 * protected
2551 * def method2() end
2552 * end
2553 * class C < B
2554 * include A
2555 * def method3() end
2556 * end
2557 *
2558 * A.method_defined? :method1 #=> true
2559 * C.public_method_defined? "method1" #=> true
2560 * C.public_method_defined? "method1", true #=> true
2561 * C.public_method_defined? "method1", false #=> true
2562 * C.public_method_defined? "method2" #=> false
2563 * C.method_defined? "method2" #=> true
2564 */
2565
2566static VALUE
2567rb_mod_public_method_defined(int argc, VALUE *argv, VALUE mod)
2568{
2569 return check_definition(mod, argc, argv, METHOD_VISI_PUBLIC);
2570}
2571
2572/*
2573 * call-seq:
2574 * mod.private_method_defined?(symbol, inherit=true) -> true or false
2575 * mod.private_method_defined?(string, inherit=true) -> true or false
2576 *
2577 * Returns +true+ if the named private method is defined by
2578 * _mod_. If _inherit_ is set, the lookup will also search _mod_'s
2579 * ancestors.
2580 * String arguments are converted to symbols.
2581 *
2582 * module A
2583 * def method1() end
2584 * end
2585 * class B
2586 * private
2587 * def method2() end
2588 * end
2589 * class C < B
2590 * include A
2591 * def method3() end
2592 * end
2593 *
2594 * A.method_defined? :method1 #=> true
2595 * C.private_method_defined? "method1" #=> false
2596 * C.private_method_defined? "method2" #=> true
2597 * C.private_method_defined? "method2", true #=> true
2598 * C.private_method_defined? "method2", false #=> false
2599 * C.method_defined? "method2" #=> false
2600 */
2601
2602static VALUE
2603rb_mod_private_method_defined(int argc, VALUE *argv, VALUE mod)
2604{
2605 return check_definition(mod, argc, argv, METHOD_VISI_PRIVATE);
2606}
2607
2608/*
2609 * call-seq:
2610 * mod.protected_method_defined?(symbol, inherit=true) -> true or false
2611 * mod.protected_method_defined?(string, inherit=true) -> true or false
2612 *
2613 * Returns +true+ if the named protected method is defined
2614 * _mod_. If _inherit_ is set, the lookup will also search _mod_'s
2615 * ancestors.
2616 * String arguments are converted to symbols.
2617 *
2618 * module A
2619 * def method1() end
2620 * end
2621 * class B
2622 * protected
2623 * def method2() end
2624 * end
2625 * class C < B
2626 * include A
2627 * def method3() end
2628 * end
2629 *
2630 * A.method_defined? :method1 #=> true
2631 * C.protected_method_defined? "method1" #=> false
2632 * C.protected_method_defined? "method2" #=> true
2633 * C.protected_method_defined? "method2", true #=> true
2634 * C.protected_method_defined? "method2", false #=> false
2635 * C.method_defined? "method2" #=> true
2636 */
2637
2638static VALUE
2639rb_mod_protected_method_defined(int argc, VALUE *argv, VALUE mod)
2640{
2641 return check_definition(mod, argc, argv, METHOD_VISI_PROTECTED);
2642}
2643
2644int
2645rb_method_entry_eq(const rb_method_entry_t *m1, const rb_method_entry_t *m2)
2646{
2647 return rb_method_definition_eq(m1->def, m2->def);
2648}
2649
2650static const rb_method_definition_t *
2651original_method_definition(const rb_method_definition_t *def)
2652{
2653 again:
2654 if (def) {
2655 switch (def->type) {
2656 case VM_METHOD_TYPE_REFINED:
2657 if (def->body.refined.orig_me) {
2658 def = def->body.refined.orig_me->def;
2659 goto again;
2660 }
2661 break;
2662 case VM_METHOD_TYPE_ALIAS:
2663 def = def->body.alias.original_me->def;
2664 goto again;
2665 default:
2666 break;
2667 }
2668 }
2669 return def;
2670}
2671
/* Compare two method definitions for equality after stripping alias and
 * refined wrappers: equal when they ultimately execute the same body
 * (same iseq, same C function+arity, same attribute id, ...). */
int
rb_method_definition_eq(const rb_method_definition_t *d1, const rb_method_definition_t *d2)
{
    d1 = original_method_definition(d1);
    d2 = original_method_definition(d2);

    if (d1 == d2) return 1;
    if (!d1 || !d2) return 0;
    if (d1->type != d2->type) return 0;

    switch (d1->type) {
      case VM_METHOD_TYPE_ISEQ:
        return d1->body.iseq.iseqptr == d2->body.iseq.iseqptr;
      case VM_METHOD_TYPE_CFUNC:
        return
            d1->body.cfunc.func == d2->body.cfunc.func &&
            d1->body.cfunc.argc == d2->body.cfunc.argc;
      case VM_METHOD_TYPE_ATTRSET:
      case VM_METHOD_TYPE_IVAR:
        return d1->body.attr.id == d2->body.attr.id;
      case VM_METHOD_TYPE_BMETHOD:
        return RTEST(rb_equal(d1->body.bmethod.proc, d2->body.bmethod.proc));
      case VM_METHOD_TYPE_MISSING:
        return d1->original_id == d2->original_id;
      case VM_METHOD_TYPE_ZSUPER:
      case VM_METHOD_TYPE_NOTIMPLEMENTED:
      case VM_METHOD_TYPE_UNDEF:
        /* bodyless kinds compare equal by type alone */
        return 1;
      case VM_METHOD_TYPE_OPTIMIZED:
        return (d1->body.optimized.type == d2->body.optimized.type) &&
               (d1->body.optimized.index == d2->body.optimized.index);
      case VM_METHOD_TYPE_REFINED:
      case VM_METHOD_TYPE_ALIAS:
        /* already resolved by original_method_definition() above */
        break;
    }
    rb_bug("rb_method_definition_eq: unsupported type: %d", d1->type);
}
2709
/* Fold a method definition into the running hash value `hash`.
 * NOTE(review): the wrapper's type is hashed *before* resolving
 * alias/refined indirection, so an alias and its target hash differently
 * even though rb_method_definition_eq() treats them as equal — confirm
 * this ordering is intended by the callers. */
static st_index_t
rb_hash_method_definition(st_index_t hash, const rb_method_definition_t *def)
{
    hash = rb_hash_uint(hash, def->type);
    def = original_method_definition(def);

    if (!def) return hash;

    switch (def->type) {
      case VM_METHOD_TYPE_ISEQ:
        return rb_hash_uint(hash, (st_index_t)def->body.iseq.iseqptr->body);
      case VM_METHOD_TYPE_CFUNC:
        hash = rb_hash_uint(hash, (st_index_t)def->body.cfunc.func);
        return rb_hash_uint(hash, def->body.cfunc.argc);
      case VM_METHOD_TYPE_ATTRSET:
      case VM_METHOD_TYPE_IVAR:
        return rb_hash_uint(hash, def->body.attr.id);
      case VM_METHOD_TYPE_BMETHOD:
        return rb_hash_proc(hash, def->body.bmethod.proc);
      case VM_METHOD_TYPE_MISSING:
        return rb_hash_uint(hash, def->original_id);
      case VM_METHOD_TYPE_ZSUPER:
      case VM_METHOD_TYPE_NOTIMPLEMENTED:
      case VM_METHOD_TYPE_UNDEF:
        /* bodyless kinds contribute only their type (hashed above) */
        return hash;
      case VM_METHOD_TYPE_OPTIMIZED:
        hash = rb_hash_uint(hash, def->body.optimized.index);
        return rb_hash_uint(hash, def->body.optimized.type);
      case VM_METHOD_TYPE_REFINED:
      case VM_METHOD_TYPE_ALIAS:
        break; /* unreachable */
    }
    rb_bug("rb_hash_method_definition: unsupported method type (%d)", def->type);
}
2744
2745st_index_t
2746rb_hash_method_entry(st_index_t hash, const rb_method_entry_t *me)
2747{
2748 return rb_hash_method_definition(hash, me->def);
2749}
2750
/* Implementation of alias_method / the alias keyword: define `alias_name`
 * on `klass` as an alias of `original_name`.  ZSUPER entries are chased up
 * the superclass chain and alias-of-alias is flattened, so the new entry
 * always points at a real method. */
void
rb_alias(VALUE klass, ID alias_name, ID original_name)
{
    const VALUE target_klass = klass;
    VALUE defined_class;
    const rb_method_entry_t *orig_me;
    rb_method_visibility_t visi = METHOD_VISI_UNDEF;

    if (NIL_P(klass)) {
        rb_raise(rb_eTypeError, "no class to make alias");
    }

    rb_class_modify_check(klass);

  again:
    orig_me = search_method(klass, original_name, &defined_class);

    /* a refined stub aliases the wrapped original, not the stub itself */
    if (orig_me && orig_me->def->type == VM_METHOD_TYPE_REFINED) {
        orig_me = rb_resolve_refined_method(Qnil, orig_me);
    }

    if (UNDEFINED_METHOD_ENTRY_P(orig_me) ||
        UNDEFINED_REFINED_METHOD_P(orig_me->def)) {
        /* modules fall back to Object's methods before giving up */
        if ((!RB_TYPE_P(klass, T_MODULE)) ||
            (orig_me = search_method(rb_cObject, original_name, &defined_class),
             UNDEFINED_METHOD_ENTRY_P(orig_me))) {
            rb_print_undef(target_klass, original_name, METHOD_VISI_UNDEF);
        }
    }

    switch (orig_me->def->type) {
      case VM_METHOD_TYPE_ZSUPER:
        /* zsuper stub: retry from the superclass, keeping this entry's
         * visibility for the alias */
        klass = RCLASS_SUPER(klass);
        original_name = orig_me->def->original_id;
        visi = METHOD_ENTRY_VISI(orig_me);
        goto again;
      case VM_METHOD_TYPE_ALIAS:
        /* flatten so alias chains never nest */
        visi = METHOD_ENTRY_VISI(orig_me);
        orig_me = orig_me->def->body.alias.original_me;
        VM_ASSERT(orig_me->def->type != VM_METHOD_TYPE_ALIAS);
        break;
      default: break;
    }

    if (visi == METHOD_VISI_UNDEF) visi = METHOD_ENTRY_VISI(orig_me);

    if (orig_me->defined_class == 0) {
        /* original came from a module: wrap a clone in an ALIAS entry */
        rb_method_entry_make(target_klass, alias_name, target_klass, visi,
                             VM_METHOD_TYPE_ALIAS, NULL, orig_me->called_id,
                             (void *)rb_method_entry_clone(orig_me));
        method_added(target_klass, alias_name);
    }
    else {
        rb_method_entry_t *alias_me;

        alias_me = method_entry_set(target_klass, alias_name, orig_me, visi, orig_me->owner);
        RB_OBJ_WRITE(alias_me, &alias_me->owner, target_klass);

        if (RB_TYPE_P(target_klass, T_MODULE)) {
            // defined_class should not be set
        }
        else {
            RB_OBJ_WRITE(alias_me, &alias_me->defined_class, orig_me->defined_class);
        }
    }
}
2817
2818/*
2819 * call-seq:
2820 * alias_method(new_name, old_name) -> symbol
2821 *
2822 * Makes <i>new_name</i> a new copy of the method <i>old_name</i>. This can
2823 * be used to retain access to methods that are overridden.
2824 *
2825 * module Mod
2826 * alias_method :orig_exit, :exit #=> :orig_exit
2827 * def exit(code=0)
2828 * puts "Exiting with code #{code}"
2829 * orig_exit(code)
2830 * end
2831 * end
2832 * include Mod
2833 * exit(99)
2834 *
2835 * <em>produces:</em>
2836 *
2837 * Exiting with code 99
2838 */
2839
2840static VALUE
2841rb_mod_alias_method(VALUE mod, VALUE newname, VALUE oldname)
2842{
2843 ID oldid = rb_check_id(&oldname);
2844 if (!oldid) {
2845 rb_print_undef_str(mod, oldname);
2846 }
2847 VALUE id = rb_to_id(newname);
2848 rb_alias(mod, id, oldid);
2849 return ID2SYM(id);
2850}
2851
2852static void
2853check_and_export_method(VALUE self, VALUE name, rb_method_visibility_t visi)
2854{
2855 ID id = rb_check_id(&name);
2856 if (!id) {
2857 rb_print_undef_str(self, name);
2858 }
2859 rb_export_method(self, id, visi);
2860}
2861
2862static void
2863set_method_visibility(VALUE self, int argc, const VALUE *argv, rb_method_visibility_t visi)
2864{
2865 int i;
2866
2867 rb_check_frozen(self);
2868 if (argc == 0) {
2869 rb_warning("%"PRIsVALUE" with no argument is just ignored",
2870 QUOTE_ID(rb_frame_callee()));
2871 return;
2872 }
2873
2874
2875 VALUE v;
2876
2877 if (argc == 1 && (v = rb_check_array_type(argv[0])) != Qnil) {
2878 long j;
2879
2880 for (j = 0; j < RARRAY_LEN(v); j++) {
2881 check_and_export_method(self, RARRAY_AREF(v, j), visi);
2882 }
2883 }
2884 else {
2885 for (i = 0; i < argc; i++) {
2886 check_and_export_method(self, argv[i], visi);
2887 }
2888 }
2889}
2890
2891static VALUE
2892set_visibility(int argc, const VALUE *argv, VALUE module, rb_method_visibility_t visi)
2893{
2894 if (argc == 0) {
2895 scope_visibility_check();
2896 rb_scope_visibility_set(visi);
2897 return Qnil;
2898 }
2899
2900 set_method_visibility(module, argc, argv, visi);
2901 if (argc == 1) {
2902 return argv[0];
2903 }
2904 return rb_ary_new_from_values(argc, argv);
2905}
2906
2907/*
2908 * call-seq:
2909 * public -> nil
2910 * public(method_name) -> method_name
2911 * public(method_name, method_name, ...) -> array
2912 * public(array) -> array
2913 *
2914 * With no arguments, sets the default visibility for subsequently
2915 * defined methods to public. With arguments, sets the named methods to
2916 * have public visibility.
2917 * String arguments are converted to symbols.
2918 * An Array of Symbols and/or Strings is also accepted.
2919 * If a single argument is passed, it is returned.
2920 * If no argument is passed, nil is returned.
2921 * If multiple arguments are passed, the arguments are returned as an array.
2922 */
2923
2924static VALUE
2925rb_mod_public(int argc, VALUE *argv, VALUE module)
2926{
2927 return set_visibility(argc, argv, module, METHOD_VISI_PUBLIC);
2928}
2929
2930/*
2931 * call-seq:
2932 * protected -> nil
2933 * protected(method_name) -> method_name
2934 * protected(method_name, method_name, ...) -> array
2935 * protected(array) -> array
2936 *
2937 * Sets the visibility of a section or of a list of method names as protected.
2938 * Accepts no arguments, a splat of method names (symbols or strings) or an
2939 * array of method names. Returns the arguments that it received.
2940 *
2941 * == Important difference between protected in other languages
2942 *
2943 * Protected methods in Ruby are different from other languages such as Java,
2944 * where methods are marked as protected to give access to subclasses. In Ruby,
2945 * subclasses <b>already have access to all methods defined in the parent
2946 * class</b>, even private ones.
2947 *
2948 * Marking a method as protected allows <b>different objects of the same
2949 * class</b> to call it.
2950 *
2951 * One use case is for comparison methods, such as <code>==</code>, if we want
2952 * to expose a method for comparison between objects of the same class without
2953 * making the method public to objects of other classes.
2954 *
2955 * == Performance considerations
2956 *
2957 * Protected methods are slower than others because they can't use inline
2958 * cache.
2959 *
2960 * == Example
2961 *
2962 * class Account
2963 * # Mark balance as protected, so that we can compare between accounts
2964 * # without making it public.
2965 * attr_reader :balance
2966 * protected :balance
2967 *
2968 * def initialize(balance)
2969 * @balance = balance
2970 * end
2971 *
2972 * def >(other)
2973 * # The invocation to `other.balance` is allowed because `other` is a
2974 * # different object of the same class (Account).
2975 * balance > other.balance
2976 * end
2977 * end
2978 *
2979 * account1 = Account.new(100)
2980 * account2 = Account.new(50)
2981 *
2982 * account1 > account2 # => true (works)
2983 * account1.balance # => NoMethodError (fails because balance is not public)
2984 *
2985 * To show a private method on RDoc, use <code>:doc:</code> instead of this.
2986 */
2987
2988static VALUE
2989rb_mod_protected(int argc, VALUE *argv, VALUE module)
2990{
2991 return set_visibility(argc, argv, module, METHOD_VISI_PROTECTED);
2992}
2993
2994/*
2995 * call-seq:
2996 * private -> nil
2997 * private(method_name) -> method_name
2998 * private(method_name, method_name, ...) -> array
2999 * private(array) -> array
3000 *
3001 * With no arguments, sets the default visibility for subsequently
3002 * defined methods to private. With arguments, sets the named methods
3003 * to have private visibility.
3004 * String arguments are converted to symbols.
3005 * An Array of Symbols and/or Strings is also accepted.
3006 * If a single argument is passed, it is returned.
3007 * If no argument is passed, nil is returned.
3008 * If multiple arguments are passed, the arguments are returned as an array.
3009 *
3010 * module Mod
3011 * def a() end
3012 * def b() end
3013 * private
3014 * def c() end
3015 * private :a
3016 * end
3017 * Mod.private_instance_methods #=> [:a, :c]
3018 *
3019 * Note that to show a private method on RDoc, use <code>:doc:</code>.
3020 */
3021
3022static VALUE
3023rb_mod_private(int argc, VALUE *argv, VALUE module)
3024{
3025 return set_visibility(argc, argv, module, METHOD_VISI_PRIVATE);
3026}
3027
3028/*
3029 * call-seq:
3030 * ruby2_keywords(method_name, ...) -> nil
3031 *
3032 * For the given method names, marks the method as passing keywords through
3033 * a normal argument splat. This should only be called on methods that
3034 * accept an argument splat (<tt>*args</tt>) but not explicit keywords or
3035 * a keyword splat. It marks the method such that if the method is called
3036 * with keyword arguments, the final hash argument is marked with a special
3037 * flag such that if it is the final element of a normal argument splat to
3038 * another method call, and that method call does not include explicit
3039 * keywords or a keyword splat, the final element is interpreted as keywords.
3040 * In other words, keywords will be passed through the method to other
3041 * methods.
3042 *
3043 * This should only be used for methods that delegate keywords to another
3044 * method, and only for backwards compatibility with Ruby versions before 3.0.
3045 * See https://www.ruby-lang.org/en/news/2019/12/12/separation-of-positional-and-keyword-arguments-in-ruby-3-0/
3046 * for details on why +ruby2_keywords+ exists and when and how to use it.
3047 *
3048 * This method will probably be removed at some point, as it exists only
3049 * for backwards compatibility. As it does not exist in Ruby versions before
3050 * 2.7, check that the module responds to this method before calling it:
3051 *
3052 * module Mod
3053 * def foo(meth, *args, &block)
3054 * send(:"do_#{meth}", *args, &block)
3055 * end
3056 * ruby2_keywords(:foo) if respond_to?(:ruby2_keywords, true)
3057 * end
3058 *
3059 * However, be aware that if the +ruby2_keywords+ method is removed, the
3060 * behavior of the +foo+ method using the above approach will change so that
3061 * the method does not pass through keywords.
3062 */
3063
3064static VALUE
3065rb_mod_ruby2_keywords(int argc, VALUE *argv, VALUE module)
3066{
3067 int i;
3068 VALUE origin_class = RCLASS_ORIGIN(module);
3069
3071 rb_check_frozen(module);
3072
3073 for (i = 0; i < argc; i++) {
3074 VALUE v = argv[i];
3075 ID name = rb_check_id(&v);
3077 VALUE defined_class;
3078
3079 if (!name) {
3080 rb_print_undef_str(module, v);
3081 }
3082
3083 me = search_method(origin_class, name, &defined_class);
3084 if (!me && RB_TYPE_P(module, T_MODULE)) {
3085 me = search_method(rb_cObject, name, &defined_class);
3086 }
3087
3088 if (UNDEFINED_METHOD_ENTRY_P(me) ||
3089 UNDEFINED_REFINED_METHOD_P(me->def)) {
3090 rb_print_undef(module, name, METHOD_VISI_UNDEF);
3091 }
3092
3093 if (module == defined_class || origin_class == defined_class) {
3094 switch (me->def->type) {
3095 case VM_METHOD_TYPE_ISEQ:
3096 if (ISEQ_BODY(me->def->body.iseq.iseqptr)->param.flags.has_rest &&
3097 !ISEQ_BODY(me->def->body.iseq.iseqptr)->param.flags.has_post &&
3098 !ISEQ_BODY(me->def->body.iseq.iseqptr)->param.flags.has_kw &&
3099 !ISEQ_BODY(me->def->body.iseq.iseqptr)->param.flags.has_kwrest) {
3100 ISEQ_BODY(me->def->body.iseq.iseqptr)->param.flags.ruby2_keywords = 1;
3101 rb_clear_method_cache(module, name);
3102 }
3103 else {
3104 rb_warn("Skipping set of ruby2_keywords flag for %"PRIsVALUE" (method accepts keywords or post arguments or method does not accept argument splat)", QUOTE_ID(name));
3105 }
3106 break;
3107 case VM_METHOD_TYPE_BMETHOD: {
3108 VALUE procval = me->def->body.bmethod.proc;
3109 if (vm_block_handler_type(procval) == block_handler_type_proc) {
3110 procval = vm_proc_to_block_handler(VM_BH_TO_PROC(procval));
3111 }
3112
3113 if (vm_block_handler_type(procval) == block_handler_type_iseq) {
3114 const struct rb_captured_block *captured = VM_BH_TO_ISEQ_BLOCK(procval);
3115 const rb_iseq_t *iseq = rb_iseq_check(captured->code.iseq);
3116 if (ISEQ_BODY(iseq)->param.flags.has_rest &&
3117 !ISEQ_BODY(iseq)->param.flags.has_post &&
3118 !ISEQ_BODY(iseq)->param.flags.has_kw &&
3119 !ISEQ_BODY(iseq)->param.flags.has_kwrest) {
3120 ISEQ_BODY(iseq)->param.flags.ruby2_keywords = 1;
3121 rb_clear_method_cache(module, name);
3122 }
3123 else {
3124 rb_warn("Skipping set of ruby2_keywords flag for %"PRIsVALUE" (method accepts keywords or post arguments or method does not accept argument splat)", QUOTE_ID(name));
3125 }
3126 break;
3127 }
3128 }
3129 /* fallthrough */
3130 default:
3131 rb_warn("Skipping set of ruby2_keywords flag for %"PRIsVALUE" (method not defined in Ruby)", QUOTE_ID(name));
3132 break;
3133 }
3134 }
3135 else {
3136 rb_warn("Skipping set of ruby2_keywords flag for %"PRIsVALUE" (can only set in method defining module)", QUOTE_ID(name));
3137 }
3138 }
3139 return Qnil;
3140}
3141
3142/*
3143 * call-seq:
3144 * mod.public_class_method(symbol, ...) -> mod
3145 * mod.public_class_method(string, ...) -> mod
3146 * mod.public_class_method(array) -> mod
3147 *
3148 * Makes a list of existing class methods public.
3149 *
3150 * String arguments are converted to symbols.
3151 * An Array of Symbols and/or Strings is also accepted.
3152 */
3153
3154static VALUE
3155rb_mod_public_method(int argc, VALUE *argv, VALUE obj)
3156{
3157 set_method_visibility(rb_singleton_class(obj), argc, argv, METHOD_VISI_PUBLIC);
3158 return obj;
3159}
3160
3161/*
3162 * call-seq:
3163 * mod.private_class_method(symbol, ...) -> mod
3164 * mod.private_class_method(string, ...) -> mod
3165 * mod.private_class_method(array) -> mod
3166 *
3167 * Makes existing class methods private. Often used to hide the default
3168 * constructor <code>new</code>.
3169 *
3170 * String arguments are converted to symbols.
3171 * An Array of Symbols and/or Strings is also accepted.
3172 *
3173 * class SimpleSingleton # Not thread safe
3174 * private_class_method :new
3175 * def SimpleSingleton.create(*args, &block)
3176 * @me = new(*args, &block) if ! @me
3177 * @me
3178 * end
3179 * end
3180 */
3181
3182static VALUE
3183rb_mod_private_method(int argc, VALUE *argv, VALUE obj)
3184{
3185 set_method_visibility(rb_singleton_class(obj), argc, argv, METHOD_VISI_PRIVATE);
3186 return obj;
3187}
3188
3189/*
3190 * call-seq:
3191 * public
3192 * public(symbol, ...)
3193 * public(string, ...)
3194 * public(array)
3195 *
3196 * With no arguments, sets the default visibility for subsequently
3197 * defined methods to public. With arguments, sets the named methods to
3198 * have public visibility.
3199 *
3200 * String arguments are converted to symbols.
3201 * An Array of Symbols and/or Strings is also accepted.
3202 */
3203
3204static VALUE
3205top_public(int argc, VALUE *argv, VALUE _)
3206{
3207 return rb_mod_public(argc, argv, rb_top_main_class("public"));
3208}
3209
3210/*
3211 * call-seq:
3212 * private
3213 * private(symbol, ...)
3214 * private(string, ...)
3215 * private(array)
3216 *
3217 * With no arguments, sets the default visibility for subsequently
3218 * defined methods to private. With arguments, sets the named methods to
3219 * have private visibility.
3220 *
3221 * String arguments are converted to symbols.
3222 * An Array of Symbols and/or Strings is also accepted.
3223 */
3224static VALUE
3225top_private(int argc, VALUE *argv, VALUE _)
3226{
3227 return rb_mod_private(argc, argv, rb_top_main_class("private"));
3228}
3229
3230/*
3231 * call-seq:
3232 * ruby2_keywords(method_name, ...) -> self
3233 *
3234 * For the given method names, marks the method as passing keywords through
3235 * a normal argument splat. See Module#ruby2_keywords in detail.
3236 */
3237static VALUE
3238top_ruby2_keywords(int argc, VALUE *argv, VALUE module)
3239{
3240 return rb_mod_ruby2_keywords(argc, argv, rb_top_main_class("ruby2_keywords"));
3241}
3242
3243/*
3244 * call-seq:
3245 * module_function -> nil
3246 * module_function(method_name) -> method_name
3247 * module_function(method_name, method_name, ...) -> array
3248 *
3249 * Creates module functions for the named methods. These functions may
3250 * be called with the module as a receiver, and also become available
3251 * as instance methods to classes that mix in the module. Module
3252 * functions are copies of the original, and so may be changed
3253 * independently. The instance-method versions are made private. If
3254 * used with no arguments, subsequently defined methods become module
3255 * functions.
3256 * String arguments are converted to symbols.
3257 * If a single argument is passed, it is returned.
3258 * If no argument is passed, nil is returned.
3259 * If multiple arguments are passed, the arguments are returned as an array.
3260 *
3261 * module Mod
3262 * def one
3263 * "This is one"
3264 * end
3265 * module_function :one
3266 * end
3267 * class Cls
3268 * include Mod
3269 * def call_one
3270 * one
3271 * end
3272 * end
3273 * Mod.one #=> "This is one"
3274 * c = Cls.new
3275 * c.call_one #=> "This is one"
3276 * module Mod
3277 * def one
3278 * "This is the new one"
3279 * end
3280 * end
3281 * Mod.one #=> "This is one"
3282 * c.call_one #=> "This is the new one"
3283 */
3284
3285static VALUE
3286rb_mod_modfunc(int argc, VALUE *argv, VALUE module)
3287{
3288 int i;
3289 ID id;
3290 const rb_method_entry_t *me;
3291
3292 if (!RB_TYPE_P(module, T_MODULE)) {
3293 rb_raise(rb_eTypeError, "module_function must be called for modules");
3294 }
3295
3296 if (argc == 0) {
3297 rb_scope_module_func_set();
3298 return Qnil;
3299 }
3300
3301 set_method_visibility(module, argc, argv, METHOD_VISI_PRIVATE);
3302
3303 for (i = 0; i < argc; i++) {
3304 VALUE m = module;
3305
3306 id = rb_to_id(argv[i]);
3307 for (;;) {
3308 me = search_method(m, id, 0);
3309 if (me == 0) {
3310 me = search_method(rb_cObject, id, 0);
3311 }
3312 if (UNDEFINED_METHOD_ENTRY_P(me)) {
3313 rb_print_undef(module, id, METHOD_VISI_UNDEF);
3314 }
3315 if (me->def->type != VM_METHOD_TYPE_ZSUPER) {
3316 break; /* normal case: need not to follow 'super' link */
3317 }
3318 m = RCLASS_SUPER(m);
3319 if (!m)
3320 break;
3321 }
3322 rb_method_entry_set(rb_singleton_class(module), id, me, METHOD_VISI_PUBLIC);
3323 }
3324 if (argc == 1) {
3325 return argv[0];
3326 }
3327 return rb_ary_new_from_values(argc, argv);
3328}
3329
3330#ifdef __GNUC__
3331#pragma push_macro("rb_method_basic_definition_p")
3332#undef rb_method_basic_definition_p
3333#endif
3334int
3335rb_method_basic_definition_p(VALUE klass, ID id)
3336{
3337 const rb_callable_method_entry_t *cme;
3338 if (!klass) return TRUE; /* hidden object cannot be overridden */
3339 cme = rb_callable_method_entry(klass, id);
3340 return (cme && METHOD_ENTRY_BASIC(cme)) ? TRUE : FALSE;
3341}
3342#ifdef __GNUC__
3343#pragma pop_macro("rb_method_basic_definition_p")
3344#endif
3345
3346static VALUE
3347call_method_entry(rb_execution_context_t *ec, VALUE defined_class, VALUE obj, ID id,
3348 const rb_callable_method_entry_t *cme, int argc, const VALUE *argv, int kw_splat)
3349{
3350 VALUE passed_block_handler = vm_passed_block_handler(ec);
3351 VALUE result = rb_vm_call_kw(ec, obj, id, argc, argv, cme, kw_splat);
3352 vm_passed_block_handler_set(ec, passed_block_handler);
3353 return result;
3354}
3355
3356static VALUE
3357basic_obj_respond_to_missing(rb_execution_context_t *ec, VALUE klass, VALUE obj,
3358 VALUE mid, VALUE priv)
3359{
3360 VALUE defined_class, args[2];
3361 const ID rtmid = idRespond_to_missing;
3362 const rb_callable_method_entry_t *const cme = callable_method_entry(klass, rtmid, &defined_class);
3363
3364 if (!cme || METHOD_ENTRY_BASIC(cme)) return Qundef;
3365 args[0] = mid;
3366 args[1] = priv;
3367 return call_method_entry(ec, defined_class, obj, rtmid, cme, 2, args, RB_NO_KEYWORDS);
3368}
3369
3370static inline int
3371basic_obj_respond_to(rb_execution_context_t *ec, VALUE obj, ID id, int pub)
3372{
3373 VALUE klass = CLASS_OF(obj);
3374 VALUE ret;
3375
3376 switch (method_boundp(klass, id, pub|BOUND_RESPONDS)) {
3377 case 2:
3378 return FALSE;
3379 case 0:
3380 ret = basic_obj_respond_to_missing(ec, klass, obj, ID2SYM(id),
3381 RBOOL(!pub));
3382 return RTEST(ret) && !UNDEF_P(ret);
3383 default:
3384 return TRUE;
3385 }
3386}
3387
3388static int
3389vm_respond_to(rb_execution_context_t *ec, VALUE klass, VALUE obj, ID id, int priv)
3390{
3391 VALUE defined_class;
3392 const ID resid = idRespond_to;
3393 const rb_callable_method_entry_t *const cme = callable_method_entry(klass, resid, &defined_class);
3394
3395 if (!cme) return -1;
3396 if (METHOD_ENTRY_BASIC(cme)) {
3397 return -1;
3398 }
3399 else {
3400 int argc = 1;
3401 VALUE args[2];
3402 VALUE result;
3403
3404 args[0] = ID2SYM(id);
3405 args[1] = Qtrue;
3406 if (priv) {
3407 argc = rb_method_entry_arity((const rb_method_entry_t *)cme);
3408 if (argc > 2) {
3409 rb_raise(rb_eArgError,
3410 "respond_to? must accept 1 or 2 arguments (requires %d)",
3411 argc);
3412 }
3413 if (argc != 1) {
3414 argc = 2;
3415 }
3416 else if (!NIL_P(ruby_verbose)) {
3417 VALUE location = rb_method_entry_location((const rb_method_entry_t *)cme);
3419 "%"PRIsVALUE"%c""respond_to?(:%"PRIsVALUE") uses"
3420 " the deprecated method signature, which takes one parameter",
3421 (RCLASS_SINGLETON_P(klass) ? obj : klass),
3422 (RCLASS_SINGLETON_P(klass) ? '.' : '#'),
3423 QUOTE_ID(id));
3424 if (!NIL_P(location)) {
3425 VALUE path = RARRAY_AREF(location, 0);
3426 VALUE line = RARRAY_AREF(location, 1);
3427 if (!NIL_P(path)) {
3429 RSTRING_PTR(path), NUM2INT(line),
3430 "respond_to? is defined here");
3431 }
3432 }
3433 }
3434 }
3435 result = call_method_entry(ec, defined_class, obj, resid, cme, argc, args, RB_NO_KEYWORDS);
3436 return RTEST(result);
3437 }
3438}
3439
3440int
3441rb_obj_respond_to(VALUE obj, ID id, int priv)
3442{
3443 rb_execution_context_t *ec = GET_EC();
3444 return rb_ec_obj_respond_to(ec, obj, id, priv);
3445}
3446
3447int
3448rb_ec_obj_respond_to(rb_execution_context_t *ec, VALUE obj, ID id, int priv)
3449{
3450 VALUE klass = CLASS_OF(obj);
3451 int ret = vm_respond_to(ec, klass, obj, id, priv);
3452 if (ret == -1) ret = basic_obj_respond_to(ec, obj, id, !priv);
3453 return ret;
3454}
3455
3456int
3458{
3459 return rb_obj_respond_to(obj, id, FALSE);
3460}
3461
3462
3463/*
3464 * call-seq:
3465 * obj.respond_to?(symbol, include_all=false) -> true or false
3466 * obj.respond_to?(string, include_all=false) -> true or false
3467 *
3468 * Returns +true+ if _obj_ responds to the given method. Private and
3469 * protected methods are included in the search only if the optional
3470 * second parameter evaluates to +true+.
3471 *
3472 * If the method is not implemented,
3473 * as Process.fork on Windows, File.lchmod on GNU/Linux, etc.,
3474 * false is returned.
3475 *
3476 * If the method is not defined, <code>respond_to_missing?</code>
3477 * method is called and the result is returned.
3478 *
3479 * When the method name parameter is given as a string, the string is
3480 * converted to a symbol.
3481 */
3482
3483static VALUE
3484obj_respond_to(int argc, VALUE *argv, VALUE obj)
3485{
3486 VALUE mid, priv;
3487 ID id;
3488 rb_execution_context_t *ec = GET_EC();
3489
3490 rb_scan_args(argc, argv, "11", &mid, &priv);
3491 if (!(id = rb_check_id(&mid))) {
3492 VALUE ret = basic_obj_respond_to_missing(ec, CLASS_OF(obj), obj,
3493 rb_to_symbol(mid), priv);
3494 if (UNDEF_P(ret)) ret = Qfalse;
3495 return ret;
3496 }
3497 return RBOOL(basic_obj_respond_to(ec, obj, id, !RTEST(priv)));
3498}
3499
3500/*
3501 * call-seq:
3502 * obj.respond_to_missing?(symbol, include_all) -> true or false
3503 * obj.respond_to_missing?(string, include_all) -> true or false
3504 *
3505 * DO NOT USE THIS DIRECTLY.
3506 *
3507 * Hook method to return whether the _obj_ can respond to _id_ method
3508 * or not.
3509 *
3510 * When the method name parameter is given as a string, the string is
3511 * converted to a symbol.
3512 *
3513 * See #respond_to?, and the example of BasicObject.
3514 */
3515static VALUE
3516obj_respond_to_missing(VALUE obj, VALUE mid, VALUE priv)
3517{
3518 return Qfalse;
3519}
3520
3521void
3522Init_eval_method(void)
3523{
3524 rb_define_method(rb_mKernel, "respond_to?", obj_respond_to, -1);
3525 rb_define_method(rb_mKernel, "respond_to_missing?", obj_respond_to_missing, 2);
3526
3527 rb_define_method(rb_cModule, "remove_method", rb_mod_remove_method, -1);
3528 rb_define_method(rb_cModule, "undef_method", rb_mod_undef_method, -1);
3529 rb_define_method(rb_cModule, "alias_method", rb_mod_alias_method, 2);
3530 rb_define_private_method(rb_cModule, "public", rb_mod_public, -1);
3531 rb_define_private_method(rb_cModule, "protected", rb_mod_protected, -1);
3532 rb_define_private_method(rb_cModule, "private", rb_mod_private, -1);
3533 rb_define_private_method(rb_cModule, "module_function", rb_mod_modfunc, -1);
3534 rb_define_private_method(rb_cModule, "ruby2_keywords", rb_mod_ruby2_keywords, -1);
3535
3536 rb_define_method(rb_cModule, "method_defined?", rb_mod_method_defined, -1);
3537 rb_define_method(rb_cModule, "public_method_defined?", rb_mod_public_method_defined, -1);
3538 rb_define_method(rb_cModule, "private_method_defined?", rb_mod_private_method_defined, -1);
3539 rb_define_method(rb_cModule, "protected_method_defined?", rb_mod_protected_method_defined, -1);
3540 rb_define_method(rb_cModule, "public_class_method", rb_mod_public_method, -1);
3541 rb_define_method(rb_cModule, "private_class_method", rb_mod_private_method, -1);
3542
3544 "public", top_public, -1);
3546 "private", top_private, -1);
3548 "ruby2_keywords", top_ruby2_keywords, -1);
3549
3550 {
3551#define REPLICATE_METHOD(klass, id) do { \
3552 const rb_method_entry_t *me = rb_method_entry((klass), (id)); \
3553 rb_method_entry_set((klass), (id), me, METHOD_ENTRY_VISI(me)); \
3554 } while (0)
3555
3556 REPLICATE_METHOD(rb_eException, idMethodMissing);
3557 REPLICATE_METHOD(rb_eException, idRespond_to);
3558 REPLICATE_METHOD(rb_eException, idRespond_to_missing);
3559 }
3560}
#define RUBY_ASSERT_ALWAYS(expr,...)
A variant of RUBY_ASSERT that does not interface with RUBY_DEBUG.
Definition assert.h:199
std::atomic< unsigned > rb_atomic_t
Type that is eligible for atomic operations.
Definition atomic.h:69
#define RUBY_ATOMIC_FETCH_ADD(var, val)
Atomically replaces the value pointed by var with the result of addition of val to the old value of v...
Definition atomic.h:118
#define RUBY_ATOMIC_FETCH_SUB(var, val)
Atomically replaces the value pointed by var with the result of subtraction of val to the old value o...
Definition atomic.h:129
#define rb_define_method(klass, mid, func, arity)
Defines klass#mid.
#define rb_define_private_method(klass, mid, func, arity)
Defines klass#mid and makes it private.
VALUE rb_singleton_class(VALUE obj)
Finds or creates the singleton class of the passed object.
Definition class.c:2922
void rb_class_modify_check(VALUE klass)
Asserts that klass is not a frozen class.
Definition eval.c:428
int rb_scan_args(int argc, const VALUE *argv, const char *fmt,...)
Retrieves argument from argc and argv to given VALUE references according to the format string.
Definition class.c:3255
#define xfree
Old name of ruby_xfree.
Definition xmalloc.h:58
#define Qundef
Old name of RUBY_Qundef.
#define INT2FIX
Old name of RB_INT2FIX.
Definition long.h:48
#define ID2SYM
Old name of RB_ID2SYM.
Definition symbol.h:44
#define UNREACHABLE_RETURN
Old name of RBIMPL_UNREACHABLE_RETURN.
Definition assume.h:29
#define ZALLOC
Old name of RB_ZALLOC.
Definition memory.h:402
#define CLASS_OF
Old name of rb_class_of.
Definition globals.h:205
#define T_MODULE
Old name of RUBY_T_MODULE.
Definition value_type.h:70
#define T_ICLASS
Old name of RUBY_T_ICLASS.
Definition value_type.h:66
#define rb_ary_new3
Old name of rb_ary_new_from_args.
Definition array.h:658
#define Qtrue
Old name of RUBY_Qtrue.
#define NUM2INT
Old name of RB_NUM2INT.
Definition int.h:44
#define Qnil
Old name of RUBY_Qnil.
#define Qfalse
Old name of RUBY_Qfalse.
#define NIL_P
Old name of RB_NIL_P.
#define T_CLASS
Old name of RUBY_T_CLASS.
Definition value_type.h:58
#define BUILTIN_TYPE
Old name of RB_BUILTIN_TYPE.
Definition value_type.h:85
#define FL_TEST
Old name of RB_FL_TEST.
Definition fl_type.h:127
void rb_notimplement(void)
Definition error.c:3889
void rb_category_warn(rb_warning_category_t category, const char *fmt,...)
Identical to rb_category_warning(), except it reports unless $VERBOSE is nil.
Definition error.c:476
#define ruby_verbose
This variable controls whether the interpreter is in debug mode.
Definition error.h:476
VALUE rb_eTypeError
TypeError exception.
Definition error.c:1418
void rb_category_compile_warn(rb_warning_category_t category, const char *file, int line, const char *fmt,...)
Identical to rb_compile_warn(), except it also accepts category.
Definition error.c:439
void rb_warn(const char *fmt,...)
Identical to rb_warning(), except it reports unless $VERBOSE is nil.
Definition error.c:466
VALUE rb_eException
Mother of all exceptions.
Definition error.c:1410
void rb_warning(const char *fmt,...)
Issues a warning.
Definition error.c:497
@ RB_WARN_CATEGORY_DEPRECATED
Warning is for deprecated features.
Definition error.h:48
VALUE rb_mKernel
Kernel module.
Definition object.c:60
VALUE rb_cObject
Object class.
Definition object.c:61
VALUE rb_cModule
Module class.
Definition object.c:62
VALUE rb_equal(VALUE lhs, VALUE rhs)
This function is an optimised version of calling #==.
Definition object.c:176
#define RB_OBJ_WRITTEN(old, oldv, young)
Identical to RB_OBJ_WRITE(), except it doesn't write any values, but only a WB declaration.
Definition gc.h:615
#define RB_OBJ_WRITE(old, slot, young)
Declaration of a "back" pointer.
Definition gc.h:603
VALUE rb_ary_new_from_values(long n, const VALUE *elts)
Identical to rb_ary_new_from_args(), except how objects are passed.
VALUE rb_check_array_type(VALUE obj)
Try converting an object to its array representation using its to_ary method, if any.
VALUE rb_ary_freeze(VALUE obj)
Freeze an array, preventing further modifications.
void rb_undef(VALUE mod, ID mid)
Inserts a method entry that hides previous method definition of the given name.
Definition vm_method.c:2369
#define UNLIMITED_ARGUMENTS
This macro is used in conjunction with rb_check_arity().
Definition error.h:35
static int rb_check_arity(int argc, int min, int max)
Ensures that the passed integer is in the passed range.
Definition error.h:284
#define rb_hash_uint(h, i)
Just another name of st_hash_uint.
Definition string.h:943
st_index_t rb_hash_start(st_index_t i)
Starts a series of hashing.
Definition random.c:1777
VALUE rb_mod_name(VALUE mod)
Queries the name of a module.
Definition variable.c:136
int rb_respond_to(VALUE obj, ID mid)
Queries if the object responds to the method.
Definition vm_method.c:3457
VALUE(* rb_alloc_func_t)(VALUE klass)
This is the type of functions that ruby calls when trying to allocate an object.
Definition vm.h:219
void rb_undef_alloc_func(VALUE klass)
Deletes the allocator function of a class.
Definition vm_method.c:1705
void rb_alias(VALUE klass, ID dst, ID src)
Resembles alias.
Definition vm_method.c:2752
void rb_attr(VALUE klass, ID name, int need_reader, int need_writer, int honour_visibility)
This function resembles now-deprecated Module#attr.
Definition vm_method.c:2332
void rb_remove_method(VALUE klass, const char *name)
Removes a method.
Definition vm_method.c:2181
rb_alloc_func_t rb_get_alloc_func(VALUE klass)
Queries the allocator function of a class.
Definition vm_method.c:1711
void rb_clear_constant_cache_for_id(ID id)
Clears the inline constant caches associated with a particular ID.
Definition vm_method.c:329
void rb_remove_method_id(VALUE klass, ID mid)
Identical to rb_remove_method(), except it accepts the method name as ID.
Definition vm_method.c:2175
void rb_define_alloc_func(VALUE klass, rb_alloc_func_t func)
Sets the allocator function of a class.
VALUE rb_f_notimplement(int argc, const VALUE *argv, VALUE obj, VALUE marker)
Raises rb_eNotImpError.
Definition vm_method.c:855
int rb_method_boundp(VALUE klass, ID id, int ex)
Queries if the klass has this method.
Definition vm_method.c:2293
int rb_obj_respond_to(VALUE obj, ID mid, int private_p)
Identical to rb_respond_to(), except it additionally takes the visibility parameter.
Definition vm_method.c:3441
ID rb_check_id(volatile VALUE *namep)
Detects if the given name is already interned or not.
Definition symbol.c:1171
VALUE rb_to_symbol(VALUE name)
Identical to rb_intern_str(), except it generates a dynamic symbol if necessary.
Definition string.c:12701
ID rb_to_id(VALUE str)
Identical to rb_intern_str(), except it tries to convert the parameter object to an instance of rb_cS...
Definition string.c:12691
int capa
Designed capacity of the buffer.
Definition io.h:11
VALUE type(ANYARGS)
ANYARGS-ed function type.
#define RARRAY_LEN
Just another name of rb_array_len.
Definition rarray.h:51
#define RARRAY_AREF(a, i)
Definition rarray.h:403
#define RBASIC(obj)
Convenient casting macro.
Definition rbasic.h:40
#define RCLASS_SUPER
Just another name of rb_class_get_superclass.
Definition rclass.h:44
#define RUBY_TYPED_FREE_IMMEDIATELY
Macros to see if each corresponding flag is defined.
Definition rtypeddata.h:119
#define TypedData_Make_Struct(klass, type, data_type, sval)
Identical to TypedData_Wrap_Struct, except it allocates a new data region internally instead of takin...
Definition rtypeddata.h:561
#define RB_NO_KEYWORDS
Do not pass keywords.
Definition scan_args.h:69
#define RTEST
This is an old name of RB_TEST.
#define _(args)
This was a transition path from K&R to ANSI.
Definition stdarg.h:35
#define ANYARGS
Functions declared using this macro take arbitrary arguments, including void.
Definition stdarg.h:64
Definition vm_method.c:388
Definition method.h:63
CREF (Class REFerence)
Definition method.h:45
This is the struct that holds necessary info for a struct.
Definition rtypeddata.h:211
size_t(* dsize)(const void *)
This function is to query the size of the underlying memory regions.
Definition rtypeddata.h:251
RUBY_DATA_FUNC dfree
This function is called when the object is no longer used.
Definition rtypeddata.h:241
struct rb_data_type_struct::@56 function
Function pointers.
const char * wrap_struct_name
Name of structs of this kind.
Definition rtypeddata.h:218
VALUE flags
Type-specific behavioural characteristics.
Definition rtypeddata.h:325
Definition method.h:55
rb_cref_t * cref
class reference, should be marked
Definition method.h:144
const rb_iseq_t * iseqptr
iseq pointer, should be separated from iseqval
Definition method.h:143
Definition st.h:79
uintptr_t ID
Type that represents a Ruby identifier such as a variable name.
Definition value.h:52
uintptr_t VALUE
Type that represents a Ruby object.
Definition value.h:40
static void Check_Type(VALUE v, enum ruby_value_type t)
Identical to RB_TYPE_P(), except it raises exceptions on predication failure.
Definition value_type.h:433
static bool RB_TYPE_P(VALUE obj, enum ruby_value_type t)
Queries if the given object is of given type.
Definition value_type.h:376