#include "ruby/internal/config.h"

#include "internal/array.h"
#include "internal/compile.h"
#include "internal/complex.h"
#include "internal/encoding.h"
#include "internal/error.h"
#include "internal/gc.h"
#include "internal/hash.h"
#include "internal/io.h"
#include "internal/numeric.h"
#include "internal/object.h"
#include "internal/rational.h"
#include "internal/re.h"
#include "internal/ruby_parser.h"
#include "internal/symbol.h"
#include "internal/thread.h"
#include "internal/variable.h"

#include "vm_callinfo.h"

#include "insns_info.inc"
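/* FIXNUM_INC adds i to a Fixnum-encoded VALUE without unboxing it: the tag
 * bit of INT2FIX(i) is masked off, so the sum keeps a valid Fixnum tag. */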
#define FIXNUM_INC(n, i) ((n)+(INT2FIX(i)&~FIXNUM_FLAG))

    unsigned int rescued: 2;
    unsigned int unremovable: 1;

    enum ruby_vminsn_type insn_id;

    const void *ensure_node;

const ID rb_iseq_shared_exc_local_tbl[] = {idERROR_INFO};
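/* Compile-time debug tracing.  With CPDEBUG enabled these macros print
 * indented node/value traces through the ruby_debug_print_* helpers; in
 * normal builds they expand to no-ops. */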
#define compile_debug CPDEBUG

#define compile_debug ISEQ_COMPILE_DATA(iseq)->option->debug_level

#define compile_debug_print_indent(level) \
    ruby_debug_print_indent((level), compile_debug, gl_node_level * 2)

#define debugp(header, value) (void) \
  (compile_debug_print_indent(1) && \
   ruby_debug_print_value(1, compile_debug, (header), (value)))

#define debugi(header, id) (void) \
  (compile_debug_print_indent(1) && \
   ruby_debug_print_id(1, compile_debug, (header), (id)))

#define debugp_param(header, value) (void) \
  (compile_debug_print_indent(1) && \
   ruby_debug_print_value(1, compile_debug, (header), (value)))

#define debugp_verbose(header, value) (void) \
  (compile_debug_print_indent(2) && \
   ruby_debug_print_value(2, compile_debug, (header), (value)))

#define debugp_verbose_node(header, value) (void) \
  (compile_debug_print_indent(10) && \
   ruby_debug_print_value(10, compile_debug, (header), (value)))

#define debug_node_start(node) ((void) \
  (compile_debug_print_indent(1) && \
   (ruby_debug_print_node(1, CPDEBUG, "", (const NODE *)(node)), gl_node_level)), \

#define debug_node_end() gl_node_level --

#define debugi(header, id)                 ((void)0)
#define debugp(header, value)              ((void)0)
#define debugp_verbose(header, value)      ((void)0)
#define debugp_verbose_node(header, value) ((void)0)
#define debugp_param(header, value)        ((void)0)
#define debug_node_start(node)             ((void)0)
#define debug_node_end()                   ((void)0)

#if CPDEBUG > 1 || CPDEBUG < 0
#define printf ruby_debug_printf
#define debugs if (compile_debug_print_indent(1)) ruby_debug_printf
#define debug_compile(msg, v) ((void)(compile_debug_print_indent(1) && fputs((msg), stderr)), (v))

#define debugs if(0)printf
#define debug_compile(msg, v) (v)
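/*
 * Shorthand for building the linked list of LINK_ELEMENTs that represents
 * the instruction sequence during compilation.  NEW_* allocate labels and
 * child iseqs, ADD_* append instructions, method sends, traces and labels
 * to an anchor, and INSERT_* splice an instruction next to an existing
 * element.  All of them implicitly use the local variable "iseq"; e.g.
 * ADD_INSN1(ret, node, putobject, obj) appends a one-operand instruction
 * to the "ret" anchor.
 */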
#define LVAR_ERRINFO (1)

#define NEW_LABEL(l) new_label_body(iseq, (l))
#define LABEL_FORMAT "<L%03d>"

#define NEW_ISEQ(node, name, type, line_no) \
  new_child_iseq(iseq, (node), rb_fstring(name), 0, (type), (line_no))

#define NEW_CHILD_ISEQ(node, name, type, line_no) \
  new_child_iseq(iseq, (node), rb_fstring(name), iseq, (type), (line_no))

#define NEW_CHILD_ISEQ_WITH_CALLBACK(callback_func, name, type, line_no) \
  new_child_iseq_with_callback(iseq, (callback_func), (name), iseq, (type), (line_no))

#define ADD_SEQ(seq1, seq2) \
  APPEND_LIST((seq1), (seq2))

#define ADD_INSN(seq, line_node, insn) \
  ADD_ELEM((seq), (LINK_ELEMENT *) new_insn_body(iseq, nd_line(line_node), nd_node_id(line_node), BIN(insn), 0))

#define ADD_SYNTHETIC_INSN(seq, line_no, node_id, insn) \
  ADD_ELEM((seq), (LINK_ELEMENT *) new_insn_body(iseq, (line_no), (node_id), BIN(insn), 0))

#define INSERT_BEFORE_INSN(next, line_no, node_id, insn) \
  ELEM_INSERT_PREV(&(next)->link, (LINK_ELEMENT *) new_insn_body(iseq, line_no, node_id, BIN(insn), 0))

#define INSERT_AFTER_INSN(prev, line_no, node_id, insn) \
  ELEM_INSERT_NEXT(&(prev)->link, (LINK_ELEMENT *) new_insn_body(iseq, line_no, node_id, BIN(insn), 0))

#define ADD_INSN1(seq, line_node, insn, op1) \
  ADD_ELEM((seq), (LINK_ELEMENT *) \
           new_insn_body(iseq, nd_line(line_node), nd_node_id(line_node), BIN(insn), 1, (VALUE)(op1)))

#define INSERT_BEFORE_INSN1(next, line_no, node_id, insn, op1) \
  ELEM_INSERT_PREV(&(next)->link, (LINK_ELEMENT *) \
                   new_insn_body(iseq, line_no, node_id, BIN(insn), 1, (VALUE)(op1)))

#define INSERT_AFTER_INSN1(prev, line_no, node_id, insn, op1) \
  ELEM_INSERT_NEXT(&(prev)->link, (LINK_ELEMENT *) \
                   new_insn_body(iseq, line_no, node_id, BIN(insn), 1, (VALUE)(op1)))

#define LABEL_REF(label) ((label)->refcnt++)

#define ADD_INSNL(seq, line_node, insn, label) (ADD_INSN1(seq, line_node, insn, label), LABEL_REF(label))

#define ADD_INSN2(seq, line_node, insn, op1, op2) \
  ADD_ELEM((seq), (LINK_ELEMENT *) \
           new_insn_body(iseq, nd_line(line_node), nd_node_id(line_node), BIN(insn), 2, (VALUE)(op1), (VALUE)(op2)))

#define ADD_INSN3(seq, line_node, insn, op1, op2, op3) \
  ADD_ELEM((seq), (LINK_ELEMENT *) \
           new_insn_body(iseq, nd_line(line_node), nd_node_id(line_node), BIN(insn), 3, (VALUE)(op1), (VALUE)(op2), (VALUE)(op3)))

#define ADD_SEND(seq, line_node, id, argc) \
  ADD_SEND_R((seq), (line_node), (id), (argc), NULL, (VALUE)INT2FIX(0), NULL)

#define ADD_SEND_WITH_FLAG(seq, line_node, id, argc, flag) \
  ADD_SEND_R((seq), (line_node), (id), (argc), NULL, (VALUE)(flag), NULL)

#define ADD_SEND_WITH_BLOCK(seq, line_node, id, argc, block) \
  ADD_SEND_R((seq), (line_node), (id), (argc), (block), (VALUE)INT2FIX(0), NULL)

#define ADD_CALL_RECEIVER(seq, line_node) \
  ADD_INSN((seq), (line_node), putself)

#define ADD_CALL(seq, line_node, id, argc) \
  ADD_SEND_R((seq), (line_node), (id), (argc), NULL, (VALUE)INT2FIX(VM_CALL_FCALL), NULL)

#define ADD_CALL_WITH_BLOCK(seq, line_node, id, argc, block) \
  ADD_SEND_R((seq), (line_node), (id), (argc), (block), (VALUE)INT2FIX(VM_CALL_FCALL), NULL)

#define ADD_SEND_R(seq, line_node, id, argc, block, flag, keywords) \
  ADD_ELEM((seq), (LINK_ELEMENT *) new_insn_send(iseq, nd_line(line_node), nd_node_id(line_node), (id), (VALUE)(argc), (block), (VALUE)(flag), (keywords)))

#define ADD_TRACE(seq, event) \
  ADD_ELEM((seq), (LINK_ELEMENT *)new_trace_body(iseq, (event), 0))
#define ADD_TRACE_WITH_DATA(seq, event, data) \
  ADD_ELEM((seq), (LINK_ELEMENT *)new_trace_body(iseq, (event), (data)))
static void iseq_add_getlocal(rb_iseq_t *iseq, LINK_ANCHOR *const seq, const NODE *const line_node, int idx, int level);
static void iseq_add_setlocal(rb_iseq_t *iseq, LINK_ANCHOR *const seq, const NODE *const line_node, int idx, int level);

#define ADD_GETLOCAL(seq, line_node, idx, level) iseq_add_getlocal(iseq, (seq), (line_node), (idx), (level))
#define ADD_SETLOCAL(seq, line_node, idx, level) iseq_add_setlocal(iseq, (seq), (line_node), (idx), (level))
#define ADD_LABEL(seq, label) \
  ADD_ELEM((seq), (LINK_ELEMENT *) (label))

#define APPEND_LABEL(seq, before, label) \
  APPEND_ELEM((seq), (before), (LINK_ELEMENT *) (label))

#define ADD_ADJUST(seq, line_node, label) \
  ADD_ELEM((seq), (LINK_ELEMENT *) new_adjust_body(iseq, (label), nd_line(line_node)))

#define ADD_ADJUST_RESTORE(seq, label) \
  ADD_ELEM((seq), (LINK_ELEMENT *) new_adjust_body(iseq, (label), -1))

#define LABEL_UNREMOVABLE(label) \
    ((label) ? (LABEL_REF(label), (label)->unremovable=1) : 0)
#define ADD_CATCH_ENTRY(type, ls, le, iseqv, lc) do { \
    VALUE _e = rb_ary_new3(5, (type), \
                           (VALUE)(ls) | 1, (VALUE)(le) | 1, \
                           (VALUE)(iseqv), (VALUE)(lc) | 1); \
    LABEL_UNREMOVABLE(ls); \
    if (NIL_P(ISEQ_COMPILE_DATA(iseq)->catch_table_ary)) \
        RB_OBJ_WRITE(iseq, &ISEQ_COMPILE_DATA(iseq)->catch_table_ary, rb_ary_hidden_new(3)); \
    rb_ary_push(ISEQ_COMPILE_DATA(iseq)->catch_table_ary, freeze_hide_obj(_e)); \
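/* COMPILE* wrap iseq_compile_each: COMPILE keeps the node's value on the
 * stack, COMPILE_POPPED compiles it for a value-discarding context, and
 * COMPILE_ passes the popped flag through explicitly. */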
#define COMPILE(anchor, desc, node) \
  (debug_compile("== " desc "\n", \
                 iseq_compile_each(iseq, (anchor), (node), 0)))

#define COMPILE_POPPED(anchor, desc, node) \
  (debug_compile("== " desc "\n", \
                 iseq_compile_each(iseq, (anchor), (node), 1)))

#define COMPILE_(anchor, desc, node, popped) \
  (debug_compile("== " desc "\n", \
                 iseq_compile_each(iseq, (anchor), (node), (popped))))

#define COMPILE_RECV(anchor, desc, node, recv) \
    (private_recv_p(node) ? \
     (ADD_INSN(anchor, node, putself), VM_CALL_FCALL) : \
     COMPILE(anchor, desc, recv) ? 0 : -1)

#define OPERAND_AT(insn, idx) \
  (((INSN*)(insn))->operands[(idx)])

#define INSN_OF(insn) \
  (((INSN*)(insn))->insn_id)

#define IS_INSN(link) ((link)->type == ISEQ_ELEMENT_INSN)
#define IS_LABEL(link) ((link)->type == ISEQ_ELEMENT_LABEL)
#define IS_ADJUST(link) ((link)->type == ISEQ_ELEMENT_ADJUST)
#define IS_TRACE(link) ((link)->type == ISEQ_ELEMENT_TRACE)
#define IS_INSN_ID(iobj, insn) (INSN_OF(iobj) == BIN(insn))
#define IS_NEXT_INSN_ID(link, insn) \
    ((link)->next && IS_INSN((link)->next) && IS_INSN_ID((link)->next, insn))
append_compile_error(const rb_iseq_t *iseq, int line, const char *fmt, ...)
    VALUE err_info = ISEQ_COMPILE_DATA(iseq)->err_info;
    VALUE file = rb_iseq_path(iseq);

    err = rb_syntax_error_append(err, file, line, -1, NULL, fmt, args);
    if (NIL_P(err_info)) {
        RB_OBJ_WRITE(iseq, &ISEQ_COMPILE_DATA(iseq)->err_info, err);
    else if (!err_info) {

compile_bug(rb_iseq_t *iseq, int line, const char *fmt, ...)
    rb_report_bug_valist(rb_iseq_path(iseq), line, fmt, args);

#define COMPILE_ERROR append_compile_error

#define ERROR_ARGS_AT(n) iseq, nd_line(n),
#define ERROR_ARGS ERROR_ARGS_AT(node)
#define EXPECT_NODE(prefix, node, ndtype, errval) \
    const NODE *error_node = (node); \
    enum node_type error_type = nd_type(error_node); \
    if (error_type != (ndtype)) { \
        COMPILE_ERROR(ERROR_ARGS_AT(error_node) \
                      prefix ": " #ndtype " is expected, but %s", \
                      ruby_node_name(error_type)); \

#define EXPECT_NODE_NONULL(prefix, parent, ndtype, errval) \
        COMPILE_ERROR(ERROR_ARGS_AT(parent) \
                      prefix ": must be " #ndtype ", but 0"); \

#define UNKNOWN_NODE(prefix, node, errval) \
    const NODE *error_node = (node); \
    COMPILE_ERROR(ERROR_ARGS_AT(error_node) prefix ": unknown node (%s)", \
                  ruby_node_name(nd_type(error_node))); \

#define CHECK(sub) if (!(sub)) {BEFORE_RETURN;return COMPILE_NG;}
#define NO_CHECK(sub) (void)(sub)

#define DECL_ANCHOR(name) \
    LINK_ANCHOR name[1] = {{{ISEQ_ELEMENT_ANCHOR,},&name[0].anchor}}
#define INIT_ANCHOR(name) \
    ((name->last = &name->anchor)->next = NULL)
freeze_hide_obj(VALUE obj)
    RBASIC_CLEAR_CLASS(obj);

#include "optinsn.inc"
#if OPT_INSTRUCTIONS_UNIFICATION
#include "optunifs.inc"

#define ISEQ_ARG iseq,
#define ISEQ_ARG_DECLARE rb_iseq_t *iseq,

#define ISEQ_ARG_DECLARE

#define gl_node_level ISEQ_COMPILE_DATA(iseq)->node_level
static int insn_data_length(INSN *iobj);
static int calc_sp_depth(int depth, INSN *iobj);
static INSN *new_insn_body(rb_iseq_t *iseq, int line_no, int node_id, enum ruby_vminsn_type insn_id, int argc, ...);
static int iseq_set_exception_local_table(rb_iseq_t *iseq);
static int iseq_set_exception_table(rb_iseq_t *iseq);
static int iseq_set_optargs_table(rb_iseq_t *iseq);
static int iseq_set_parameters_lvar_state(const rb_iseq_t *iseq);
static int compile_hash(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *node, int method_call_keywords, int popped);
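/* Debug-only consistency checks: verify_list walks the doubly linked element
 * list of an anchor, and verify_call_cache asserts that every call cache is
 * still the vm_cc_empty() placeholder. */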
verify_list(ISEQ_ARG_DECLARE const char *info, LINK_ANCHOR *const anchor)
    if (!compile_debug) return;

    list = anchor->anchor.next;
    plist = &anchor->anchor;

    if (plist != list->prev) {

    if (anchor->last != plist && anchor->last != 0) {

    rb_bug("list verify error: %08x (%s)", flag, info);

#define verify_list(info, anchor) verify_list(iseq, (info), (anchor))
    VALUE *original = rb_iseq_original_iseq(iseq);

    while (i < ISEQ_BODY(iseq)->iseq_size) {
        VALUE insn = original[i];
        const char *types = insn_op_types(insn);

        for (int j=0; types[j]; j++) {
            if (types[j] == TS_CALLDATA) {
                if (cc != vm_cc_empty()) {
                    rb_bug("call cache is not initialized by vm_cc_empty()");

    for (unsigned int i=0; i<ISEQ_BODY(iseq)->ci_size; i++) {
        struct rb_call_data *cd = &ISEQ_BODY(iseq)->call_data[i];

        if (cc != NULL && cc != vm_cc_empty()) {
            rb_bug("call cache is not initialized by vm_cc_empty()");
    elem->prev = anchor->last;
    anchor->last->next = elem;
    verify_list("add", anchor);

    elem->next = before->next;
    elem->next->prev = elem;
    if (before == anchor->last) anchor->last = elem;
    verify_list("add", anchor);

#define ADD_ELEM(anchor, elem) ADD_ELEM(iseq, (anchor), (elem))
#define APPEND_ELEM(anchor, before, elem) APPEND_ELEM(iseq, (anchor), (before), (elem))
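/* Branch coverage support: setup_branch registers a branch site keyed by its
 * code location, and the trace helper below emits a
 * RUBY_EVENT_COVERAGE_BRANCH event followed by a nop so each branch has a
 * distinct instruction to attribute hits to. */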
branch_coverage_valid_p(rb_iseq_t *iseq, int first_line)
    if (!ISEQ_COVERAGE(iseq)) return 0;
    if (!ISEQ_BRANCH_COVERAGE(iseq)) return 0;
    if (first_line <= 0) return 0;

    const int first_lineno = loc->beg_pos.lineno, first_column = loc->beg_pos.column;
    const int last_lineno = loc->end_pos.lineno, last_column = loc->end_pos.column;

    rb_hash_aset(structure, key, branch);

    if (!branch_coverage_valid_p(iseq, loc->beg_pos.lineno)) return Qundef;

    VALUE branch_base = rb_hash_aref(structure, key);

    if (NIL_P(branch_base)) {
        branch_base = setup_branch(loc, type, structure, key);
        branches = rb_hash_new();

generate_dummy_line_node(int lineno, int node_id)
    nd_set_line(&dummy, lineno);
    nd_set_node_id(&dummy, node_id);

    if (!branch_coverage_valid_p(iseq, loc->beg_pos.lineno)) return;

    VALUE branch = rb_hash_aref(branches, key);

        branch = setup_branch(loc, type, branches, key);

    ADD_TRACE_WITH_DATA(seq, RUBY_EVENT_COVERAGE_BRANCH, counter_idx);
    ADD_SYNTHETIC_INSN(seq, loc->end_pos.lineno, node_id, nop);

#define ISEQ_LAST_LINE(iseq) (ISEQ_COMPILE_DATA(iseq)->last_line)
validate_label(st_data_t name, st_data_t label, st_data_t arg)
    if (!lobj->link.next) {
        COMPILE_ERROR(iseq, lobj->position,
                      "%"PRIsVALUE": undefined label",

    st_foreach(labels_table, validate_label, (st_data_t)iseq);
    st_free_table(labels_table);
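/* Accessors that hide the per-node-type layout of the parser AST: each
 * get_nd_* helper switches on nd_type() and returns the matching field of
 * the concrete RNODE_* struct, or calls rb_bug() for unexpected nodes. */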
get_nd_recv(const NODE *node)
    switch (nd_type(node)) {
        return RNODE_CALL(node)->nd_recv;
        return RNODE_OPCALL(node)->nd_recv;
        return RNODE_QCALL(node)->nd_recv;
        return RNODE_ATTRASGN(node)->nd_recv;
        return RNODE_OP_ASGN1(node)->nd_recv;
        return RNODE_OP_ASGN2(node)->nd_recv;
        rb_bug("unexpected node: %s", ruby_node_name(nd_type(node)));

get_node_call_nd_mid(const NODE *node)
    switch (nd_type(node)) {
        return RNODE_CALL(node)->nd_mid;
        return RNODE_OPCALL(node)->nd_mid;
        return RNODE_FCALL(node)->nd_mid;
        return RNODE_QCALL(node)->nd_mid;
        return RNODE_VCALL(node)->nd_mid;
        return RNODE_ATTRASGN(node)->nd_mid;
        rb_bug("unexpected node: %s", ruby_node_name(nd_type(node)));

get_nd_args(const NODE *node)
    switch (nd_type(node)) {
        return RNODE_CALL(node)->nd_args;
        return RNODE_OPCALL(node)->nd_args;
        return RNODE_FCALL(node)->nd_args;
        return RNODE_QCALL(node)->nd_args;
        return RNODE_ATTRASGN(node)->nd_args;
        rb_bug("unexpected node: %s", ruby_node_name(nd_type(node)));

get_node_colon_nd_mid(const NODE *node)
    switch (nd_type(node)) {
        return RNODE_COLON2(node)->nd_mid;
        return RNODE_COLON3(node)->nd_mid;
        rb_bug("unexpected node: %s", ruby_node_name(nd_type(node)));

get_nd_vid(const NODE *node)
    switch (nd_type(node)) {
        return RNODE_LASGN(node)->nd_vid;
        return RNODE_DASGN(node)->nd_vid;
        return RNODE_IASGN(node)->nd_vid;
        return RNODE_CVASGN(node)->nd_vid;
        rb_bug("unexpected node: %s", ruby_node_name(nd_type(node)));

get_nd_value(const NODE *node)
    switch (nd_type(node)) {
        return RNODE_LASGN(node)->nd_value;
        return RNODE_DASGN(node)->nd_value;
        rb_bug("unexpected node: %s", ruby_node_name(nd_type(node)));

get_string_value(const NODE *node)
    switch (nd_type(node)) {
        return RB_OBJ_SET_SHAREABLE(rb_node_str_string_val(node));
        return RB_OBJ_SET_SHAREABLE(rb_node_file_path_val(node));
        rb_bug("unexpected node: %s", ruby_node_name(nd_type(node)));
    (*ifunc->func)(iseq, ret, ifunc->data);
    ADD_SYNTHETIC_INSN(ret, ISEQ_COMPILE_DATA(iseq)->last_line, -1, leave);
    CHECK(iseq_setup_insn(iseq, ret));
    return iseq_setup(iseq, ret);

static bool drop_unreachable_return(LINK_ANCHOR *ret);

        NO_CHECK(COMPILE(ret, "nil", node));
        iseq_set_local_table(iseq, 0, 0);

    else if (nd_type_p(node, NODE_SCOPE)) {
        iseq_set_local_table(iseq, RNODE_SCOPE(node)->nd_tbl, (NODE *)RNODE_SCOPE(node)->nd_args);
        iseq_set_arguments(iseq, ret, (NODE *)RNODE_SCOPE(node)->nd_args);
        iseq_set_parameters_lvar_state(iseq);

        switch (ISEQ_BODY(iseq)->type) {
          case ISEQ_TYPE_BLOCK:
            LABEL *start = ISEQ_COMPILE_DATA(iseq)->start_label = NEW_LABEL(0);
            LABEL *end = ISEQ_COMPILE_DATA(iseq)->end_label = NEW_LABEL(0);

            start->rescued = LABEL_RESCUE_BEG;
            end->rescued = LABEL_RESCUE_END;

            ADD_SYNTHETIC_INSN(ret, ISEQ_BODY(iseq)->location.first_lineno, -1, nop);
            ADD_LABEL(ret, start);
            CHECK(COMPILE(ret, "block body", RNODE_SCOPE(node)->nd_body));

            ISEQ_COMPILE_DATA(iseq)->last_line = ISEQ_BODY(iseq)->location.code_location.end_pos.lineno;

            ADD_CATCH_ENTRY(CATCH_TYPE_REDO, start, end, NULL, start);
            ADD_CATCH_ENTRY(CATCH_TYPE_NEXT, start, end, NULL, end);

          case ISEQ_TYPE_CLASS:
            CHECK(COMPILE(ret, "scoped node", RNODE_SCOPE(node)->nd_body));
            ISEQ_COMPILE_DATA(iseq)->last_line = nd_line(node);

          case ISEQ_TYPE_METHOD:
            ISEQ_COMPILE_DATA(iseq)->root_node = RNODE_SCOPE(node)->nd_body;
            CHECK(COMPILE(ret, "scoped node", RNODE_SCOPE(node)->nd_body));
            ISEQ_COMPILE_DATA(iseq)->root_node = RNODE_SCOPE(node)->nd_body;
            ISEQ_COMPILE_DATA(iseq)->last_line = nd_line(node);

            CHECK(COMPILE(ret, "scoped node", RNODE_SCOPE(node)->nd_body));
#define INVALID_ISEQ_TYPE(type) \
        ISEQ_TYPE_##type: m = #type; goto invalid_iseq_type
        switch (ISEQ_BODY(iseq)->type) {
          case INVALID_ISEQ_TYPE(METHOD);
          case INVALID_ISEQ_TYPE(CLASS);
          case INVALID_ISEQ_TYPE(BLOCK);
          case INVALID_ISEQ_TYPE(EVAL);
          case INVALID_ISEQ_TYPE(MAIN);
          case INVALID_ISEQ_TYPE(TOP);
#undef INVALID_ISEQ_TYPE
          case ISEQ_TYPE_RESCUE:
            iseq_set_exception_local_table(iseq);
            CHECK(COMPILE(ret, "rescue", node));

          case ISEQ_TYPE_ENSURE:
            iseq_set_exception_local_table(iseq);
            CHECK(COMPILE_POPPED(ret, "ensure", node));

          case ISEQ_TYPE_PLAIN:
            CHECK(COMPILE(ret, "ensure", node));

            COMPILE_ERROR(ERROR_ARGS "unknown scope: %d", ISEQ_BODY(iseq)->type);

        COMPILE_ERROR(ERROR_ARGS "compile/ISEQ_TYPE_%s should not be reached", m);

    if (ISEQ_BODY(iseq)->type == ISEQ_TYPE_RESCUE || ISEQ_BODY(iseq)->type == ISEQ_TYPE_ENSURE) {
        NODE dummy_line_node = generate_dummy_line_node(0, -1);
        ADD_GETLOCAL(ret, &dummy_line_node, LVAR_ERRINFO, 0);
        ADD_INSN1(ret, &dummy_line_node, throw, INT2FIX(0) );
    else if (!drop_unreachable_return(ret)) {
        ADD_SYNTHETIC_INSN(ret, ISEQ_COMPILE_DATA(iseq)->last_line, -1, leave);

    if (ISEQ_COMPILE_DATA(iseq)->labels_table) {
        st_table *labels_table = ISEQ_COMPILE_DATA(iseq)->labels_table;
        ISEQ_COMPILE_DATA(iseq)->labels_table = 0;
        validate_labels(iseq, labels_table);

    CHECK(iseq_setup_insn(iseq, ret));
    return iseq_setup(iseq, ret);
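/* On direct- or call-threaded builds, rb_iseq_translate_threaded_code
 * rewrites each opcode in iseq_encoded into the address of its handler from
 * rb_vm_get_insns_address_table(); rb_iseq_original_iseq performs the
 * reverse mapping for tooling that needs plain opcodes. */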
rb_iseq_translate_threaded_code(rb_iseq_t *iseq)
#if OPT_DIRECT_THREADED_CODE || OPT_CALL_THREADED_CODE
    const void *const *table = rb_vm_get_insns_address_table();
    VALUE *encoded = (VALUE *)ISEQ_BODY(iseq)->iseq_encoded;

    for (i = 0; i < ISEQ_BODY(iseq)->iseq_size; ) {
        int insn = (int)ISEQ_BODY(iseq)->iseq_encoded[i];
        int len = insn_len(insn);
        encoded[i] = (VALUE)table[insn];

    rb_yjit_live_iseq_count++;
    rb_yjit_iseq_alloc_count++;

rb_iseq_original_iseq(const rb_iseq_t *iseq)
    VALUE *original_code;

    if (ISEQ_ORIGINAL_ISEQ(iseq)) return ISEQ_ORIGINAL_ISEQ(iseq);

    original_code = ISEQ_ORIGINAL_ISEQ_ALLOC(iseq, ISEQ_BODY(iseq)->iseq_size);
    MEMCPY(original_code, ISEQ_BODY(iseq)->iseq_encoded, VALUE, ISEQ_BODY(iseq)->iseq_size);

#if OPT_DIRECT_THREADED_CODE || OPT_CALL_THREADED_CODE
    for (i = 0; i < ISEQ_BODY(iseq)->iseq_size; ) {
        const void *addr = (const void *)original_code[i];
        const int insn = rb_vm_insn_addr2insn(addr);

        original_code[i] = insn;
        i += insn_len(insn);

    return original_code;
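/* Arena allocator for compile-time objects (INSN, LABEL, ADJUST, TRACE).
 * On platforms that need strict alignment, calc_padding rounds allocations
 * up so each one starts on an ALIGNMENT_SIZE boundary. */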
#if defined(__sparc) && SIZEOF_VOIDP == 4 && defined(__GNUC__)
  #define STRICT_ALIGNMENT

#if defined(__OpenBSD__)
  #include <sys/endian.h>
  #ifdef __STRICT_ALIGNMENT
    #define STRICT_ALIGNMENT

#ifdef STRICT_ALIGNMENT
  #if defined(HAVE_TRUE_LONG_LONG) && SIZEOF_LONG_LONG > SIZEOF_VALUE
    #define ALIGNMENT_SIZE SIZEOF_LONG_LONG

    #define ALIGNMENT_SIZE SIZEOF_VALUE

  #define PADDING_SIZE_MAX ((size_t)((ALIGNMENT_SIZE) - 1))
  #define ALIGNMENT_SIZE_MASK PADDING_SIZE_MAX

  #define PADDING_SIZE_MAX 0

#ifdef STRICT_ALIGNMENT
calc_padding(void *ptr, size_t size)
    mis = (size_t)ptr & ALIGNMENT_SIZE_MASK;
    padding = ALIGNMENT_SIZE - mis;

#if ALIGNMENT_SIZE > SIZEOF_VALUE
    if (size == sizeof(VALUE) && padding == sizeof(VALUE)) {
#ifdef STRICT_ALIGNMENT
    size_t padding = calc_padding((void *)&storage->buff[storage->pos], size);

    const size_t padding = 0;

    if (size >= INT_MAX - padding) rb_memerror();
    if (storage->pos + size + padding > storage->size) {
        unsigned int alloc_size = storage->size;

        while (alloc_size < size + PADDING_SIZE_MAX) {
            if (alloc_size >= INT_MAX / 2) rb_memerror();

        storage->next = (void *)ALLOC_N(char, alloc_size +
        storage = *arena = storage->next;
        storage->size = alloc_size;
#ifdef STRICT_ALIGNMENT
        padding = calc_padding((void *)&storage->buff[storage->pos], size);

#ifdef STRICT_ALIGNMENT
    storage->pos += (int)padding;

    ptr = (void *)&storage->buff[storage->pos];
    storage->pos += (int)size;

compile_data_alloc(rb_iseq_t *iseq, size_t size)
    return compile_data_alloc_with_arena(arena, size);
compile_data_alloc2(rb_iseq_t *iseq, size_t x, size_t y)
    return compile_data_alloc(iseq, size);

compile_data_calloc2(rb_iseq_t *iseq, size_t x, size_t y)
    void *p = compile_data_alloc(iseq, size);

    return (INSN *)compile_data_alloc_with_arena(arena, sizeof(INSN));

compile_data_alloc_label(rb_iseq_t *iseq)
    return (LABEL *)compile_data_alloc(iseq, sizeof(LABEL));

compile_data_alloc_adjust(rb_iseq_t *iseq)
    return (ADJUST *)compile_data_alloc(iseq, sizeof(ADJUST));

compile_data_alloc_trace(rb_iseq_t *iseq)
    return (TRACE *)compile_data_alloc(iseq, sizeof(TRACE));
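/* Doubly linked list primitives for LINK_ELEMENTs: insert an element before
 * or after another, replace or remove one, and fetch the first or last
 * element of an anchor. */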
    elem2->next = elem1->next;
    elem2->prev = elem1;
    elem1->next = elem2;
        elem2->next->prev = elem2;

    elem2->prev = elem1->prev;
    elem2->next = elem1;
    elem1->prev = elem2;
        elem2->prev->next = elem2;

    elem2->prev = elem1->prev;
    elem2->next = elem1->next;
        elem1->prev->next = elem2;
        elem1->next->prev = elem2;

    elem->prev->next = elem->next;
        elem->next->prev = elem->prev;

    return anchor->anchor.next;

    return anchor->last;
    switch (elem->type) {
      case ISEQ_ELEMENT_INSN:
      case ISEQ_ELEMENT_ADJUST:

LIST_INSN_SIZE_ONE(const LINK_ANCHOR *const anchor)
    LINK_ELEMENT *first_insn = ELEM_FIRST_INSN(FIRST_ELEMENT(anchor));
    if (first_insn != NULL &&
        ELEM_FIRST_INSN(first_insn->next) == NULL) {

LIST_INSN_SIZE_ZERO(const LINK_ANCHOR *const anchor)
    if (ELEM_FIRST_INSN(FIRST_ELEMENT(anchor)) == NULL) {

    if (anc2->anchor.next) {
        anc1->last->next = anc2->anchor.next;
        anc2->anchor.next->prev = anc1->last;
        anc1->last = anc2->last;

    verify_list("append", anc1);

#define APPEND_LIST(anc1, anc2) APPEND_LIST(iseq, (anc1), (anc2))
    printf("anch: %p, frst: %p, last: %p\n", (void *)&anchor->anchor,
           (void *)anchor->anchor.next, (void *)anchor->last);
        printf("curr: %p, next: %p, prev: %p, type: %d\n", (void *)list, (void *)list->next,
               (void *)list->prev, (int)list->type);

    dump_disasm_list_with_cursor(anchor->anchor.next, cur, 0);
    verify_list("debug list", anchor);

#define debug_list(anc, cur) debug_list(iseq, (anc), (cur))

#define debug_list(anc, cur) ((void)0)
    TRACE *trace = compile_data_alloc_trace(iseq);

    trace->link.type = ISEQ_ELEMENT_TRACE;
    trace->link.next = NULL;
    trace->event = event;

new_label_body(rb_iseq_t *iseq, long line)
    LABEL *labelobj = compile_data_alloc_label(iseq);

    labelobj->link.type = ISEQ_ELEMENT_LABEL;
    labelobj->link.next = 0;
    labelobj->label_no = ISEQ_COMPILE_DATA(iseq)->label_no++;
    labelobj->sc_state = 0;
    labelobj->refcnt = 0;
    labelobj->rescued = LABEL_RESCUE_NONE;
    labelobj->unremovable = 0;
    labelobj->position = -1;

    ADJUST *adjust = compile_data_alloc_adjust(iseq);
    adjust->link.type = ISEQ_ELEMENT_ADJUST;
    adjust->link.next = 0;
    adjust->label = label;
    adjust->line_no = line;
    LABEL_UNREMOVABLE(label);
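/* INSN construction: new_insn_core fills in an INSN element,
 * new_insn_body gathers its operands from varargs into arena memory, and
 * iseq_insn_each_markable_object visits VALUE operands so the write barrier
 * can see them. */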
    const char *types = insn_op_types(insn->insn_id);
    for (int j = 0; types[j]; j++) {
        char type = types[j];
            func(&OPERAND_AT(insn, j), data);

iseq_insn_each_object_write_barrier(VALUE *obj, VALUE iseq)

new_insn_core(rb_iseq_t *iseq, int line_no, int node_id, int insn_id, int argc, VALUE *argv)
    INSN *iobj = compile_data_alloc_insn(iseq);

    iobj->link.type = ISEQ_ELEMENT_INSN;
    iobj->link.next = 0;
    iobj->insn_id = insn_id;
    iobj->insn_info.line_no = line_no;
    iobj->insn_info.node_id = node_id;
    iobj->insn_info.events = 0;
    iobj->operands = argv;
    iobj->operand_size = argc;

    iseq_insn_each_markable_object(iobj, iseq_insn_each_object_write_barrier, (VALUE)iseq);
new_insn_body(rb_iseq_t *iseq, int line_no, int node_id, enum ruby_vminsn_type insn_id, int argc, ...)
    VALUE *operands = 0;

    va_start(argv, argc);
    operands = compile_data_alloc2(iseq, sizeof(VALUE), argc);
    for (i = 0; i < argc; i++) {

    return new_insn_core(iseq, line_no, node_id, insn_id, argc, operands);

insn_replace_with_operands(rb_iseq_t *iseq, INSN *iobj, enum ruby_vminsn_type insn_id, int argc, ...)
    VALUE *operands = 0;

    va_start(argv, argc);
    operands = compile_data_alloc2(iseq, sizeof(VALUE), argc);
    for (i = 0; i < argc; i++) {

    iobj->insn_id = insn_id;
    iobj->operand_size = argc;
    iobj->operands = operands;
    iseq_insn_each_markable_object(iobj, iseq_insn_each_object_write_barrier, (VALUE)iseq);
    VM_ASSERT(argc >= 0);

        flag |= VM_CALL_KWARG;
        argc += kw_arg->keyword_len;

    if (!(flag & (VM_CALL_ARGS_SPLAT | VM_CALL_ARGS_BLOCKARG | VM_CALL_KWARG | VM_CALL_KW_SPLAT | VM_CALL_FORWARDING))
        && !has_blockiseq) {
        flag |= VM_CALL_ARGS_SIMPLE;

    ISEQ_BODY(iseq)->ci_size++;
    const struct rb_callinfo *ci = vm_ci_new(mid, flag, argc, kw_arg);

    VALUE *operands = compile_data_calloc2(iseq, sizeof(VALUE), 2);
    operands[1] = (VALUE)blockiseq;

    if (vm_ci_flag((struct rb_callinfo *)ci) & VM_CALL_FORWARDING) {
        insn = new_insn_core(iseq, line_no, node_id, BIN(sendforward), 2, operands);

        insn = new_insn_core(iseq, line_no, node_id, BIN(send), 2, operands);
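/* Compile a nested scope (block, method body, class body, ...) into its own
 * child iseq; the child inherits path, realpath, compile options and script
 * lines from the parent iseq. */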
    VALUE ast_value = rb_ruby_ast_new(node);

    debugs("[new_child_iseq]> ---------------------------------------\n");
    int isolated_depth = ISEQ_COMPILE_DATA(iseq)->isolated_depth;
    ret_iseq = rb_iseq_new_with_opt(ast_value, name,
                                    rb_iseq_path(iseq), rb_iseq_realpath(iseq),
                                    isolated_depth ? isolated_depth + 1 : 0,
                                    type, ISEQ_COMPILE_DATA(iseq)->option,
                                    ISEQ_BODY(iseq)->variable.script_lines);
    debugs("[new_child_iseq]< ---------------------------------------\n");

    debugs("[new_child_iseq_with_callback]> ---------------------------------------\n");
    ret_iseq = rb_iseq_new_with_callback(ifunc, name,
                                         rb_iseq_path(iseq), rb_iseq_realpath(iseq),
                                         line_no, parent, type, ISEQ_COMPILE_DATA(iseq)->option);
    debugs("[new_child_iseq_with_callback]< ---------------------------------------\n");
    ISEQ_COMPILE_DATA(iseq)->catch_except_p = true;
    if (ISEQ_BODY(iseq)->parent_iseq != NULL) {
        if (ISEQ_COMPILE_DATA(ISEQ_BODY(iseq)->parent_iseq)) {
            set_catch_except_p((rb_iseq_t *) ISEQ_BODY(iseq)->parent_iseq);

    while (pos < body->iseq_size) {
        insn = rb_vm_insn_decode(body->iseq_encoded[pos]);
        if (insn == BIN(throw)) {
            set_catch_except_p(iseq);
        pos += insn_len(insn);

    for (i = 0; i < ct->size; i++) {
            UNALIGNED_MEMBER_PTR(ct, entries[i]);
        if (entry->type != CATCH_TYPE_BREAK
            && entry->type != CATCH_TYPE_NEXT
            && entry->type != CATCH_TYPE_REDO) {
            ISEQ_COMPILE_DATA(iseq)->catch_except_p = true;
iseq_insert_nop_between_end_and_cont(rb_iseq_t *iseq)
    VALUE catch_table_ary = ISEQ_COMPILE_DATA(iseq)->catch_table_ary;
    if (NIL_P(catch_table_ary)) return;
    unsigned int i, tlen = (unsigned int)RARRAY_LEN(catch_table_ary);

    for (i = 0; i < tlen; i++) {
        enum rb_catch_type ct = (enum rb_catch_type)(ptr[0] & 0xffff);

        if (ct != CATCH_TYPE_BREAK
            && ct != CATCH_TYPE_NEXT
            && ct != CATCH_TYPE_REDO) {
            for (e = end; e && (IS_LABEL(e) || IS_TRACE(e)); e = e->next) {
                INSN *nop = new_insn_core(iseq, 0, -1, BIN(nop), 0, 0);
                ELEM_INSERT_NEXT(end, &nop->link);
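/* iseq_setup_insn / iseq_setup drive the later compile phases: peephole
 * optimization, optional instruction unification, serializing the element
 * list with iseq_set_sequence, building the exception and optarg tables,
 * translating to threaded code, and updating catch_except_p. */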
    if (RTEST(ISEQ_COMPILE_DATA(iseq)->err_info))

    if (compile_debug > 5)
        dump_disasm_list(FIRST_ELEMENT(anchor));

    debugs("[compile step 3.1 (iseq_optimize)]\n");
    iseq_optimize(iseq, anchor);

    if (compile_debug > 5)
        dump_disasm_list(FIRST_ELEMENT(anchor));

    if (ISEQ_COMPILE_DATA(iseq)->option->instructions_unification) {
        debugs("[compile step 3.2 (iseq_insns_unification)]\n");
        iseq_insns_unification(iseq, anchor);
        if (compile_debug > 5)
            dump_disasm_list(FIRST_ELEMENT(anchor));

    debugs("[compile step 3.4 (iseq_insert_nop_between_end_and_cont)]\n");
    iseq_insert_nop_between_end_and_cont(iseq);
    if (compile_debug > 5)
        dump_disasm_list(FIRST_ELEMENT(anchor));

    if (RTEST(ISEQ_COMPILE_DATA(iseq)->err_info))

    debugs("[compile step 4.1 (iseq_set_sequence)]\n");
    if (!iseq_set_sequence(iseq, anchor)) return COMPILE_NG;
    if (compile_debug > 5)
        dump_disasm_list(FIRST_ELEMENT(anchor));

    debugs("[compile step 4.2 (iseq_set_exception_table)]\n");
    if (!iseq_set_exception_table(iseq)) return COMPILE_NG;

    debugs("[compile step 4.3 (set_optargs_table)] \n");
    if (!iseq_set_optargs_table(iseq)) return COMPILE_NG;

    debugs("[compile step 5 (iseq_translate_threaded_code)] \n");
    if (!rb_iseq_translate_threaded_code(iseq)) return COMPILE_NG;

    debugs("[compile step 6 (update_catch_except_flags)] \n");
    update_catch_except_flags(iseq, ISEQ_BODY(iseq));

    debugs("[compile step 6.1 (remove unused catch tables)] \n");
    if (!ISEQ_COMPILE_DATA(iseq)->catch_except_p && ISEQ_BODY(iseq)->catch_table) {
        xfree(ISEQ_BODY(iseq)->catch_table);
        ISEQ_BODY(iseq)->catch_table = NULL;

#if VM_INSN_INFO_TABLE_IMPL == 2
    if (ISEQ_BODY(iseq)->insns_info.succ_index_table == NULL) {
        debugs("[compile step 7 (rb_iseq_insns_info_encode_positions)] \n");
        rb_iseq_insns_info_encode_positions(iseq);

    if (compile_debug > 1) {
        VALUE str = rb_iseq_disasm(iseq);

    verify_call_cache(iseq);
    debugs("[compile step: finish]\n");
iseq_set_exception_local_table(rb_iseq_t *iseq)
    ISEQ_BODY(iseq)->local_table_size = numberof(rb_iseq_shared_exc_local_tbl);
    ISEQ_BODY(iseq)->local_table = rb_iseq_shared_exc_local_tbl;
    ISEQ_BODY(iseq)->lvar_states = NULL;

    while (iseq != ISEQ_BODY(iseq)->local_iseq) {
        iseq = ISEQ_BODY(iseq)->parent_iseq;

get_dyna_var_idx_at_raw(const rb_iseq_t *iseq, ID id)
    for (i = 0; i < ISEQ_BODY(iseq)->local_table_size; i++) {
        if (ISEQ_BODY(iseq)->local_table[i] == id) {

    int idx = get_dyna_var_idx_at_raw(ISEQ_BODY(iseq)->local_iseq, id);
        COMPILE_ERROR(iseq, ISEQ_LAST_LINE(iseq),
                      "get_local_var_idx: %d", idx);

get_dyna_var_idx(const rb_iseq_t *iseq, ID id, int *level, int *ls)
    int lv = 0, idx = -1;
    const rb_iseq_t *const topmost_iseq = iseq;

        idx = get_dyna_var_idx_at_raw(iseq, id);
        iseq = ISEQ_BODY(iseq)->parent_iseq;

        COMPILE_ERROR(topmost_iseq, ISEQ_LAST_LINE(topmost_iseq),
                      "get_dyna_var_idx: -1");

    *ls = ISEQ_BODY(iseq)->local_table_size;

iseq_local_block_param_p(const rb_iseq_t *iseq, unsigned int idx, unsigned int level)
        iseq = ISEQ_BODY(iseq)->parent_iseq;

    body = ISEQ_BODY(iseq);
    if (body->local_iseq == iseq &&
        body->param.flags.has_block &&
        body->local_table_size - body->param.block_start == idx) {

iseq_block_param_id_p(const rb_iseq_t *iseq, ID id, int *pidx, int *plevel)
    int idx = get_dyna_var_idx(iseq, id, &level, &ls);
    if (iseq_local_block_param_p(iseq, ls - idx, level)) {
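/* access_outer_variables records which outer locals a block reads or writes
 * (and rejects such access from isolated Procs), while update_lvar_state
 * tracks whether a local is uninitialized, initialized or reassigned. */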
access_outer_variables(const rb_iseq_t *iseq, int level, ID id, bool write)
    int isolated_depth = ISEQ_COMPILE_DATA(iseq)->isolated_depth;

    if (isolated_depth && level >= isolated_depth) {
        if (id == rb_intern("yield")) {
            COMPILE_ERROR(iseq, ISEQ_LAST_LINE(iseq), "can not yield from isolated Proc");

            COMPILE_ERROR(iseq, ISEQ_LAST_LINE(iseq), "can not access variable '%s' from isolated Proc", rb_id2name(id));

    for (int i=0; i<level; i++) {
        struct rb_id_table *ovs = ISEQ_BODY(iseq)->outer_variables;
            ovs = ISEQ_BODY(iseq)->outer_variables = rb_id_table_create(8);

        if (rb_id_table_lookup(ISEQ_BODY(iseq)->outer_variables, id, &val)) {
            if (write && !val) {
                rb_id_table_insert(ISEQ_BODY(iseq)->outer_variables, id, Qtrue);

            rb_id_table_insert(ISEQ_BODY(iseq)->outer_variables, id, RBOOL(write));

        iseq = ISEQ_BODY(iseq)->parent_iseq;

iseq_lvar_id(const rb_iseq_t *iseq, int idx, int level)
    for (int i=0; i<level; i++) {
        iseq = ISEQ_BODY(iseq)->parent_iseq;

    ID id = ISEQ_BODY(iseq)->local_table[ISEQ_BODY(iseq)->local_table_size - idx];

update_lvar_state(const rb_iseq_t *iseq, int level, int idx)
    for (int i=0; i<level; i++) {
        iseq = ISEQ_BODY(iseq)->parent_iseq;

    enum lvar_state *states = ISEQ_BODY(iseq)->lvar_states;
    int table_idx = ISEQ_BODY(iseq)->local_table_size - idx;
    switch (states[table_idx]) {
      case lvar_uninitialized:
        states[table_idx] = lvar_initialized;
      case lvar_initialized:
        states[table_idx] = lvar_reassigned;
      case lvar_reassigned:
        rb_bug("unreachable");

iseq_set_parameters_lvar_state(const rb_iseq_t *iseq)
    for (unsigned int i=0; i<ISEQ_BODY(iseq)->param.size; i++) {
        ISEQ_BODY(iseq)->lvar_states[i] = lvar_initialized;

    int lead_num = ISEQ_BODY(iseq)->param.lead_num;
    int opt_num = ISEQ_BODY(iseq)->param.opt_num;
    for (int i=0; i<opt_num; i++) {
        ISEQ_BODY(iseq)->lvar_states[lead_num + i] = lvar_uninitialized;
    if (iseq_local_block_param_p(iseq, idx, level)) {
        ADD_INSN2(seq, line_node, getblockparam, INT2FIX((idx) + VM_ENV_DATA_SIZE - 1), INT2FIX(level));

        ADD_INSN2(seq, line_node, getlocal, INT2FIX((idx) + VM_ENV_DATA_SIZE - 1), INT2FIX(level));

    if (level > 0) access_outer_variables(iseq, level, iseq_lvar_id(iseq, idx, level), Qfalse);

    if (iseq_local_block_param_p(iseq, idx, level)) {
        ADD_INSN2(seq, line_node, setblockparam, INT2FIX((idx) + VM_ENV_DATA_SIZE - 1), INT2FIX(level));

        ADD_INSN2(seq, line_node, setlocal, INT2FIX((idx) + VM_ENV_DATA_SIZE - 1), INT2FIX(level));

    update_lvar_state(iseq, level, idx);
    if (level > 0) access_outer_variables(iseq, level, iseq_lvar_id(iseq, idx, level), Qtrue);

    if (body->param.flags.has_opt ||
        body->param.flags.has_post ||
        body->param.flags.has_rest ||
        body->param.flags.has_block ||
        body->param.flags.has_kw ||
        body->param.flags.has_kwrest) {

        if (body->param.flags.has_block) {
            body->param.size = body->param.block_start + 1;
        else if (body->param.flags.has_kwrest) {
            body->param.size = body->param.keyword->rest_start + 1;
        else if (body->param.flags.has_kw) {
            body->param.size = body->param.keyword->bits_start + 1;
        else if (body->param.flags.has_post) {
            body->param.size = body->param.post_start + body->param.post_num;
        else if (body->param.flags.has_rest) {
            body->param.size = body->param.rest_start + 1;
        else if (body->param.flags.has_opt) {
            body->param.size = body->param.lead_num + body->param.opt_num;

        body->param.size = body->param.lead_num;
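/* Keyword parameter layout: iseq_set_arguments_keywords reserves the hidden
 * kwbits slot, counts required keywords, keeps literal default values in
 * keyword->default_values, and compiles non-literal defaults into the
 * optargs sequence instead. */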
    struct rb_iseq_param_keyword *keyword;
    int kw = 0, rkw = 0, di = 0, i;

    body->param.flags.has_kw = TRUE;
    body->param.keyword = keyword = ZALLOC_N(struct rb_iseq_param_keyword, 1);

        node = node->nd_next;

    keyword->bits_start = arg_size++;

    node = args->kw_args;
        const NODE *val_node = get_nd_value(node->nd_body);

        if (val_node == NODE_SPECIAL_REQUIRED_KEYWORD) {

            switch (nd_type(val_node)) {
                dv = rb_node_sym_string_val(val_node);
                dv = rb_node_regx_string_val(val_node);
                dv = rb_node_line_lineno_val(val_node);
                dv = rb_node_integer_literal_val(val_node);
                dv = rb_node_float_literal_val(val_node);
                dv = rb_node_rational_literal_val(val_node);
              case NODE_IMAGINARY:
                dv = rb_node_imaginary_literal_val(val_node);
                dv = rb_node_encoding_val(val_node);

                NO_CHECK(COMPILE_POPPED(optargs, "kwarg", RNODE(node)));

            keyword->num = ++di;

        node = node->nd_next;

    if (RNODE_DVAR(args->kw_rest_arg)->nd_vid != 0) {
        ID kw_id = ISEQ_BODY(iseq)->local_table[arg_size];
        keyword->rest_start = arg_size++;
        body->param.flags.has_kwrest = TRUE;

        if (kw_id == idPow) body->param.flags.anon_kwrest = TRUE;

    keyword->required_num = rkw;
    keyword->table = &body->local_table[keyword->bits_start - keyword->num];

        for (i = 0; i < RARRAY_LEN(default_values); i++) {
            if (dv == complex_mark) dv = Qundef;

        keyword->default_values = dvs;
    if (!body->param.flags.use_block) {
        body->param.flags.use_block = 1;

            st_data_t key = (st_data_t)rb_intern_str(body->location.label);
            set_insert(vm->unused_block_warning_table, key);
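/* iseq_set_arguments walks the NODE_ARGS info and fills in body->param:
 * lead/opt/rest/post/keyword/block layout, the optarg jump table, and the
 * flags the VM consults when setting up a frame. */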
    debugs("iseq_set_arguments: %s\n", node_args ? "" : "0");

        struct rb_args_info *args = &RNODE_ARGS(node_args)->nd_ainfo;

        EXPECT_NODE("iseq_set_arguments", node_args, NODE_ARGS, COMPILE_NG);

        body->param.lead_num = arg_size = (int)args->pre_args_num;
        if (body->param.lead_num > 0) body->param.flags.has_lead = TRUE;
        debugs(" - argc: %d\n", body->param.lead_num);

        rest_id = args->rest_arg;
        if (rest_id == NODE_SPECIAL_EXCESSIVE_COMMA) {

        block_id = args->block_arg;

        bool optimized_forward = (args->forwarding && args->pre_args_num == 0 && !args->opt_args);

        if (optimized_forward) {

        if (args->opt_args) {
                label = NEW_LABEL(nd_line(RNODE(node)));
                ADD_LABEL(optargs, label);
                NO_CHECK(COMPILE_POPPED(optargs, "optarg", node->nd_body));
                node = node->nd_next;

            label = NEW_LABEL(nd_line(node_args));
            ADD_LABEL(optargs, label);

            for (j = 0; j < i+1; j++) {

            body->param.flags.has_opt = TRUE;
            body->param.opt_num = i;
            body->param.opt_table = opt_table;

            body->param.rest_start = arg_size++;
            body->param.flags.has_rest = TRUE;
            if (rest_id == '*') body->param.flags.anon_rest = TRUE;

        if (args->first_post_arg) {
            body->param.post_start = arg_size;
            body->param.post_num = args->post_args_num;
            body->param.flags.has_post = TRUE;
            arg_size += args->post_args_num;

            if (body->param.flags.has_rest) {
                body->param.post_start = body->param.rest_start + 1;

        if (args->kw_args) {
            arg_size = iseq_set_arguments_keywords(iseq, optargs, args, arg_size);
        else if (args->kw_rest_arg && !optimized_forward) {
            ID kw_id = ISEQ_BODY(iseq)->local_table[arg_size];
            struct rb_iseq_param_keyword *keyword = ZALLOC_N(struct rb_iseq_param_keyword, 1);
            keyword->rest_start = arg_size++;
            body->param.keyword = keyword;
            body->param.flags.has_kwrest = TRUE;

            static ID anon_kwrest = 0;
            if (!anon_kwrest) anon_kwrest = rb_intern("**");
            if (kw_id == anon_kwrest) body->param.flags.anon_kwrest = TRUE;
        else if (args->no_kwarg) {
            body->param.flags.accepts_no_kwarg = TRUE;

            body->param.block_start = arg_size++;
            body->param.flags.has_block = TRUE;
            iseq_set_use_block(iseq);

        if (optimized_forward) {
            body->param.flags.use_block = 1;
            body->param.flags.forwardable = TRUE;

        iseq_calc_param_size(iseq);
        body->param.size = arg_size;

        if (args->pre_init) {
            NO_CHECK(COMPILE_POPPED(optargs, "init arguments (m)", args->pre_init));
        if (args->post_init) {
            NO_CHECK(COMPILE_POPPED(optargs, "init arguments (p)", args->post_init));

        if (body->type == ISEQ_TYPE_BLOCK) {
            if (body->param.flags.has_opt == FALSE &&
                body->param.flags.has_post == FALSE &&
                body->param.flags.has_rest == FALSE &&
                body->param.flags.has_kw == FALSE &&
                body->param.flags.has_kwrest == FALSE) {

                if (body->param.lead_num == 1 && last_comma == 0) {
                    body->param.flags.ambiguous_param0 = TRUE;
    unsigned int size = tbl ? tbl->size : 0;
    unsigned int offset = 0;

        struct rb_args_info *args = &RNODE_ARGS(node_args)->nd_ainfo;

        if (args->forwarding && args->pre_args_num == 0 && !args->opt_args) {

        MEMCPY(ids, tbl->ids + offset, ID, size);
        ISEQ_BODY(iseq)->local_table = ids;

        enum lvar_state *states = ALLOC_N(enum lvar_state, size);

        for (unsigned int i=0; i<size; i++) {
            states[i] = lvar_uninitialized;

        ISEQ_BODY(iseq)->lvar_states = states;

    ISEQ_BODY(iseq)->local_table_size = size;

    debugs("iseq_set_local_table: %u\n", ISEQ_BODY(iseq)->local_table_size);
    else if ((tlit = OBJ_BUILTIN_TYPE(lit)) == -1) {
    else if ((tval = OBJ_BUILTIN_TYPE(val)) == -1) {
    else if (tlit != tval) {

        long x = FIX2LONG(rb_big_cmp(lit, val));

        return rb_float_cmp(lit, val);

        const struct RRational *rat1 = RRATIONAL(val);
        const struct RRational *rat2 = RRATIONAL(lit);
        return rb_iseq_cdhash_cmp(rat1->num, rat2->num) || rb_iseq_cdhash_cmp(rat1->den, rat2->den);

        const struct RComplex *comp1 = RCOMPLEX(val);
        const struct RComplex *comp2 = RCOMPLEX(lit);
        return rb_iseq_cdhash_cmp(comp1->real, comp2->real) || rb_iseq_cdhash_cmp(comp1->imag, comp2->imag);

        return rb_reg_equal(val, lit) ? 0 : -1;

rb_iseq_cdhash_hash(VALUE a)
    switch (OBJ_BUILTIN_TYPE(a)) {
        return (st_index_t)a;
        return rb_rational_hash(a);
        return rb_complex_hash(a);

    rb_iseq_cdhash_hash,

    rb_hash_aset(data->hash, key, INT2FIX(lobj->position - (data->pos+data->len)));

    return INT2FIX(ISEQ_BODY(iseq)->ivc_size++);

    struct rb_id_table *tbl = ISEQ_COMPILE_DATA(iseq)->ivar_cache_table;

    if (rb_id_table_lookup(tbl, id, &val)) {

        tbl = rb_id_table_create(1);
        ISEQ_COMPILE_DATA(iseq)->ivar_cache_table = tbl;

    val = INT2FIX(ISEQ_BODY(iseq)->icvarc_size++);
    rb_id_table_insert(tbl, id, val);
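/* fix_sp_depth simulates stack depth over the whole element list before
 * serialization: it validates operand counts, reports stack underflow,
 * records the sp at every label, and yields the maximum depth (negative on
 * error). */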
#define BADINSN_DUMP(anchor, list, dest) \
    dump_disasm_list_with_cursor(FIRST_ELEMENT(anchor), list, dest)

#define BADINSN_ERROR \
    (xfree(generated_iseq), \
     xfree(insns_info), \
     BADINSN_DUMP(anchor, list, NULL), \

    int stack_max = 0, sp = 0, line = 0;

    for (list = FIRST_ELEMENT(anchor); list; list = list->next) {
        if (IS_LABEL(list)) {

    for (list = FIRST_ELEMENT(anchor); list; list = list->next) {
        switch (list->type) {
          case ISEQ_ELEMENT_INSN:
                sp = calc_sp_depth(sp, iobj);
                    BADINSN_DUMP(anchor, list, NULL);
                    COMPILE_ERROR(iseq, iobj->insn_info.line_no,
                                  "argument stack underflow (%d)", sp);
                if (sp > stack_max) {

                line = iobj->insn_info.line_no;
                operands = iobj->operands;
                insn = iobj->insn_id;
                types = insn_op_types(insn);
                len = insn_len(insn);

                if (iobj->operand_size != len - 1) {
                    BADINSN_DUMP(anchor, list, NULL);
                    COMPILE_ERROR(iseq, iobj->insn_info.line_no,
                                  "operand size miss! (%d for %d)",
                                  iobj->operand_size, len - 1);

                for (j = 0; types[j]; j++) {
                    if (types[j] == TS_OFFSET) {
                            BADINSN_DUMP(anchor, list, NULL);
                            COMPILE_ERROR(iseq, iobj->insn_info.line_no,
                                          "unknown label: "LABEL_FORMAT, lobj->label_no);
                        if (lobj->sp == -1) {
                        else if (lobj->sp != sp) {
                            debugs("%s:%d: sp inconsistency found but ignored (" LABEL_FORMAT " sp: %d, calculated sp: %d)\n",
                                   RSTRING_PTR(rb_iseq_path(iseq)), line,
                                   lobj->label_no, lobj->sp, sp);

          case ISEQ_ELEMENT_LABEL:
            if (lobj->sp == -1) {
                if (lobj->sp != sp) {
                    debugs("%s:%d: sp inconsistency found but ignored (" LABEL_FORMAT " sp: %d, calculated sp: %d)\n",
                           RSTRING_PTR(rb_iseq_path(iseq)), line,
                           lobj->label_no, lobj->sp, sp);

          case ISEQ_ELEMENT_TRACE:

          case ISEQ_ELEMENT_ADJUST:
                sp = adjust->label ? adjust->label->sp : 0;
                if (adjust->line_no != -1 && orig_sp - sp < 0) {
                    BADINSN_DUMP(anchor, list, NULL);
                    COMPILE_ERROR(iseq, adjust->line_no,
                                  "iseq_set_sequence: adjust bug %d < %d",

            BADINSN_DUMP(anchor, list, NULL);
            COMPILE_ERROR(iseq, line, "unknown list type: %d", list->type);
              int insns_info_index, int code_index, const INSN *iobj)
    if (insns_info_index == 0 ||
        insns_info[insns_info_index-1].line_no != iobj->insn_info.line_no ||
#ifdef USE_ISEQ_NODE_ID
        insns_info[insns_info_index-1].node_id != iobj->insn_info.node_id ||
        insns_info[insns_info_index-1].events != iobj->insn_info.events) {
        insns_info[insns_info_index].line_no = iobj->insn_info.line_no;
#ifdef USE_ISEQ_NODE_ID
        insns_info[insns_info_index].node_id = iobj->insn_info.node_id;
        insns_info[insns_info_index].events = iobj->insn_info.events;
        positions[insns_info_index] = code_index;

              int insns_info_index, int code_index, const ADJUST *adjust)
    insns_info[insns_info_index].line_no = adjust->line_no;
    insns_info[insns_info_index].node_id = -1;
    insns_info[insns_info_index].events = 0;
    positions[insns_info_index] = code_index;

array_to_idlist(VALUE arr)
    for (long i = 0; i < size; i++) {

idlist_to_array(const ID *ids)
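/* iseq_set_sequence: a first pass counts instructions, computes code size
 * and collects coverage events; a second pass emits encoded operands (labels
 * become offsets, CDHASH maps are rehashed and frozen, IC/IVC/calldata slots
 * are allocated), then attaches insns_info and the GC mark bitmap to the
 * iseq body. */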
    unsigned int *positions;

    VALUE *generated_iseq;

    int insn_num, code_index, insns_info_index, sp = 0;
    int stack_max = fix_sp_depth(iseq, anchor);

    if (stack_max < 0) return COMPILE_NG;

    insn_num = code_index = 0;
    for (list = FIRST_ELEMENT(anchor); list; list = list->next) {
        switch (list->type) {
          case ISEQ_ELEMENT_INSN:
                sp = calc_sp_depth(sp, iobj);
                events = iobj->insn_info.events |= events;
                if (ISEQ_COVERAGE(iseq)) {
                    if (ISEQ_LINE_COVERAGE(iseq) && (events & RUBY_EVENT_COVERAGE_LINE) &&
                        !(rb_get_coverage_mode() & COVERAGE_TARGET_ONESHOT_LINES)) {
                        int line = iobj->insn_info.line_no - 1;
                        if (line >= 0 && line < RARRAY_LEN(ISEQ_LINE_COVERAGE(iseq))) {
                    if (ISEQ_BRANCH_COVERAGE(iseq) && (events & RUBY_EVENT_COVERAGE_BRANCH)) {
                        while (RARRAY_LEN(ISEQ_PC2BRANCHINDEX(iseq)) <= code_index) {
                code_index += insn_data_length(iobj);

          case ISEQ_ELEMENT_LABEL:
                lobj->position = code_index;
                if (lobj->sp != sp) {
                    debugs("%s: sp inconsistency found but ignored (" LABEL_FORMAT " sp: %d, calculated sp: %d)\n",
                           RSTRING_PTR(rb_iseq_path(iseq)),
                           lobj->label_no, lobj->sp, sp);

          case ISEQ_ELEMENT_TRACE:
                events |= trace->event;
                if (trace->event & RUBY_EVENT_COVERAGE_BRANCH) data = trace->data;

          case ISEQ_ELEMENT_ADJUST:
                if (adjust->line_no != -1) {
                    sp = adjust->label ? adjust->label->sp : 0;
                    if (orig_sp - sp > 0) {
                        if (orig_sp - sp > 1) code_index++;

    positions = ALLOC_N(unsigned int, insn_num);

    if (ISEQ_IS_SIZE(body)) {
        body->is_entries = NULL;

    if (body->ci_size) {
        body->call_data = NULL;

    ISEQ_COMPILE_DATA(iseq)->ci_index = 0;
    iseq_bits_t * mark_offset_bits;
    int code_size = code_index;

    bool needs_bitmap = false;

    if (ISEQ_MBITS_BUFLEN(code_index) == 1) {
        mark_offset_bits = &ISEQ_COMPILE_DATA(iseq)->mark_bits.single;
        ISEQ_COMPILE_DATA(iseq)->is_single_mark_bit = true;

        mark_offset_bits = ZALLOC_N(iseq_bits_t, ISEQ_MBITS_BUFLEN(code_index));
        ISEQ_COMPILE_DATA(iseq)->mark_bits.list = mark_offset_bits;
        ISEQ_COMPILE_DATA(iseq)->is_single_mark_bit = false;

    ISEQ_COMPILE_DATA(iseq)->iseq_encoded = (void *)generated_iseq;
    ISEQ_COMPILE_DATA(iseq)->iseq_size = code_index;

    list = FIRST_ELEMENT(anchor);
    insns_info_index = code_index = sp = 0;

        switch (list->type) {
          case ISEQ_ELEMENT_INSN:
                sp = calc_sp_depth(sp, iobj);

                operands = iobj->operands;
                insn = iobj->insn_id;
                generated_iseq[code_index] = insn;
                types = insn_op_types(insn);
                len = insn_len(insn);

                for (j = 0; types[j]; j++) {
                    char type = types[j];
                        generated_iseq[code_index + 1 + j] = lobj->position - (code_index + len);

                        VALUE map = operands[j];
                        data.pos = code_index;
                        rb_hash_rehash(map);
                        freeze_hide_obj(map);
                        generated_iseq[code_index + 1 + j] = map;
                        ISEQ_MBITS_SET(mark_offset_bits, code_index + 1 + j);
                        needs_bitmap = true;

                        generated_iseq[code_index + 1 + j] = FIX2INT(operands[j]);

                        VALUE v = operands[j];
                        generated_iseq[code_index + 1 + j] = v;
                            ISEQ_MBITS_SET(mark_offset_bits, code_index + 1 + j);
                            needs_bitmap = true;

                        unsigned int ic_index = ISEQ_COMPILE_DATA(iseq)->ic_index++;
                        IC ic = &ISEQ_IS_ENTRY_START(body, type)[ic_index].ic_cache;
                        if (UNLIKELY(ic_index >= body->ic_size)) {
                            BADINSN_DUMP(anchor, &iobj->link, 0);
                            COMPILE_ERROR(iseq, iobj->insn_info.line_no,
                                          "iseq_set_sequence: ic_index overflow: index: %d, size: %d",
                                          ic_index, ISEQ_IS_SIZE(body));

                        ic->segments = array_to_idlist(operands[j]);

                        generated_iseq[code_index + 1 + j] = (VALUE)ic;

                        unsigned int ic_index = FIX2UINT(operands[j]);
                        IVC cache = ((IVC)&body->is_entries[ic_index]);

                        if (insn == BIN(setinstancevariable)) {
                            cache->iv_set_name = SYM2ID(operands[j - 1]);

                            cache->iv_set_name = 0;

                        vm_ic_attr_index_initialize(cache, INVALID_SHAPE_ID);

                        unsigned int ic_index = FIX2UINT(operands[j]);
                        IC ic = &ISEQ_IS_ENTRY_START(body, type)[ic_index].ic_cache;
                        if (UNLIKELY(ic_index >= ISEQ_IS_SIZE(body))) {
                            BADINSN_DUMP(anchor, &iobj->link, 0);
                            COMPILE_ERROR(iseq, iobj->insn_info.line_no,
                                          "iseq_set_sequence: ic_index overflow: index: %d, size: %d",
                                          ic_index, ISEQ_IS_SIZE(body));
                        generated_iseq[code_index + 1 + j] = (VALUE)ic;

                        RUBY_ASSERT(ISEQ_COMPILE_DATA(iseq)->ci_index <= body->ci_size);
                        struct rb_call_data *cd = &body->call_data[ISEQ_COMPILE_DATA(iseq)->ci_index++];

                        cd->cc = vm_cc_empty();
                        generated_iseq[code_index + 1 + j] = (VALUE)cd;

                        generated_iseq[code_index + 1 + j] = SYM2ID(operands[j]);

                        generated_iseq[code_index + 1 + j] = operands[j];

                        generated_iseq[code_index + 1 + j] = operands[j];

                        BADINSN_ERROR(iseq, iobj->insn_info.line_no,
                                      "unknown operand type: %c", type);

                if (add_insn_info(insns_info, positions, insns_info_index, code_index, iobj)) insns_info_index++;

          case ISEQ_ELEMENT_LABEL:
                if (lobj->sp != sp) {
                    debugs("%s: sp inconsistency found but ignored (" LABEL_FORMAT " sp: %d, calculated sp: %d)\n",
                           RSTRING_PTR(rb_iseq_path(iseq)),
                           lobj->label_no, lobj->sp, sp);

          case ISEQ_ELEMENT_ADJUST:
                if (adjust->label) {
                    sp = adjust->label->sp;

                if (adjust->line_no != -1) {
                    const int diff = orig_sp - sp;

                    if (insns_info_index == 0) {
                        COMPILE_ERROR(iseq, adjust->line_no,
                                      "iseq_set_sequence: adjust bug (ISEQ_ELEMENT_ADJUST must not be the first in iseq)");
                    if (add_adjust_info(insns_info, positions, insns_info_index, code_index, adjust)) insns_info_index++;

                        generated_iseq[code_index++] = BIN(adjuststack);
                        generated_iseq[code_index++] = orig_sp - sp;
                    else if (diff == 1) {
                        generated_iseq[code_index++] = BIN(pop);
                    else if (diff < 0) {
                        int label_no = adjust->label ? adjust->label->label_no : -1;
                        xfree(generated_iseq);

                        if (ISEQ_MBITS_BUFLEN(code_size) > 1) {
                            xfree(mark_offset_bits);

                        debug_list(anchor, list);
                        COMPILE_ERROR(iseq, adjust->line_no,
                                      "iseq_set_sequence: adjust bug to %d %d < %d",
                                      label_no, orig_sp, sp);

    body->iseq_encoded = (void *)generated_iseq;
    body->iseq_size = code_index;
    body->stack_max = stack_max;

    if (ISEQ_COMPILE_DATA(iseq)->is_single_mark_bit) {
        body->mark_bits.single = ISEQ_COMPILE_DATA(iseq)->mark_bits.single;

        body->mark_bits.list = mark_offset_bits;

        body->mark_bits.list = NULL;
        ISEQ_COMPILE_DATA(iseq)->mark_bits.list = NULL;
        ruby_xfree(mark_offset_bits);

    body->insns_info.body = insns_info;
    body->insns_info.positions = positions;

    body->insns_info.body = insns_info;
    REALLOC_N(positions, unsigned int, insns_info_index);
    body->insns_info.positions = positions;
    body->insns_info.size = insns_info_index;
label_get_position(LABEL *lobj)
    return lobj->position;

label_get_sp(LABEL *lobj)

iseq_set_exception_table(rb_iseq_t *iseq)
    const VALUE *tptr, *ptr;
    unsigned int tlen, i;

    ISEQ_BODY(iseq)->catch_table = NULL;

    VALUE catch_table_ary = ISEQ_COMPILE_DATA(iseq)->catch_table_ary;
    if (NIL_P(catch_table_ary)) return COMPILE_OK;

    for (i = 0; i < table->size; i++) {
        entry = UNALIGNED_MEMBER_PTR(table, entries[i]);
        entry->type = (enum rb_catch_type)(ptr[0] & 0xffff);
        pos = label_get_position((LABEL *)(ptr[1] & ~1));
        entry->start = (unsigned int)pos;
        pos = label_get_position((LABEL *)(ptr[2] & ~1));
        entry->end = (unsigned int)pos;

        entry->cont = label_get_position(lobj);
        entry->sp = label_get_sp(lobj);

        if (entry->type == CATCH_TYPE_RESCUE ||
            entry->type == CATCH_TYPE_BREAK ||
            entry->type == CATCH_TYPE_NEXT) {

    ISEQ_BODY(iseq)->catch_table = table;
    RB_OBJ_WRITE(iseq, &ISEQ_COMPILE_DATA(iseq)->catch_table_ary, 0);

    VALUE *opt_table = (VALUE *)ISEQ_BODY(iseq)->param.opt_table;

    if (ISEQ_BODY(iseq)->param.flags.has_opt) {
        for (i = 0; i < ISEQ_BODY(iseq)->param.opt_num + 1; i++) {
            opt_table[i] = label_get_position((LABEL *)opt_table[i]);
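/* Peephole helpers: walk from a jump to its destination instruction while
 * skipping labels and accumulating trace events, look up neighboring
 * instructions, and maintain label reference counts when jump targets are
 * rewritten or removed. */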
get_destination_insn(INSN *iobj)
    list = lobj->link.next;
        switch (list->type) {
          case ISEQ_ELEMENT_INSN:
          case ISEQ_ELEMENT_ADJUST:
          case ISEQ_ELEMENT_LABEL:
          case ISEQ_ELEMENT_TRACE:
                events |= trace->event;

    if (list && IS_INSN(list)) {
        iobj->insn_info.events |= events;

get_next_insn(INSN *iobj)
        if (IS_INSN(list) || IS_ADJUST(list)) {

get_prev_insn(INSN *iobj)
        if (IS_INSN(list) || IS_ADJUST(list)) {

unref_destination(INSN *iobj, int pos)
    LABEL *lobj = (LABEL *)OPERAND_AT(iobj, pos);
    if (!lobj->refcnt) ELEM_REMOVE(&lobj->link);

replace_destination(INSN *dobj, INSN *nobj)
    VALUE n = OPERAND_AT(nobj, 0);
    if (dl == nl) return false;
    OPERAND_AT(dobj, 0) = n;
    if (!dl->refcnt) ELEM_REMOVE(&dl->link);

find_destination(INSN *i)
    int pos, len = insn_len(i->insn_id);
    for (pos = 0; pos < len; ++pos) {
        if (insn_op_types(i->insn_id)[pos] == TS_OFFSET) {
            return (LABEL *)OPERAND_AT(i, pos);

    int *unref_counts = 0, nlabels = ISEQ_COMPILE_DATA(iseq)->label_no;

    unref_counts = ALLOCA_N(int, nlabels);
    MEMZERO(unref_counts, int, nlabels);

        if (IS_INSN_ID(i, leave)) {
        else if ((lab = find_destination((INSN *)i)) != 0) {
            unref_counts[lab->label_no]++;
        else if (IS_LABEL(i)) {
            if (lab->unremovable) return 0;
            if (lab->refcnt > unref_counts[lab->label_no]) {
                if (i == first) return 0;
        else if (IS_TRACE(i)) {
        else if (IS_ADJUST(i)) {
    } while ((i = i->next) != 0);

        VALUE insn = INSN_OF(i);
        int pos, len = insn_len(insn);
        for (pos = 0; pos < len; ++pos) {
            switch (insn_op_types(insn)[pos]) {
                unref_destination((INSN *)i, pos);
    } while ((i != end) && (i = i->next) != 0);

    switch (OPERAND_AT(iobj, 0)) {
        ELEM_REMOVE(&iobj->link);
        ELEM_REMOVE(&iobj->link);
        iobj->insn_id = BIN(adjuststack);
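/* optimize_checktype: when a putstring/putobject is immediately followed by
 * a checktype and a conditional branch, the type test can be decided at
 * compile time and replaced with an unconditional jump (plus a pop when the
 * value was not dup'ed). */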
3214 is_frozen_putstring(INSN *insn, VALUE *op)
3216 if (IS_INSN_ID(insn, putstring) || IS_INSN_ID(insn, putchilledstring)) {
3217 *op = OPERAND_AT(insn, 0);
3220 else if (IS_INSN_ID(insn, putobject)) {
3221 *op = OPERAND_AT(insn, 0);
3232 if (prev->type == ISEQ_ELEMENT_LABEL) {
3234 if (label->refcnt > 0) {
3238 else if (prev->type == ISEQ_ELEMENT_INSN) {
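/*
 * optimize_checktype(): when a literal push (putstring/putchilledstring/
 * putobject) flows into checktype + branchif/branchunless, the branch
 * outcome is known at compile time, so the sequence is rewritten into an
 * unconditional jump to the taken side (adding a pop when the literal was
 * not dup'ed first).
 */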
3271 INSN *niobj, *ciobj, *dup = 0;
3275 switch (INSN_OF(iobj)) {
3276 case BIN(putstring):
3277 case BIN(putchilledstring):
3283 case BIN(putobject):
3286 default: return FALSE;
3289 ciobj = (INSN *)get_next_insn(iobj);
3290 if (IS_INSN_ID(ciobj, jump)) {
3291 ciobj = (INSN *)get_next_insn((INSN*)OPERAND_AT(ciobj, 0));
3293 if (IS_INSN_ID(ciobj, dup)) {
3294 ciobj = (INSN *)get_next_insn(dup = ciobj);
3296 if (!ciobj || !IS_INSN_ID(ciobj, checktype)) return FALSE;
3297 niobj = (INSN *)get_next_insn(ciobj);
3302 switch (INSN_OF(niobj)) {
3304 if (OPERAND_AT(ciobj, 0) == type) {
3305 dest = (LABEL *)OPERAND_AT(niobj, 0);
3308 case BIN(branchunless):
3309 if (OPERAND_AT(ciobj, 0) != type) {
3310 dest = (LABEL *)OPERAND_AT(niobj, 0);
3316 line = ciobj->insn_info.line_no;
3317 node_id = ciobj->insn_info.node_id;
3319 if (niobj->link.next && IS_LABEL(niobj->link.next)) {
3320 dest = (LABEL *)niobj->link.next;
3323 dest = NEW_LABEL(line);
3324 ELEM_INSERT_NEXT(&niobj->link, &dest->link);
3327 INSERT_AFTER_INSN1(iobj, line, node_id, jump, dest);
3329 if (!dup) INSERT_AFTER_INSN(iobj, line, node_id, pop);
3336 const struct rb_callinfo *nci = vm_ci_new(vm_ci_mid(ci),
3337 vm_ci_flag(ci) | add,
3347 const struct rb_callinfo *nci = vm_ci_new(vm_ci_mid(ci),
3355#define vm_ci_simple(ci) (vm_ci_flag(ci) & VM_CALL_ARGS_SIMPLE)
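/*
 * iseq_peephole_optimize(): the `jump` cases below implement classic jump
 * threading -- a jump to the very next instruction is deleted, a jump whose
 * target is itself an unconditional jump is retargeted, a jump to `leave`
 * becomes a `leave`, and a conditional branch that jumps just past an
 * unconditional jump is inverted and retargeted so the jump can be dropped.
 */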
3363 optimize_checktype(iseq, iobj);
3365 if (IS_INSN_ID(iobj, jump)) {
3366 INSN *niobj, *diobj, *piobj;
3367 diobj = (
INSN *)get_destination_insn(iobj);
3368 niobj = (
INSN *)get_next_insn(iobj);
3370 if (diobj == niobj) {
3377 unref_destination(iobj, 0);
3378 ELEM_REMOVE(&iobj->link);
3381 else if (iobj != diobj && IS_INSN(&diobj->link) &&
3382 IS_INSN_ID(diobj, jump) &&
3383 OPERAND_AT(iobj, 0) != OPERAND_AT(diobj, 0) &&
3384 diobj->insn_info.events == 0) {
3395 if (replace_destination(iobj, diobj)) {
3396 remove_unreachable_chunk(iseq, iobj->link.next);
3400 else if (IS_INSN_ID(diobj, leave)) {
3413 unref_destination(iobj, 0);
3414 iobj->insn_id = BIN(leave);
3415 iobj->operand_size = 0;
3416 iobj->insn_info = diobj->insn_info;
3419 else if (IS_INSN(iobj->link.prev) &&
3420 (piobj = (INSN *)iobj->link.prev) &&
3421 (IS_INSN_ID(piobj, branchif) ||
3422 IS_INSN_ID(piobj, branchunless))) {
3423 INSN *pdiobj = (INSN *)get_destination_insn(piobj);
3424 if (niobj == pdiobj) {
3425 int refcnt = IS_LABEL(piobj->link.next) ?
3426 ((LABEL *)piobj->link.next)->refcnt : 0;
3441 piobj->insn_id = (IS_INSN_ID(piobj, branchif))
3442 ? BIN(branchunless) : BIN(branchif);
3443 if (replace_destination(piobj, iobj) && refcnt <= 1) {
3444 ELEM_REMOVE(&iobj->link);
3451 else if (diobj == pdiobj) {
3465 INSN *popiobj = new_insn_core(iseq, iobj->insn_info.line_no, iobj->insn_info.node_id, BIN(pop), 0, 0);
3466 ELEM_REPLACE(&piobj->link, &popiobj->link);
3469 if (remove_unreachable_chunk(iseq, iobj->link.next)) {
3483 if (IS_INSN_ID(iobj, newrange)) {
3484 INSN *const range = iobj;
3486 VALUE str_beg, str_end;
3488 if ((end = (INSN *)get_prev_insn(range)) != 0 &&
3489 is_frozen_putstring(end, &str_end) &&
3490 (beg = (INSN *)get_prev_insn(end)) != 0 &&
3491 is_frozen_putstring(beg, &str_beg) &&
3492 !(insn_has_label_before(&beg->link) || insn_has_label_before(&end->link))) {
3493 int excl = FIX2INT(OPERAND_AT(range, 0));
3496 ELEM_REMOVE(&beg->link);
3497 ELEM_REMOVE(&end->link);
3498 range->insn_id = BIN(putobject);
3499 OPERAND_AT(range, 0) = lit_range;
3504 if (IS_INSN_ID(iobj, leave)) {
3505 remove_unreachable_chunk(iseq, iobj->link.next);
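/*
 * Freeze-call specializations: `duparray`/`duphash` (or an empty
 * newarray/newhash) followed by a simple argument-less send of :freeze is
 * collapsed into opt_ary_freeze/opt_hash_freeze carrying the frozen literal
 * and the original call info, so e.g. `[1, 2].freeze` can reuse the frozen
 * literal instead of copying it (the specialized instruction falls back to
 * a real call if freeze has been redefined).
 */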
3517 if (IS_INSN_ID(iobj, duparray)) {
3519 if (IS_INSN(next) && (IS_INSN_ID(next, concatarray) || IS_INSN_ID(next, concattoarray))) {
3520 iobj->insn_id = BIN(putobject);
3530 if (IS_INSN_ID(iobj, duparray)) {
3532 if (IS_INSN(next) && (IS_INSN_ID(next, send))) {
3536 if (vm_ci_simple(ci) && vm_ci_argc(ci) == 0 && blockiseq == NULL && vm_ci_mid(ci) == idFreeze) {
3537 VALUE ary = iobj->operands[0];
3540 insn_replace_with_operands(iseq, iobj, BIN(opt_ary_freeze), 2, ary, (
VALUE)ci);
3552 if (IS_INSN_ID(iobj, duphash)) {
3554 if (IS_INSN(next) && (IS_INSN_ID(next, send))) {
3558 if (vm_ci_simple(ci) && vm_ci_argc(ci) == 0 && blockiseq == NULL && vm_ci_mid(ci) == idFreeze) {
3559 VALUE hash = iobj->operands[0];
3561 RB_OBJ_SET_SHAREABLE(hash);
3563 insn_replace_with_operands(iseq, iobj, BIN(opt_hash_freeze), 2, hash, (VALUE)ci);
3575 if (IS_INSN_ID(iobj, newarray) && iobj->operands[0] == INT2FIX(0)) {
3577 if (IS_INSN(next) && (IS_INSN_ID(next, send))) {
3581 if (vm_ci_simple(ci) && vm_ci_argc(ci) == 0 && blockiseq == NULL && vm_ci_mid(ci) == idFreeze) {
3582 insn_replace_with_operands(iseq, iobj, BIN(opt_ary_freeze), 2, rb_cArray_empty_frozen, (VALUE)ci);
3594 if (IS_INSN_ID(iobj, newhash) && iobj->operands[0] == INT2FIX(0)) {
3596 if (IS_INSN(next) && (IS_INSN_ID(next, send))) {
3600 if (vm_ci_simple(ci) && vm_ci_argc(ci) == 0 && blockiseq == NULL && vm_ci_mid(ci) == idFreeze) {
3601 insn_replace_with_operands(iseq, iobj, BIN(opt_hash_freeze), 2, rb_cHash_empty_frozen, (VALUE)ci);
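/*
 * Conditional-branch threading: for branchif/branchnil/branchunless the
 * destination is followed through trailing `jump` instructions (and through
 * a dup + identical branch), unless coverage events on the target forbid
 * it; a branch whose operand is a constant push (putobject/putnil/
 * putstring/...) is resolved at compile time into a plain jump or removed.
 */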
3607 if (IS_INSN_ID(iobj, branchif) ||
3608 IS_INSN_ID(iobj, branchnil) ||
3609 IS_INSN_ID(iobj, branchunless)) {
3618 INSN *nobj = (
INSN *)get_destination_insn(iobj);
3640 int stop_optimization =
3641 ISEQ_COVERAGE(iseq) && ISEQ_LINE_COVERAGE(iseq) &&
3642 nobj->link.type == ISEQ_ELEMENT_INSN &&
3643 nobj->insn_info.events;
3644 if (!stop_optimization) {
3645 INSN *pobj = (INSN *)iobj->link.prev;
3648 if (!IS_INSN(&pobj->link))
3650 else if (IS_INSN_ID(pobj, dup))
3655 if (IS_INSN(&nobj->link) && IS_INSN_ID(nobj, jump)) {
3656 if (!replace_destination(iobj, nobj)) break;
3658 else if (prev_dup && IS_INSN_ID(nobj, dup) &&
3659 !!(nobj = (INSN *)nobj->link.next) &&
3661 nobj->insn_id == iobj->insn_id) {
3677 if (!replace_destination(iobj, nobj)) break;
3705 if (prev_dup && IS_INSN(pobj->link.prev)) {
3706 pobj = (INSN *)pobj->link.prev;
3708 if (IS_INSN_ID(pobj, putobject)) {
3709 cond = (IS_INSN_ID(iobj, branchif) ?
3710 OPERAND_AT(pobj, 0) != Qfalse :
3711 IS_INSN_ID(iobj, branchunless) ?
3712 OPERAND_AT(pobj, 0) == Qfalse :
3715 else if (IS_INSN_ID(pobj, putstring) ||
3716 IS_INSN_ID(pobj, duparray) ||
3717 IS_INSN_ID(pobj, newarray)) {
3718 cond = IS_INSN_ID(iobj, branchif);
3720 else if (IS_INSN_ID(pobj, putnil)) {
3721 cond = !IS_INSN_ID(iobj, branchif);
3724 if (prev_dup || !IS_INSN_ID(pobj, newarray)) {
3725 ELEM_REMOVE(iobj->link.prev);
3727 else if (!iseq_pop_newarray(iseq, pobj)) {
3728 pobj = new_insn_core(iseq, pobj->insn_info.line_no, pobj->insn_info.node_id, BIN(pop), 0, NULL);
3729 ELEM_INSERT_PREV(&iobj->link, &pobj->link);
3733 pobj = new_insn_core(iseq, pobj->insn_info.line_no, pobj->insn_info.node_id, BIN(putnil), 0, NULL);
3734 ELEM_INSERT_NEXT(&iobj->link, &pobj->link);
3736 iobj->insn_id = BIN(jump);
3740 unref_destination(iobj, 0);
3741 ELEM_REMOVE(&iobj->link);
3746 nobj = (
INSN *)get_destination_insn(nobj);
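/*
 * `pop` elimination: when the previous instruction only pushes a value with
 * no other side effect (putobject, putnil, putself, putstring, dup,
 * getlocal, duparray, ...), both it and the pop are removed; newarray,
 * concatarray and concatstrings get dedicated rewrites instead of a plain
 * drop.
 */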
3751 if (IS_INSN_ID(iobj, pop)) {
3759 if (IS_INSN(prev)) {
3760 enum ruby_vminsn_type previ = ((
INSN *)prev)->insn_id;
3761 if (previ == BIN(putobject) || previ == BIN(putnil) ||
3762 previ == BIN(putself) || previ == BIN(putstring) ||
3763 previ == BIN(putchilledstring) ||
3764 previ == BIN(dup) ||
3765 previ == BIN(getlocal) ||
3766 previ == BIN(getblockparam) ||
3767 previ == BIN(getblockparamproxy) ||
3768 previ == BIN(getinstancevariable) ||
3769 previ == BIN(duparray)) {
3773 ELEM_REMOVE(&iobj->link);
3775 else if (previ == BIN(newarray) && iseq_pop_newarray(iseq, (
INSN*)prev)) {
3776 ELEM_REMOVE(&iobj->link);
3778 else if (previ == BIN(concatarray)) {
3780 INSERT_BEFORE_INSN1(piobj, piobj->insn_info.line_no, piobj->insn_info.node_id, splatarray, Qfalse);
3781 INSN_OF(piobj) = BIN(pop);
3783 else if (previ == BIN(concatstrings)) {
3784 if (OPERAND_AT(prev, 0) == INT2FIX(1)) {
3788 ELEM_REMOVE(&iobj->link);
3789 INSN_OF(prev) = BIN(adjuststack);
3795 if (IS_INSN_ID(iobj, newarray) ||
3796 IS_INSN_ID(iobj, duparray) ||
3797 IS_INSN_ID(iobj, concatarray) ||
3798 IS_INSN_ID(iobj, splatarray) ||
3808 if (IS_INSN(next) && IS_INSN_ID(next, splatarray)) {
3814 if (IS_INSN_ID(iobj, newarray)) {
3816 if (IS_INSN(next) && IS_INSN_ID(next, expandarray) &&
3817 OPERAND_AT(next, 1) ==
INT2FIX(0)) {
3819 op1 = OPERAND_AT(iobj, 0);
3820 op2 = OPERAND_AT(next, 0);
3831 INSN_OF(iobj) = BIN(swap);
3832 iobj->operand_size = 0;
3841 INSN_OF(iobj) = BIN(opt_reverse);
3846 INSN_OF(iobj) = BIN(opt_reverse);
3847 OPERAND_AT(iobj, 0) = OPERAND_AT(next, 0);
3857 for (; diff > 0; diff--) {
3858 INSERT_BEFORE_INSN(iobj, iobj->insn_info.line_no, iobj->insn_info.node_id, pop);
3869 for (; diff < 0; diff++) {
3870 INSERT_BEFORE_INSN(iobj, iobj->insn_info.line_no, iobj->insn_info.node_id, putnil);
3877 if (IS_INSN_ID(iobj, duparray)) {
3886 if (IS_INSN(next) && IS_INSN_ID(next, expandarray)) {
3887 INSN_OF(iobj) = BIN(putobject);
3891 if (IS_INSN_ID(iobj, anytostring)) {
3899 if (IS_INSN(next) && IS_INSN_ID(next, concatstrings) &&
3900 OPERAND_AT(next, 0) ==
INT2FIX(1)) {
3905 if (IS_INSN_ID(iobj, putstring) || IS_INSN_ID(iobj, putchilledstring) ||
3913 if (IS_NEXT_INSN_ID(&iobj->link, concatstrings) &&
3914 RSTRING_LEN(OPERAND_AT(iobj, 0)) == 0) {
3915 INSN *next = (INSN *)iobj->link.next;
3916 if ((OPERAND_AT(next, 0) = FIXNUM_INC(OPERAND_AT(next, 0), -1)) == INT2FIX(1)) {
3917 ELEM_REMOVE(&next->link);
3919 ELEM_REMOVE(&iobj->link);
3921 if (IS_NEXT_INSN_ID(&iobj->link, toregexp)) {
3922 INSN *next = (INSN *)iobj->link.next;
3923 if (OPERAND_AT(next, 1) == INT2FIX(1)) {
3924 VALUE src = OPERAND_AT(iobj, 0);
3925 int opt = (int)FIX2LONG(OPERAND_AT(next, 0));
3926 VALUE path = rb_iseq_path(iseq);
3927 int line = iobj->insn_info.line_no;
3928 VALUE errinfo = rb_errinfo();
3929 VALUE re = rb_reg_compile(src, opt, RSTRING_PTR(path), line);
3931 VALUE message = rb_attr_get(rb_errinfo(), idMesg);
3932 rb_set_errinfo(errinfo);
3933 COMPILE_ERROR(iseq, line,
"%" PRIsVALUE, message);
3936 RB_OBJ_SET_SHAREABLE(re);
3939 ELEM_REMOVE(iobj->link.next);
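/*
 * Adjacent `concatstrings` are merged: if this concatstrings (possibly
 * separated by a jump) is immediately followed by another one, the two
 * element counts are combined into a single instruction, and any label
 * between them has its refcount adjusted or is re-created after the
 * second instruction.
 */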
3944 if (IS_INSN_ID(iobj, concatstrings)) {
3953 if (IS_INSN(next) && IS_INSN_ID(next, jump))
3954 next = get_destination_insn(jump = (INSN *)next);
3955 if (IS_INSN(next) && IS_INSN_ID(next, concatstrings)) {
3956 int n = FIX2INT(OPERAND_AT(iobj, 0)) + FIX2INT(OPERAND_AT(next, 0)) - 1;
3957 OPERAND_AT(iobj, 0) = INT2FIX(n);
3959 LABEL *label = ((LABEL *)OPERAND_AT(jump, 0));
3960 if (!--label->refcnt) {
3961 ELEM_REMOVE(&label->link);
3964 label = NEW_LABEL(0);
3965 OPERAND_AT(jump, 0) = (VALUE)label;
3968 ELEM_INSERT_NEXT(next, &label->link);
3969 CHECK(iseq_peephole_optimize(iseq, get_next_insn(jump), do_tailcallopt));
3977 if (do_tailcallopt &&
3978 (IS_INSN_ID(iobj, send) ||
3979 IS_INSN_ID(iobj, invokesuper))) {
3988 if (iobj->link.next) {
3991 if (!IS_INSN(next)) {
3995 switch (INSN_OF(next)) {
4004 next = get_destination_insn((
INSN *)next);
4018 if (IS_INSN_ID(piobj, send) ||
4019 IS_INSN_ID(piobj, invokesuper)) {
4020 if (OPERAND_AT(piobj, 1) == 0) {
4021 ci = ci_flag_set(iseq, ci, VM_CALL_TAILCALL);
4022 OPERAND_AT(piobj, 0) = (VALUE)ci;
4027 ci = ci_flag_set(iseq, ci, VM_CALL_TAILCALL);
4028 OPERAND_AT(piobj, 0) = (VALUE)ci;
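/*
 * Redundant store elimination: `dup; setlocal X; setlocal X` (and the
 * `dup; setlocal; dup; setlocal` variant) collapses the duplicate
 * assignment when both stores target the same local, and
 * `getlocal X; dup; setlocal X` drops the useless re-store of the value
 * that was just read.
 */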
4034 if (IS_INSN_ID(iobj, dup)) {
4035 if (IS_NEXT_INSN_ID(&iobj->link, setlocal)) {
4046 if (IS_NEXT_INSN_ID(set1, setlocal)) {
4048 if (OPERAND_AT(set1, 0) == OPERAND_AT(set2, 0) &&
4049 OPERAND_AT(set1, 1) == OPERAND_AT(set2, 1)) {
4051 ELEM_REMOVE(&iobj->link);
4064 else if (IS_NEXT_INSN_ID(set1, dup) &&
4065 IS_NEXT_INSN_ID(set1->next, setlocal)) {
4066 set2 = set1->next->next;
4067 if (OPERAND_AT(set1, 0) == OPERAND_AT(set2, 0) &&
4068 OPERAND_AT(set1, 1) == OPERAND_AT(set2, 1)) {
4069 ELEM_REMOVE(set1->next);
4083 if (IS_INSN_ID(iobj, getlocal)) {
4085 if (IS_NEXT_INSN_ID(niobj, dup)) {
4086 niobj = niobj->next;
4088 if (IS_NEXT_INSN_ID(niobj, setlocal)) {
4090 if (OPERAND_AT(iobj, 0) == OPERAND_AT(set1, 0) &&
4091 OPERAND_AT(iobj, 1) == OPERAND_AT(set1, 1)) {
4107 if (IS_INSN_ID(iobj, opt_invokebuiltin_delegate)) {
4108 if (IS_TRACE(iobj->link.next)) {
4109 if (IS_NEXT_INSN_ID(iobj->link.next, leave)) {
4110 iobj->insn_id = BIN(opt_invokebuiltin_delegate_leave);
4112 if (iobj == (
INSN *)list && bf->argc == 0 && (ISEQ_BODY(iseq)->builtin_attrs & BUILTIN_ATTR_LEAF)) {
4113 ISEQ_BODY(iseq)->builtin_attrs |= BUILTIN_ATTR_SINGLE_NOARG_LEAF;
4126 if (IS_INSN_ID(iobj, getblockparam)) {
4127 if (IS_NEXT_INSN_ID(&iobj->link, branchif) || IS_NEXT_INSN_ID(&iobj->link, branchunless)) {
4128 iobj->insn_id = BIN(getblockparamproxy);
4132 if (IS_INSN_ID(iobj, splatarray) && OPERAND_AT(iobj, 0) ==
false) {
4134 if (IS_NEXT_INSN_ID(niobj, duphash)) {
4135 niobj = niobj->next;
4137 unsigned int set_flags = 0, unset_flags = 0;
4150 if (IS_NEXT_INSN_ID(niobj, send)) {
4151 siobj = niobj->next;
4152 set_flags = VM_CALL_ARGS_SPLAT|VM_CALL_KW_SPLAT|VM_CALL_KW_SPLAT_MUT;
4153 unset_flags = VM_CALL_ARGS_BLOCKARG;
4168 else if ((IS_NEXT_INSN_ID(niobj, getlocal) || IS_NEXT_INSN_ID(niobj, getinstancevariable) ||
4169 IS_NEXT_INSN_ID(niobj, getblockparamproxy)) && (IS_NEXT_INSN_ID(niobj->next, send))) {
4170 siobj = niobj->next->next;
4171 set_flags = VM_CALL_ARGS_SPLAT|VM_CALL_KW_SPLAT|VM_CALL_KW_SPLAT_MUT|VM_CALL_ARGS_BLOCKARG;
4176 unsigned int flags = vm_ci_flag(ci);
4177 if ((flags & set_flags) == set_flags && !(flags & unset_flags)) {
4178 ((INSN*)niobj)->insn_id = BIN(putobject);
4179 RB_OBJ_WRITE(iseq, &OPERAND_AT(niobj, 0), RB_OBJ_SET_SHAREABLE(rb_hash_freeze(rb_hash_resurrect(OPERAND_AT(niobj, 0)))));
4181 const struct rb_callinfo *nci = vm_ci_new(vm_ci_mid(ci),
4182 flags & ~VM_CALL_KW_SPLAT_MUT, vm_ci_argc(ci), vm_ci_kwarg(ci));
4184 OPERAND_AT(siobj, 0) = (VALUE)nci;
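/*
 * insn_set_specialized_instruction(): rewrites a generic `send` into its
 * specialized opcode. opt_neq is special-cased because it needs two call
 * infos (a fresh one for #== plus the original one); for everything else
 * the insn id is swapped and the operand count adjusted to the new
 * instruction.
 */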
4194 insn_set_specialized_instruction(rb_iseq_t *iseq, INSN *iobj, int insn_id)
4196 if (insn_id == BIN(opt_neq)) {
4197 VALUE original_ci = iobj->operands[0];
4198 VALUE new_ci = (VALUE)new_callinfo(iseq, idEq, 1, 0, NULL, FALSE);
4199 insn_replace_with_operands(iseq, iobj, insn_id, 2, new_ci, original_ci);
4202 iobj->insn_id = insn_id;
4203 iobj->operand_size = insn_len(insn_id) - 1;
4213 if (IS_INSN_ID(iobj, newarray) && iobj->link.next &&
4214 IS_INSN(iobj->link.next)) {
4218 INSN *niobj = (INSN *)iobj->link.next;
4219 if (IS_INSN_ID(niobj, send)) {
4221 if (vm_ci_simple(ci) && vm_ci_argc(ci) == 0) {
4223 switch (vm_ci_mid(ci)) {
4225 method = INT2FIX(VM_OPT_NEWARRAY_SEND_MAX);
4228 method = INT2FIX(VM_OPT_NEWARRAY_SEND_MIN);
4231 method = INT2FIX(VM_OPT_NEWARRAY_SEND_HASH);
4236 VALUE num = iobj->operands[0];
4237 insn_replace_with_operands(iseq, iobj, BIN(opt_newarray_send), 2, num, method);
4238 ELEM_REMOVE(&niobj->link);
4243 else if ((IS_INSN_ID(niobj, putstring) || IS_INSN_ID(niobj, putchilledstring) ||
4245 IS_NEXT_INSN_ID(&niobj->link, send)) {
4247 if (vm_ci_simple(ci) && vm_ci_argc(ci) == 1 && vm_ci_mid(ci) == idPack) {
4248 VALUE num = iobj->operands[0];
4249 insn_replace_with_operands(iseq, iobj, BIN(opt_newarray_send), 2, FIXNUM_INC(num, 1),
INT2FIX(VM_OPT_NEWARRAY_SEND_PACK));
4250 ELEM_REMOVE(&iobj->link);
4251 ELEM_REMOVE(niobj->link.next);
4252 ELEM_INSERT_NEXT(&niobj->link, &iobj->link);
4258 else if ((IS_INSN_ID(niobj, putstring) || IS_INSN_ID(niobj, putchilledstring) ||
4260 IS_NEXT_INSN_ID(&niobj->link, getlocal) &&
4261 (niobj->link.next && IS_NEXT_INSN_ID(niobj->link.next, send))) {
4264 if (vm_ci_mid(ci) == idPack && vm_ci_argc(ci) == 2 &&
4265 (kwarg && kwarg->keyword_len == 1 && kwarg->keywords[0] == rb_id2sym(idBuffer))) {
4266 VALUE num = iobj->operands[0];
4267 insn_replace_with_operands(iseq, iobj, BIN(opt_newarray_send), 2, FIXNUM_INC(num, 2), INT2FIX(VM_OPT_NEWARRAY_SEND_PACK_BUFFER));
4269 ELEM_REMOVE((niobj->link.next)->next);
4271 ELEM_REMOVE(&iobj->link);
4273 ELEM_INSERT_NEXT(niobj->link.next, &iobj->link);
4281 if ((IS_INSN_ID(niobj, putstring) || IS_INSN_ID(niobj, putchilledstring) ||
4282 IS_INSN_ID(niobj, putobject) ||
4283 IS_INSN_ID(niobj, putself) ||
4284 IS_INSN_ID(niobj, getlocal) ||
4285 IS_INSN_ID(niobj, getinstancevariable)) &&
4286 IS_NEXT_INSN_ID(&niobj->link, send)) {
4293 sendobj = sendobj->next;
4294 ci = (struct rb_callinfo *)OPERAND_AT(sendobj, 0);
4295 } while (vm_ci_simple(ci) && vm_ci_argc(ci) == 0 && IS_NEXT_INSN_ID(sendobj, send));
4298 if (vm_ci_simple(ci) && vm_ci_argc(ci) == 1 && vm_ci_mid(ci) == idIncludeP) {
4299 VALUE num = iobj->operands[0];
4301 insn_replace_with_operands(iseq, sendins, BIN(opt_newarray_send), 2, FIXNUM_INC(num, 1), INT2FIX(VM_OPT_NEWARRAY_SEND_INCLUDE_P));
4303 ELEM_REMOVE(&iobj->link);
4317 if (IS_INSN_ID(iobj, duparray) && iobj->link.next && IS_INSN(iobj->link.next)) {
4318 INSN *niobj = (
INSN *)iobj->link.next;
4319 if ((IS_INSN_ID(niobj, getlocal) ||
4320 IS_INSN_ID(niobj, getinstancevariable) ||
4321 IS_INSN_ID(niobj, putself)) &&
4322 IS_NEXT_INSN_ID(&niobj->link, send)) {
4329 sendobj = sendobj->next;
4330 ci = (struct rb_callinfo *)OPERAND_AT(sendobj, 0);
4331 } while (vm_ci_simple(ci) && vm_ci_argc(ci) == 0 && IS_NEXT_INSN_ID(sendobj, send));
4333 if (vm_ci_simple(ci) && vm_ci_argc(ci) == 1 && vm_ci_mid(ci) == idIncludeP) {
4335 VALUE ary = iobj->operands[0];
4339 insn_replace_with_operands(iseq, sendins, BIN(opt_duparray_send), 3, ary, rb_id2sym(idIncludeP), INT2FIX(1));
4342 ELEM_REMOVE(&iobj->link);
4349 if (IS_INSN_ID(iobj, send)) {
4353#define SP_INSN(opt) insn_set_specialized_instruction(iseq, iobj, BIN(opt_##opt))
4354 if (vm_ci_simple(ci)) {
4355 switch (vm_ci_argc(ci)) {
4357 switch (vm_ci_mid(ci)) {
4358 case idLength: SP_INSN(length); return COMPILE_OK;
4359 case idSize: SP_INSN(size); return COMPILE_OK;
4360 case idEmptyP: SP_INSN(empty_p); return COMPILE_OK;
4361 case idNilP: SP_INSN(nil_p); return COMPILE_OK;
4362 case idSucc: SP_INSN(succ); return COMPILE_OK;
4363 case idNot: SP_INSN(not); return COMPILE_OK;
4367 switch (vm_ci_mid(ci)) {
4368 case idPLUS: SP_INSN(plus); return COMPILE_OK;
4369 case idMINUS: SP_INSN(minus); return COMPILE_OK;
4370 case idMULT: SP_INSN(mult); return COMPILE_OK;
4371 case idDIV: SP_INSN(div); return COMPILE_OK;
4372 case idMOD: SP_INSN(mod); return COMPILE_OK;
4373 case idEq: SP_INSN(eq); return COMPILE_OK;
4374 case idNeq: SP_INSN(neq); return COMPILE_OK;
4375 case idEqTilde:SP_INSN(regexpmatch2); return COMPILE_OK;
4376 case idLT: SP_INSN(lt); return COMPILE_OK;
4377 case idLE: SP_INSN(le); return COMPILE_OK;
4378 case idGT: SP_INSN(gt); return COMPILE_OK;
4379 case idGE: SP_INSN(ge); return COMPILE_OK;
4380 case idLTLT: SP_INSN(ltlt); return COMPILE_OK;
4381 case idAREF: SP_INSN(aref); return COMPILE_OK;
4382 case idAnd: SP_INSN(and); return COMPILE_OK;
4383 case idOr: SP_INSN(or); return COMPILE_OK;
4387 switch (vm_ci_mid(ci)) {
4388 case idASET: SP_INSN(aset); return COMPILE_OK;
4394 if ((vm_ci_flag(ci) & (VM_CALL_ARGS_BLOCKARG | VM_CALL_FORWARDING)) == 0 && blockiseq == NULL) {
4395 iobj->insn_id = BIN(opt_send_without_block);
4396 iobj->operand_size = insn_len(iobj->insn_id) - 1;
4407 switch (ISEQ_BODY(iseq)->
type) {
4409 case ISEQ_TYPE_EVAL:
4410 case ISEQ_TYPE_MAIN:
4412 case ISEQ_TYPE_RESCUE:
4413 case ISEQ_TYPE_ENSURE:
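/*
 * The optimization driver below walks every element of the list and
 * applies, per the compile options, peephole optimization, specialized
 * instructions and operand unification; tailcall optimization is suspended
 * between LABEL_RESCUE_BEG and LABEL_RESCUE_END, and the trailing-nop block
 * optimization is abandoned as soon as a `throw` or a branch back to the
 * block's loop label is seen.
 */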
4425 const int do_peepholeopt = ISEQ_COMPILE_DATA(iseq)->option->peephole_optimization;
4426 const int do_tailcallopt = tailcallable_p(iseq) &&
4427 ISEQ_COMPILE_DATA(iseq)->option->tailcall_optimization;
4428 const int do_si = ISEQ_COMPILE_DATA(iseq)->option->specialized_instruction;
4429 const int do_ou = ISEQ_COMPILE_DATA(iseq)->option->operands_unification;
4430 int rescue_level = 0;
4431 int tailcallopt = do_tailcallopt;
4433 list = FIRST_ELEMENT(anchor);
4435 int do_block_optimization = 0;
4436 LABEL * block_loop_label = NULL;
4439 if (ISEQ_BODY(iseq)->type == ISEQ_TYPE_BLOCK) {
4440 do_block_optimization = 1;
4445 if (IS_INSN(le) && IS_INSN_ID((INSN *)le, nop) && IS_LABEL(le->next)) {
4446 block_loop_label = (LABEL *)le->next;
4451 if (IS_INSN(list)) {
4452 if (do_peepholeopt) {
4453 iseq_peephole_optimize(iseq, list, tailcallopt);
4456 iseq_specialized_instruction(iseq, (INSN *)list);
4459 insn_operands_unification((INSN *)list);
4462 if (do_block_optimization) {
4465 if (IS_INSN_ID(item, throw)) {
4466 do_block_optimization = 0;
4471 const char *types = insn_op_types(item->insn_id);
4472 for (int j = 0; types[j]; j++) {
4473 if (types[j] == TS_OFFSET) {
4478 LABEL * target = (LABEL *)OPERAND_AT(item, j);
4479 if (target == block_loop_label) {
4480 do_block_optimization = 0;
4487 if (IS_LABEL(list)) {
4488 switch (((
LABEL *)list)->rescued) {
4489 case LABEL_RESCUE_BEG:
4491 tailcallopt = FALSE;
4493 case LABEL_RESCUE_END:
4494 if (!--rescue_level) tailcallopt = do_tailcallopt;
4501 if (do_block_optimization) {
4503 if (IS_INSN(le) && IS_INSN_ID((
INSN *)le, nop)) {
4510#if OPT_INSTRUCTIONS_UNIFICATION
4518 VALUE *operands = 0, *ptr = 0;
4522 for (i = 0; i < size; i++) {
4523 iobj = (INSN *)list;
4524 argc += iobj->operand_size;
4529 ptr = operands = compile_data_alloc2(iseq, sizeof(VALUE), argc);
4534 for (i = 0; i < size; i++) {
4535 iobj = (INSN *)list;
4536 MEMCPY(ptr, iobj->operands, VALUE, iobj->operand_size);
4537 ptr += iobj->operand_size;
4541 return new_insn_core(iseq, iobj->insn_info.line_no, iobj->insn_info.node_id, insn_id, argc, operands);
4553#if OPT_INSTRUCTIONS_UNIFICATION
4559 list = FIRST_ELEMENT(anchor);
4561 if (IS_INSN(list)) {
4562 iobj = (INSN *)list;
4564 if (unified_insns_data[id] != 0) {
4565 const int *const *entry = unified_insns_data[id];
4566 for (j = 1; j < (intptr_t)entry[0]; j++) {
4567 const int *unified = entry[j];
4569 for (k = 2; k < unified[1]; k++) {
4571 ((INSN *)li)->insn_id != unified[k]) {
4578 new_unified_insn(iseq, unified[0], unified[1] - 1,
4583 niobj->link.next = li;
4602 all_string_result_p(const NODE *node)
4604 if (!node) return FALSE;
4605 switch (nd_type(node)) {
4606 case NODE_STR: case NODE_DSTR: case NODE_FILE:
4608 case NODE_IF: case NODE_UNLESS:
4609 if (!RNODE_IF(node)->nd_body || !RNODE_IF(node)->nd_else) return FALSE;
4610 if (all_string_result_p(RNODE_IF(node)->nd_body))
4611 return all_string_result_p(RNODE_IF(node)->nd_else);
4613 case NODE_AND: case NODE_OR:
4614 if (!RNODE_AND(node)->nd_2nd)
4615 return all_string_result_p(RNODE_AND(node)->nd_1st);
4616 if (!all_string_result_p(RNODE_AND(node)->nd_1st))
4618 return all_string_result_p(RNODE_AND(node)->nd_2nd);
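/*
 * Dynamic string (dstr) compilation: adjacent literal fragments are
 * accumulated into args->lit and flushed as a single frozen string
 * (putobject) before each dynamic part, so "a#{x}b" pushes at most one
 * literal chunk per run of static text; when compiling a dynamic regexp
 * (args->dregx) each fragment is also validated with
 * rb_reg_check_preprocess().
 */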
4628 const NODE *lit_node;
4636 VALUE s = rb_str_new_mutable_parser_string(str);
4638 VALUE error = rb_reg_check_preprocess(s);
4639 if (!NIL_P(error)) {
4640 COMPILE_ERROR(args->iseq, nd_line(node), "%" PRIsVALUE, error);
4644 if (NIL_P(args->lit)) {
4646 args->lit_node = node;
4655 flush_dstr_fragment(struct dstr_ctxt *args)
4657 if (!NIL_P(args->lit)) {
4659 VALUE lit = args->lit;
4661 lit = rb_fstring(lit);
4662 ADD_INSN1(args->ret, args->lit_node, putobject, lit);
4669 compile_dstr_fragments_0(struct dstr_ctxt *args, const NODE *const node)
4671 const struct RNode_LIST *list = RNODE_DSTR(node)->nd_next;
4675 CHECK(append_dstr_fragment(args, node, str));
4679 const NODE *const head = list->nd_head;
4680 if (nd_type_p(head, NODE_STR)) {
4681 CHECK(append_dstr_fragment(args, node, RNODE_STR(head)->string));
4683 else if (nd_type_p(head, NODE_DSTR)) {
4684 CHECK(compile_dstr_fragments_0(args, head));
4687 flush_dstr_fragment(args);
4689 CHECK(COMPILE(args->ret, "each string", head));
4701 .iseq = iseq, .ret = ret,
4702 .lit = Qnil, .lit_node = NULL,
4703 .cnt = 0, .dregx = dregx,
4705 CHECK(compile_dstr_fragments_0(&args, node));
4706 flush_dstr_fragment(&args);
4716 while (node && nd_type_p(node, NODE_BLOCK)) {
4717 CHECK(COMPILE_(ret, "BLOCK body", RNODE_BLOCK(node)->nd_head,
4718 (RNODE_BLOCK(node)->nd_next ? 1 : popped)));
4719 node = RNODE_BLOCK(node)->nd_next;
4722 CHECK(COMPILE_(ret, "BLOCK next", RNODE_BLOCK(node)->nd_next, popped));
4731 if (!RNODE_DSTR(node)->nd_next) {
4732 VALUE lit = rb_node_dstr_string_val(node);
4733 ADD_INSN1(ret, node, putstring, lit);
4734 RB_OBJ_SET_SHAREABLE(lit);
4738 CHECK(compile_dstr_fragments(iseq, ret, node, &cnt, FALSE));
4739 ADD_INSN1(ret, node, concatstrings,
INT2FIX(cnt));
4748 int cflag = (int)RNODE_DREGX(node)->as.nd_cflag;
4750 if (!RNODE_DREGX(node)->nd_next) {
4752 VALUE src = rb_node_dregx_string_val(node);
4753 VALUE match = rb_reg_compile(src, cflag, NULL, 0);
4754 RB_OBJ_SET_SHAREABLE(match);
4755 ADD_INSN1(ret, node, putobject, match);
4761 CHECK(compile_dstr_fragments(iseq, ret, node, &cnt, TRUE));
4765 ADD_INSN(ret, node, pop);
4775 const int line = nd_line(node);
4776 LABEL *lend = NEW_LABEL(line);
4777 rb_num_t cnt = ISEQ_FLIP_CNT_INCREMENT(ISEQ_BODY(iseq)->local_iseq)
4778 + VM_SVAR_FLIPFLOP_START;
4781 ADD_INSN2(ret, node, getspecial, key, INT2FIX(0));
4782 ADD_INSNL(ret, node, branchif, lend);
4785 CHECK(COMPILE(ret, "flip2 beg", RNODE_FLIP2(node)->nd_beg));
4786 ADD_INSNL(ret, node, branchunless, else_label);
4787 ADD_INSN1(ret, node, putobject, Qtrue);
4788 ADD_INSN1(ret, node, setspecial, key);
4790 ADD_INSNL(ret, node, jump, then_label);
4794 ADD_LABEL(ret, lend);
4795 CHECK(COMPILE(ret, "flip2 end", RNODE_FLIP2(node)->nd_end));
4796 ADD_INSNL(ret, node, branchunless, then_label);
4797 ADD_INSN1(ret, node, putobject, Qfalse);
4798 ADD_INSN1(ret, node, setspecial, key);
4799 ADD_INSNL(ret, node, jump, then_label);
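/*
 * compile_branch_condition()/compile_logical(): AND/OR conditions are
 * compiled by chaining through fresh labels, flip-flops get their own
 * compile path, and a condition that compiles to a lone `putobject`
 * literal is folded into an unconditional jump to the then/else side at
 * compile time.
 */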
4808#define COMPILE_SINGLE 2
4815 LABEL *label = NEW_LABEL(nd_line(cond));
4816 if (!then_label) then_label = label;
4817 else if (!else_label) else_label = label;
4819 CHECK(compile_branch_condition(iseq, seq, cond, then_label, else_label));
4821 if (LIST_INSN_SIZE_ONE(seq)) {
4822 INSN *insn = (
INSN *)ELEM_FIRST_INSN(FIRST_ELEMENT(seq));
4823 if (insn->insn_id == BIN(jump) && (
LABEL *)(insn->operands[0]) == label)
4826 if (!label->refcnt) {
4827 return COMPILE_SINGLE;
4829 ADD_LABEL(seq, label);
4839 DECL_ANCHOR(ignore);
4842 switch (nd_type(cond)) {
4844 CHECK(ok = compile_logical(iseq, ret, RNODE_AND(cond)->nd_1st, NULL, else_label));
4845 cond = RNODE_AND(cond)->nd_2nd;
4846 if (ok == COMPILE_SINGLE) {
4847 INIT_ANCHOR(ignore);
4849 then_label = NEW_LABEL(nd_line(cond));
4853 CHECK(ok = compile_logical(iseq, ret, RNODE_OR(cond)->nd_1st, then_label, NULL));
4854 cond = RNODE_OR(cond)->nd_2nd;
4855 if (ok == COMPILE_SINGLE) {
4856 INIT_ANCHOR(ignore);
4858 else_label = NEW_LABEL(nd_line(cond));
4868 case NODE_IMAGINARY:
4875 ADD_INSNL(ret, cond, jump, then_label);
4880 ADD_INSNL(ret, cond, jump, else_label);
4886 CHECK(COMPILE_POPPED(ret, "branch condition", cond));
4887 ADD_INSNL(ret, cond, jump, then_label);
4890 CHECK(compile_flip_flop(iseq, ret, cond, TRUE, then_label, else_label));
4893 CHECK(compile_flip_flop(iseq, ret, cond, FALSE, then_label, else_label));
4896 CHECK(compile_defined_expr(iseq, ret, cond, Qfalse, ret == ignore));
4900 DECL_ANCHOR(cond_seq);
4901 INIT_ANCHOR(cond_seq);
4903 CHECK(COMPILE(cond_seq, "branch condition", cond));
4905 if (LIST_INSN_SIZE_ONE(cond_seq)) {
4906 INSN *insn = (INSN *)ELEM_FIRST_INSN(FIRST_ELEMENT(cond_seq));
4907 if (insn->insn_id == BIN(putobject)) {
4908 if (RTEST(insn->operands[0])) {
4909 ADD_INSNL(ret, cond, jump, then_label);
4914 ADD_INSNL(ret, cond, jump, else_label);
4919 ADD_SEQ(ret, cond_seq);
4924 ADD_INSNL(ret, cond, branchunless, else_label);
4925 ADD_INSNL(ret, cond, jump, then_label);
4932 keyword_node_p(const NODE *const node)
4934 return nd_type_p(node, NODE_HASH) && (RNODE_HASH(node)->nd_brace & HASH_BRACE) != HASH_BRACE;
4940 switch (nd_type(node)) {
4942 return rb_node_sym_string_val(node);
4944 UNKNOWN_NODE("get_symbol_value", node, Qnil);
4951 NODE *node = node_hash->nd_head;
4952 VALUE hash = rb_hash_new();
4955 for (int i = 0; node != NULL; i++, node = RNODE_LIST(RNODE_LIST(node)->nd_next)->nd_next) {
4956 VALUE key = get_symbol_value(iseq, RNODE_LIST(node)->nd_head);
4957 VALUE idx = rb_hash_aref(hash, key);
4962 rb_hash_aset(hash, key, INT2FIX(i));
4972 const NODE *
const root_node,
4980 if (RNODE_HASH(root_node)->nd_head && nd_type_p(RNODE_HASH(root_node)->nd_head, NODE_LIST)) {
4981 const NODE *node = RNODE_HASH(root_node)->nd_head;
4985 const NODE *key_node = RNODE_LIST(node)->nd_head;
4989 if (key_node && nd_type_p(key_node, NODE_SYM)) {
4994 *flag |= VM_CALL_KW_SPLAT;
4995 if (seen_nodes > 1 || RNODE_LIST(RNODE_LIST(node)->nd_next)->nd_next) {
5000 *flag |= VM_CALL_KW_SPLAT_MUT;
5005 node = RNODE_LIST(node)->nd_next;
5006 node = RNODE_LIST(node)->nd_next;
5010 node = RNODE_HASH(root_node)->nd_head;
5013 VALUE key_index = node_hash_unique_key_index(iseq, RNODE_HASH(root_node), &len);
5016 VALUE *keywords = kw_arg->keywords;
5019 kw_arg->references = 0;
5020 kw_arg->keyword_len = len;
5022 *kw_arg_ptr = kw_arg;
5024 for (i=0; node != NULL; i++, node = RNODE_LIST(RNODE_LIST(node)->nd_next)->nd_next) {
5025 const NODE *key_node = RNODE_LIST(node)->nd_head;
5026 const NODE *val_node = RNODE_LIST(RNODE_LIST(node)->nd_next)->nd_head;
5029 keywords[j] = get_symbol_value(iseq, key_node);
5033 NO_CHECK(COMPILE_(ret, "keyword values", val_node, popped));
5047 for (; node; len++, node = RNODE_LIST(node)->nd_next) {
5049 EXPECT_NODE("compile_args", node, NODE_LIST, -1);
5052 if (RNODE_LIST(node)->nd_next == NULL && keyword_node_p(RNODE_LIST(node)->nd_head)) {
5053 *kwnode_ptr = RNODE_LIST(node)->nd_head;
5056 RUBY_ASSERT(!keyword_node_p(RNODE_LIST(node)->nd_head));
5057 NO_CHECK(COMPILE_(ret, "array element", RNODE_LIST(node)->nd_head, FALSE));
5065 frozen_string_literal_p(const rb_iseq_t *iseq)
5067 return ISEQ_COMPILE_DATA(iseq)->option->frozen_string_literal > 0;
5071 static_literal_node_p(const NODE *node, const rb_iseq_t *iseq, bool hash_key)
5073 switch (nd_type(node)) {
5081 case NODE_IMAGINARY:
5088 return hash_key || frozen_string_literal_p(iseq);
5097 switch (nd_type(node)) {
5100 VALUE lit = rb_node_integer_literal_val(node);
5106 VALUE lit = rb_node_float_literal_val(node);
5112 case NODE_IMAGINARY:
5121 return rb_node_sym_string_val(node);
5123 return RB_OBJ_SET_SHAREABLE(rb_node_regx_string_val(node));
5125 return rb_node_line_lineno_val(node);
5127 return rb_node_encoding_val(node);
5130 if (ISEQ_COMPILE_DATA(iseq)->option->debug_frozen_string_literal || RTEST(ruby_debug)) {
5131 VALUE lit = get_string_value(node);
5132 VALUE str = rb_str_with_debug_created_info(lit, rb_iseq_path(iseq), (int)nd_line(node));
5133 RB_OBJ_SET_SHAREABLE(str);
5137 return get_string_value(node);
5140 rb_bug("unexpected node: %s", ruby_node_name(nd_type(node)));
5147 const NODE *line_node = node;
5149 if (nd_type_p(node, NODE_ZLIST)) {
5151 ADD_INSN1(ret, line_node, newarray, INT2FIX(0));
5156 EXPECT_NODE("compile_array", node, NODE_LIST, -1);
5159 for (; node; node = RNODE_LIST(node)->nd_next) {
5160 NO_CHECK(COMPILE_(ret, "array element", RNODE_LIST(node)->nd_head, popped));
5202 const int max_stack_len = 0x100;
5203 const int min_tmp_ary_len = 0x40;
5207#define FLUSH_CHUNK \
5209 if (first_chunk) ADD_INSN1(ret, line_node, newarray, INT2FIX(stack_len)); \
5210 else ADD_INSN1(ret, line_node, pushtoarray, INT2FIX(stack_len)); \
5211 first_chunk = FALSE; \
5219 if (static_literal_node_p(RNODE_LIST(node)->nd_head, iseq, false)) {
5221 const NODE *node_tmp = RNODE_LIST(node)->nd_next;
5222 for (; node_tmp && static_literal_node_p(RNODE_LIST(node_tmp)->nd_head, iseq, false); node_tmp = RNODE_LIST(node_tmp)->nd_next)
5225 if ((first_chunk && stack_len == 0 && !node_tmp) || count >= min_tmp_ary_len) {
5230 for (; count; count--, node = RNODE_LIST(node)->nd_next)
5231 rb_ary_push(ary, static_literal_value(RNODE_LIST(node)->nd_head, iseq));
5232 RB_OBJ_SET_FROZEN_SHAREABLE(ary);
5237 ADD_INSN1(ret, line_node, duparray, ary);
5238 first_chunk = FALSE;
5241 ADD_INSN1(ret, line_node, putobject, ary);
5242 ADD_INSN(ret, line_node, concattoarray);
5244 RB_OBJ_SET_SHAREABLE(ary);
5250 for (; count; count--, node = RNODE_LIST(node)->nd_next) {
5252 EXPECT_NODE("compile_array", node, NODE_LIST, -1);
5255 if (!RNODE_LIST(node)->nd_next && keyword_node_p(RNODE_LIST(node)->nd_head)) {
5257 if (stack_len == 0 && first_chunk) {
5258 ADD_INSN1(ret, line_node, newarray, INT2FIX(0));
5263 NO_CHECK(COMPILE_(ret, "array element", RNODE_LIST(node)->nd_head, 0));
5264 ADD_INSN(ret, line_node, pushtoarraykwsplat);
5268 NO_CHECK(COMPILE_(ret, "array element", RNODE_LIST(node)->nd_head, 0));
5273 if (stack_len >= max_stack_len) FLUSH_CHUNK;
5283 static_literal_node_pair_p(const NODE *node, const rb_iseq_t *iseq)
5285 return RNODE_LIST(node)->nd_head && static_literal_node_p(RNODE_LIST(node)->nd_head, iseq, true) && static_literal_node_p(RNODE_LIST(RNODE_LIST(node)->nd_next)->nd_head, iseq, false);
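/*
 * compile_hash() builds the hash in chunks of at most max_stack_len
 * key/value pairs: the first chunk becomes a `newhash`, later chunks are
 * merged with core#hash_merge_ptr, and a long enough run of static literal
 * pairs (min_tmp_hash_len) is precomputed into a hidden frozen hash emitted
 * via `duphash`, or merged with core#hash_merge_kwd when it is not the
 * first chunk.
 */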
5291 const NODE *line_node = node;
5293 node = RNODE_HASH(node)->nd_head;
5295 if (!node || nd_type_p(node, NODE_ZLIST)) {
5297 ADD_INSN1(ret, line_node, newhash, INT2FIX(0));
5302 EXPECT_NODE("compile_hash", node, NODE_LIST, -1);
5305 for (; node; node = RNODE_LIST(node)->nd_next) {
5306 NO_CHECK(COMPILE_(ret, "hash element", RNODE_LIST(node)->nd_head, popped));
5329 const int max_stack_len = 0x100;
5330 const int min_tmp_hash_len = 0x800;
5332 int first_chunk = 1;
5333 DECL_ANCHOR(anchor);
5334 INIT_ANCHOR(anchor);
5337#define FLUSH_CHUNK() \
5339 if (first_chunk) { \
5340 APPEND_LIST(ret, anchor); \
5341 ADD_INSN1(ret, line_node, newhash, INT2FIX(stack_len)); \
5344 ADD_INSN1(ret, line_node, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE)); \
5345 ADD_INSN(ret, line_node, swap); \
5346 APPEND_LIST(ret, anchor); \
5347 ADD_SEND(ret, line_node, id_core_hash_merge_ptr, INT2FIX(stack_len + 1)); \
5349 INIT_ANCHOR(anchor); \
5350 first_chunk = stack_len = 0; \
5357 if (static_literal_node_pair_p(node, iseq)) {
5359 const NODE *node_tmp = RNODE_LIST(RNODE_LIST(node)->nd_next)->nd_next;
5360 for (; node_tmp && static_literal_node_pair_p(node_tmp, iseq); node_tmp = RNODE_LIST(RNODE_LIST(node_tmp)->nd_next)->nd_next)
5363 if ((first_chunk && stack_len == 0 && !node_tmp) || count >= min_tmp_hash_len) {
5368 for (; count; count--, node = RNODE_LIST(RNODE_LIST(node)->nd_next)->nd_next) {
5370 elem[0] = static_literal_value(RNODE_LIST(node)->nd_head, iseq);
5372 elem[1] = static_literal_value(RNODE_LIST(RNODE_LIST(node)->nd_next)->nd_head, iseq);
5378 hash = RB_OBJ_SET_FROZEN_SHAREABLE(rb_obj_hide(hash));
5383 ADD_INSN1(ret, line_node, duphash, hash);
5387 ADD_INSN1(ret, line_node, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
5388 ADD_INSN(ret, line_node, swap);
5390 ADD_INSN1(ret, line_node, putobject, hash);
5392 ADD_SEND(ret, line_node, id_core_hash_merge_kwd, INT2FIX(2));
5399 for (; count; count--, node = RNODE_LIST(RNODE_LIST(node)->nd_next)->nd_next) {
5402 EXPECT_NODE("compile_hash", node, NODE_LIST, -1);
5405 if (RNODE_LIST(node)->nd_head) {
5407 NO_CHECK(COMPILE_(anchor, "hash key element", RNODE_LIST(node)->nd_head, 0));
5408 NO_CHECK(COMPILE_(anchor, "hash value element", RNODE_LIST(RNODE_LIST(node)->nd_next)->nd_head, 0));
5412 if (stack_len >= max_stack_len) FLUSH_CHUNK();
5418 const NODE *kw = RNODE_LIST(RNODE_LIST(node)->nd_next)->nd_head;
5419 int empty_kw = nd_type_p(kw, NODE_HASH) && (!RNODE_HASH(kw)->nd_head);
5420 int first_kw = first_chunk && stack_len == 0;
5421 int last_kw = !RNODE_LIST(RNODE_LIST(node)->nd_next)->nd_next;
5422 int only_kw = last_kw && first_kw;
5424 empty_kw = empty_kw || nd_type_p(kw, NODE_NIL);
5426 if (only_kw && method_call_keywords) {
5434 NO_CHECK(COMPILE(ret, "keyword splat", kw));
5436 else if (first_kw) {
5440 ADD_INSN1(ret, line_node, newhash, INT2FIX(0));
5447 if (only_kw && method_call_keywords) {
5453 NO_CHECK(COMPILE(ret, "keyword splat", kw));
5460 ADD_INSN1(ret, line_node, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
5461 if (first_kw) ADD_INSN1(ret, line_node, newhash, INT2FIX(0));
5462 else ADD_INSN(ret, line_node, swap);
5464 NO_CHECK(COMPILE(ret, "keyword splat", kw));
5466 ADD_SEND(ret, line_node, id_core_hash_merge_kwd, INT2FIX(2));
5481 rb_node_case_when_optimizable_literal(const NODE *const node)
5483 switch (nd_type(node)) {
5485 return rb_node_integer_literal_val(node);
5487 VALUE v = rb_node_float_literal_val(node);
5496 case NODE_IMAGINARY:
5505 return rb_node_sym_string_val(node);
5507 return rb_node_line_lineno_val(node);
5509 return rb_node_str_string_val(node);
5511 return rb_node_file_path_val(node);
5518 LABEL *l1, int only_special_literals, VALUE literals)
5521 const NODE *val = RNODE_LIST(vals)->nd_head;
5522 VALUE lit = rb_node_case_when_optimizable_literal(val);
5525 only_special_literals = 0;
5527 else if (NIL_P(rb_hash_lookup(literals, lit))) {
5528 rb_hash_aset(literals, lit, (VALUE)(l1) | 1);
5531 if (nd_type_p(val, NODE_STR) || nd_type_p(val, NODE_FILE)) {
5532 debugp_param("nd_lit", get_string_value(val));
5533 lit = get_string_value(val);
5534 ADD_INSN1(cond_seq, val, putobject, lit);
5538 if (!COMPILE(cond_seq, "when cond", val)) return -1;
5542 ADD_INSN1(cond_seq, vals, topn, INT2FIX(1));
5543 ADD_CALL(cond_seq, vals, idEqq, INT2FIX(1));
5544 ADD_INSNL(cond_seq, val, branchif, l1);
5545 vals = RNODE_LIST(vals)->nd_next;
5547 return only_special_literals;
5552 LABEL *l1, int only_special_literals, VALUE literals)
5554 const NODE *line_node = vals;
5556 switch (nd_type(vals)) {
5558 if (when_vals(iseq, cond_seq, vals, l1, only_special_literals, literals) < 0)
5562 ADD_INSN (cond_seq, line_node, dup);
5563 CHECK(COMPILE(cond_seq, "when splat", RNODE_SPLAT(vals)->nd_head));
5564 ADD_INSN1(cond_seq, line_node, splatarray, Qfalse);
5565 ADD_INSN1(cond_seq, line_node, checkmatch, INT2FIX(VM_CHECKMATCH_TYPE_CASE | VM_CHECKMATCH_ARRAY));
5566 ADD_INSNL(cond_seq, line_node, branchif, l1);
5569 CHECK(when_splat_vals(iseq, cond_seq, RNODE_ARGSCAT(vals)->nd_head, l1, only_special_literals, literals));
5570 CHECK(when_splat_vals(iseq, cond_seq, RNODE_ARGSCAT(vals)->nd_body, l1, only_special_literals, literals));
5573 CHECK(when_splat_vals(iseq, cond_seq, RNODE_ARGSPUSH(vals)->nd_head, l1, only_special_literals, literals));
5574 ADD_INSN (cond_seq, line_node, dup);
5575 CHECK(COMPILE(cond_seq, "when argspush body", RNODE_ARGSPUSH(vals)->nd_body));
5576 ADD_INSN1(cond_seq, line_node, checkmatch, INT2FIX(VM_CHECKMATCH_TYPE_CASE));
5577 ADD_INSNL(cond_seq, line_node, branchif, l1);
5580 ADD_INSN (cond_seq, line_node, dup);
5581 CHECK(COMPILE(cond_seq, "when val", vals));
5582 ADD_INSN1(cond_seq, line_node, splatarray, Qfalse);
5583 ADD_INSN1(cond_seq, line_node, checkmatch, INT2FIX(VM_CHECKMATCH_TYPE_CASE | VM_CHECKMATCH_ARRAY));
5584 ADD_INSNL(cond_seq, line_node, branchif, l1);
5677 const NODE *line_node;
5692 add_masgn_lhs_node(struct masgn_state *state, int lhs_pos, const NODE *line_node, int argc, INSN *before_insn)
5695 rb_bug("no masgn_state");
5704 memo->before_insn = before_insn;
5705 memo->line_node = line_node;
5706 memo->argn = state->num_args + 1;
5707 memo->num_args = argc;
5708 state->num_args += argc;
5709 memo->lhs_pos = lhs_pos;
5711 if (!state->first_memo) {
5712 state->first_memo = memo;
5715 state->last_memo->next = memo;
5717 state->last_memo = memo;
5727 switch (nd_type(node)) {
5728 case NODE_ATTRASGN: {
5730 const NODE *line_node = node;
5732 CHECK(COMPILE_POPPED(pre, "masgn lhs (NODE_ATTRASGN)", node));
5734 bool safenav_call = false;
5736 iobj = (INSN *)get_prev_insn((INSN *)insn_element);
5738 ELEM_REMOVE(insn_element);
5739 if (!IS_INSN_ID(iobj, send)) {
5740 safenav_call = true;
5741 iobj = (INSN *)get_prev_insn(iobj);
5742 ELEM_INSERT_NEXT(&iobj->link, insn_element);
5744 (pre->last = iobj->link.prev)->next = 0;
5747 int argc = vm_ci_argc(ci) + 1;
5748 ci = ci_argc_set(iseq, ci, argc);
5749 OPERAND_AT(iobj, 0) = (VALUE)ci;
5753 ADD_INSN(lhs, line_node, swap);
5756 ADD_INSN1(lhs, line_node, topn, INT2FIX(argc));
5759 if (!add_masgn_lhs_node(state, lhs_pos, line_node, argc, (INSN *)LAST_ELEMENT(lhs))) {
5763 iobj->link.prev = lhs->last;
5764 lhs->last->next = &iobj->link;
5765 for (lhs->last = &iobj->link; lhs->last->next; lhs->last = lhs->last->next);
5766 if (vm_ci_flag(ci) & VM_CALL_ARGS_SPLAT) {
5767 int argc = vm_ci_argc(ci);
5768 bool dupsplat = false;
5769 ci = ci_argc_set(iseq, ci, argc - 1);
5770 if (!(vm_ci_flag(ci) & VM_CALL_ARGS_SPLAT_MUT)) {
5777 ci = ci_flag_set(iseq, ci, VM_CALL_ARGS_SPLAT_MUT);
5779 OPERAND_AT(iobj, 0) = (VALUE)ci;
5788 int line_no = nd_line(line_node);
5789 int node_id = nd_node_id(line_node);
5792 INSERT_BEFORE_INSN(iobj, line_no, node_id, swap);
5793 INSERT_BEFORE_INSN1(iobj, line_no, node_id, splatarray, Qtrue);
5794 INSERT_BEFORE_INSN(iobj, line_no, node_id, swap);
5796 INSERT_BEFORE_INSN1(iobj, line_no, node_id, pushtoarray, INT2FIX(1));
5798 if (!safenav_call) {
5799 ADD_INSN(lhs, line_node, pop);
5801 ADD_INSN(lhs, line_node, pop);
5804 for (
int i=0; i < argc; i++) {
5805 ADD_INSN(post, line_node, pop);
5810 DECL_ANCHOR(nest_rhs);
5811 INIT_ANCHOR(nest_rhs);
5812 DECL_ANCHOR(nest_lhs);
5813 INIT_ANCHOR(nest_lhs);
5815 int prev_level = state->lhs_level;
5816 bool prev_nested = state->nested;
5818 state->lhs_level = lhs_pos - 1;
5819 CHECK(compile_massign0(iseq, pre, nest_rhs, nest_lhs, post, node, state, 1));
5820 state->lhs_level = prev_level;
5821 state->nested = prev_nested;
5823 ADD_SEQ(lhs, nest_rhs);
5824 ADD_SEQ(lhs, nest_lhs);
5828 if (!RNODE_CDECL(node)->nd_vid) {
5832 CHECK(COMPILE_POPPED(pre, "masgn lhs (NODE_CDECL)", node));
5835 iobj = (INSN *)insn_element;
5838 ELEM_REMOVE(insn_element);
5839 pre->last = iobj->link.prev;
5842 if (!add_masgn_lhs_node(state, lhs_pos, node, 1, (INSN *)LAST_ELEMENT(lhs))) {
5846 ADD_INSN(post, node, pop);
5851 DECL_ANCHOR(anchor);
5852 INIT_ANCHOR(anchor);
5853 CHECK(COMPILE_POPPED(anchor, "masgn lhs", node));
5854 ELEM_REMOVE(FIRST_ELEMENT(anchor));
5855 ADD_SEQ(lhs, anchor);
5866 CHECK(compile_massign_opt_lhs(iseq, ret, RNODE_LIST(lhsn)->nd_next));
5867 CHECK(compile_massign_lhs(iseq, ret, ret, ret, ret, RNODE_LIST(lhsn)->nd_head, NULL, 0));
5874 const NODE *rhsn,
const NODE *orig_lhsn)
5877 const int memsize = numberof(mem);
5879 int llen = 0, rlen = 0;
5881 const NODE *lhsn = orig_lhsn;
5883#define MEMORY(v) { \
5885 if (memindex == memsize) return 0; \
5886 for (i=0; i<memindex; i++) { \
5887 if (mem[i] == (v)) return 0; \
5889 mem[memindex++] = (v); \
5892 if (rhsn == 0 || !nd_type_p(rhsn, NODE_LIST)) {
5897 const NODE *ln = RNODE_LIST(lhsn)->nd_head;
5898 switch (nd_type(ln)) {
5903 MEMORY(get_nd_vid(ln));
5908 lhsn = RNODE_LIST(lhsn)->nd_next;
5914 NO_CHECK(COMPILE_POPPED(ret,
"masgn val (popped)", RNODE_LIST(rhsn)->nd_head));
5917 NO_CHECK(COMPILE(ret,
"masgn val", RNODE_LIST(rhsn)->nd_head));
5919 rhsn = RNODE_LIST(rhsn)->nd_next;
5924 for (i=0; i<llen-rlen; i++) {
5925 ADD_INSN(ret, orig_lhsn, putnil);
5929 compile_massign_opt_lhs(iseq, ret, orig_lhsn);
5936 const NODE *rhsn = RNODE_MASGN(node)->nd_value;
5937 const NODE *splatn = RNODE_MASGN(node)->nd_args;
5938 const NODE *lhsn = RNODE_MASGN(node)->nd_head;
5939 const NODE *lhsn_count = lhsn;
5940 int lhs_splat = (splatn && NODE_NAMED_REST_P(splatn)) ? 1 : 0;
5945 while (lhsn_count) {
5947 lhsn_count = RNODE_LIST(lhsn_count)->nd_next;
5950 CHECK(compile_massign_lhs(iseq, pre, rhs, lhs, post, RNODE_LIST(lhsn)->nd_head, state, (llen - lpos) + lhs_splat + state->lhs_level));
5952 lhsn = RNODE_LIST(lhsn)->nd_next;
5956 if (nd_type_p(splatn, NODE_POSTARG)) {
5958 const NODE *postn = RNODE_POSTARG(splatn)->nd_2nd;
5959 const NODE *restn = RNODE_POSTARG(splatn)->nd_1st;
5960 int plen = (int)RNODE_LIST(postn)->as.nd_alen;
5962 int flag = 0x02 | (NODE_NAMED_REST_P(restn) ? 0x01 : 0x00);
5964 ADD_INSN2(lhs, splatn, expandarray,
INT2FIX(plen),
INT2FIX(flag));
5966 if (NODE_NAMED_REST_P(restn)) {
5967 CHECK(compile_massign_lhs(iseq, pre, rhs, lhs, post, restn, state, 1 + plen + state->lhs_level));
5970 CHECK(compile_massign_lhs(iseq, pre, rhs, lhs, post, RNODE_LIST(postn)->nd_head, state, (plen - ppos) + state->lhs_level));
5972 postn = RNODE_LIST(postn)->nd_next;
5977 CHECK(compile_massign_lhs(iseq, pre, rhs, lhs, post, splatn, state, 1 + state->lhs_level));
5981 if (!state->nested) {
5982 NO_CHECK(COMPILE(rhs,
"normal masgn rhs", rhsn));
5986 ADD_INSN(rhs, node, dup);
5988 ADD_INSN2(rhs, node, expandarray,
INT2FIX(llen),
INT2FIX(lhs_splat));
5995 if (!popped || RNODE_MASGN(node)->nd_args || !compile_massign_opt(iseq, ret, RNODE_MASGN(node)->nd_value, RNODE_MASGN(node)->nd_head)) {
5997 state.lhs_level = popped ? 0 : 1;
6000 state.first_memo = NULL;
6001 state.last_memo = NULL;
6011 int ok = compile_massign0(iseq, pre, rhs, lhs, post, node, &state, popped);
6015 VALUE topn_arg = INT2FIX((state.num_args - memo->argn) + memo->lhs_pos);
6016 for (int i = 0; i < memo->num_args; i++) {
6017 INSERT_BEFORE_INSN1(memo->before_insn, nd_line(memo->line_node), nd_node_id(memo->line_node), topn, topn_arg);
6019 tmp_memo = memo->next;
6028 if (!popped && state.num_args >= 1) {
6030 ADD_INSN1(ret, node, setn, INT2FIX(state.num_args));
6042 switch (nd_type(node)) {
6044 rb_ary_unshift(arr, ID2SYM(RNODE_CONST(node)->nd_vid));
6045 RB_OBJ_SET_SHAREABLE(arr);
6048 rb_ary_unshift(arr, ID2SYM(RNODE_COLON3(node)->nd_mid));
6049 rb_ary_unshift(arr, ID2SYM(idNULL));
6050 RB_OBJ_SET_SHAREABLE(arr);
6053 rb_ary_unshift(arr, ID2SYM(RNODE_COLON2(node)->nd_mid));
6054 node = RNODE_COLON2(node)->nd_head;
6063 compile_const_prefix(rb_iseq_t *iseq, const NODE *const node,
6066 switch (nd_type(node)) {
6068 debugi("compile_const_prefix - colon", RNODE_CONST(node)->nd_vid);
6069 ADD_INSN1(body, node, putobject, Qtrue);
6070 ADD_INSN1(body, node, getconstant, ID2SYM(RNODE_CONST(node)->nd_vid));
6073 debugi("compile_const_prefix - colon3", RNODE_COLON3(node)->nd_mid);
6074 ADD_INSN(body, node, pop);
6075 ADD_INSN1(body, node, putobject, rb_cObject);
6076 ADD_INSN1(body, node, putobject, Qtrue);
6077 ADD_INSN1(body, node, getconstant, ID2SYM(RNODE_COLON3(node)->nd_mid));
6080 CHECK(compile_const_prefix(iseq, RNODE_COLON2(node)->nd_head, pref, body));
6081 debugi("compile_const_prefix - colon2", RNODE_COLON2(node)->nd_mid);
6082 ADD_INSN1(body, node, putobject, Qfalse);
6083 ADD_INSN1(body, node, getconstant, ID2SYM(RNODE_COLON2(node)->nd_mid));
6086 CHECK(COMPILE(pref, "const colon2 prefix", node));
6095 if (nd_type_p(cpath, NODE_COLON3)) {
6097 ADD_INSN1(ret, cpath, putobject, rb_cObject);
6098 return VM_DEFINECLASS_FLAG_SCOPED;
6100 else if (nd_type_p(cpath, NODE_COLON2) && RNODE_COLON2(cpath)->nd_head) {
6102 NO_CHECK(COMPILE(ret,
"nd_else->nd_head", RNODE_COLON2(cpath)->nd_head));
6103 return VM_DEFINECLASS_FLAG_SCOPED;
6107 ADD_INSN1(ret, cpath, putspecialobject,
6108 INT2FIX(VM_SPECIAL_OBJECT_CONST_BASE));
6114private_recv_p(
const NODE *node)
6116 NODE *recv = get_nd_recv(node);
6117 if (recv && nd_type_p(recv, NODE_SELF)) {
6118 return RNODE_SELF(recv)->nd_state != 0;
6125 const NODE *const node, LABEL **lfinish, VALUE needstr, bool ignore);
6128 compile_call(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, const enum node_type type, const NODE *const line_node, int popped, bool assume_receiver);
6135 enum defined_type expr_type = DEFINED_NOT_DEFINED;
6136 enum node_type type;
6137 const int line = nd_line(node);
6138 const NODE *line_node = node;
6140 switch (type = nd_type(node)) {
6144 expr_type = DEFINED_NIL;
6147 expr_type = DEFINED_SELF;
6150 expr_type = DEFINED_TRUE;
6153 expr_type = DEFINED_FALSE;
6158 const NODE *vals = (nd_type(node) == NODE_HASH) ? RNODE_HASH(node)->nd_head : node;
6162 if (RNODE_LIST(vals)->nd_head) {
6163 defined_expr0(iseq, ret, RNODE_LIST(vals)->nd_head, lfinish, Qfalse, false);
6166 lfinish[1] = NEW_LABEL(line);
6168 ADD_INSNL(ret, line_node, branchunless, lfinish[1]);
6170 } while ((vals = RNODE_LIST(vals)->nd_next) != NULL);
6183 case NODE_IMAGINARY:
6188 expr_type = DEFINED_EXPR;
6192 defined_expr0(iseq, ret, RNODE_LIST(node)->nd_head, lfinish,
Qfalse,
false);
6194 lfinish[1] = NEW_LABEL(line);
6196 ADD_INSNL(ret, line_node, branchunless, lfinish[1]);
6197 expr_type = DEFINED_EXPR;
6203 expr_type = DEFINED_LVAR;
6206#define PUSH_VAL(type) (needstr == Qfalse ? Qtrue : rb_iseq_defined_string(type))
6208 ADD_INSN3(ret, line_node, definedivar,
6209 ID2SYM(RNODE_IVAR(node)->nd_vid), get_ivar_ic_value(iseq,RNODE_IVAR(node)->nd_vid), PUSH_VAL(DEFINED_IVAR));
6213 ADD_INSN(ret, line_node, putnil);
6214 ADD_INSN3(ret, line_node, defined, INT2FIX(DEFINED_GVAR),
6215 ID2SYM(RNODE_GVAR(node)->nd_vid), PUSH_VAL(DEFINED_GVAR));
6219 ADD_INSN(ret, line_node, putnil);
6220 ADD_INSN3(ret, line_node, defined, INT2FIX(DEFINED_CVAR),
6221 ID2SYM(RNODE_CVAR(node)->nd_vid), PUSH_VAL(DEFINED_CVAR));
6225 ADD_INSN(ret, line_node, putnil);
6226 ADD_INSN3(ret, line_node, defined, INT2FIX(DEFINED_CONST),
6227 ID2SYM(RNODE_CONST(node)->nd_vid), PUSH_VAL(DEFINED_CONST));
6231 lfinish[1] = NEW_LABEL(line);
6233 defined_expr0(iseq, ret, RNODE_COLON2(node)->nd_head, lfinish, Qfalse, false);
6234 ADD_INSNL(ret, line_node, branchunless, lfinish[1]);
6235 NO_CHECK(COMPILE(ret, "defined/colon2#nd_head", RNODE_COLON2(node)->nd_head));
6238 ADD_INSN3(ret, line_node, defined, INT2FIX(DEFINED_CONST_FROM),
6239 ID2SYM(RNODE_COLON2(node)->nd_mid), PUSH_VAL(DEFINED_CONST));
6242 ADD_INSN3(ret, line_node, defined, INT2FIX(DEFINED_METHOD),
6243 ID2SYM(RNODE_COLON2(node)->nd_mid), PUSH_VAL(DEFINED_METHOD));
6247 ADD_INSN1(ret, line_node, putobject, rb_cObject);
6248 ADD_INSN3(ret, line_node, defined,
6249 INT2FIX(DEFINED_CONST_FROM), ID2SYM(RNODE_COLON3(node)->nd_mid), PUSH_VAL(DEFINED_CONST));
6257 case NODE_ATTRASGN:{
6258 const int explicit_receiver =
6259 (type == NODE_CALL || type == NODE_OPCALL ||
6260 (type == NODE_ATTRASGN && !private_recv_p(node)));
6262 if (get_nd_args(node) || explicit_receiver) {
6264 lfinish[1] = NEW_LABEL(line);
6267 lfinish[2] = NEW_LABEL(line);
6270 if (get_nd_args(node)) {
6271 defined_expr0(iseq, ret, get_nd_args(node), lfinish, Qfalse, false);
6272 ADD_INSNL(ret, line_node, branchunless, lfinish[1]);
6274 if (explicit_receiver) {
6275 defined_expr0(iseq, ret, get_nd_recv(node), lfinish, Qfalse, true);
6276 switch (nd_type(get_nd_recv(node))) {
6282 ADD_INSNL(ret, line_node, branchunless, lfinish[2]);
6283 compile_call(iseq, ret, get_nd_recv(node), nd_type(get_nd_recv(node)), line_node, 0, true);
6286 ADD_INSNL(ret, line_node, branchunless, lfinish[1]);
6287 NO_CHECK(COMPILE(ret, "defined/recv", get_nd_recv(node)));
6291 ADD_INSN(ret, line_node, dup);
6293 ADD_INSN3(ret, line_node, defined, INT2FIX(DEFINED_METHOD),
6294 ID2SYM(get_node_call_nd_mid(node)), PUSH_VAL(DEFINED_METHOD));
6297 ADD_INSN(ret, line_node, putself);
6299 ADD_INSN(ret, line_node, dup);
6301 ADD_INSN3(ret, line_node, defined, INT2FIX(DEFINED_FUNC),
6302 ID2SYM(get_node_call_nd_mid(node)), PUSH_VAL(DEFINED_METHOD));
6308 ADD_INSN(ret, line_node, putnil);
6309 ADD_INSN3(ret, line_node, defined, INT2FIX(DEFINED_YIELD), 0,
6310 PUSH_VAL(DEFINED_YIELD));
6311 iseq_set_use_block(ISEQ_BODY(iseq)->local_iseq);
6316 ADD_INSN(ret, line_node, putnil);
6317 ADD_INSN3(ret, line_node, defined, INT2FIX(DEFINED_REF),
6318 INT2FIX((RNODE_BACK_REF(node)->nd_nth << 1) | (type == NODE_BACK_REF)),
6319 PUSH_VAL(DEFINED_GVAR));
6324 ADD_INSN(ret, line_node, putnil);
6325 ADD_INSN3(ret, line_node, defined, INT2FIX(DEFINED_ZSUPER), 0,
6326 PUSH_VAL(DEFINED_ZSUPER));
6332 case NODE_OP_ASGN_OR:
6333 case NODE_OP_ASGN_AND:
6342 expr_type = DEFINED_ASGN;
6349 VALUE str = rb_iseq_defined_string(expr_type);
6350 ADD_INSN1(ret, line_node, putobject, str);
6353 ADD_INSN1(ret, line_node, putobject, Qtrue);
6360 ADD_SYNTHETIC_INSN(ret, 0, -1, putnil);
6361 iseq_set_exception_local_table(iseq);
6366 const NODE *const node, LABEL **lfinish, VALUE needstr, bool ignore)
6369 defined_expr0(iseq, ret, node, lfinish, needstr, false);
6371 int line = nd_line(node);
6372 LABEL *lstart = NEW_LABEL(line);
6373 LABEL *lend = NEW_LABEL(line);
6376 rb_iseq_new_with_callback_new_callback(build_defined_rescue_iseq, NULL);
6377 rescue = NEW_CHILD_ISEQ_WITH_CALLBACK(ifunc,
6379 ISEQ_BODY(iseq)->location.label),
6380 ISEQ_TYPE_RESCUE, 0);
6381 lstart->rescued = LABEL_RESCUE_BEG;
6382 lend->rescued = LABEL_RESCUE_END;
6383 APPEND_LABEL(ret, lcur, lstart);
6384 ADD_LABEL(ret, lend);
6386 ADD_CATCH_ENTRY(CATCH_TYPE_RESCUE, lstart, lend, rescue, lfinish[1]);
6394 const int line = nd_line(node);
6395 const NODE *line_node = node;
6396 if (!RNODE_DEFINED(node)->nd_head) {
6397 VALUE str = rb_iseq_defined_string(DEFINED_NIL);
6398 ADD_INSN1(ret, line_node, putobject, str);
6403 lfinish[0] = NEW_LABEL(line);
6406 defined_expr(iseq, ret, RNODE_DEFINED(node)->nd_head, lfinish, needstr, ignore);
6408 ELEM_INSERT_NEXT(last, &new_insn_body(iseq, nd_line(line_node), nd_node_id(line_node), BIN(putnil), 0)->link);
6409 ADD_INSN(ret, line_node, swap);
6411 ADD_LABEL(ret, lfinish[2]);
6413 ADD_INSN(ret, line_node, pop);
6414 ADD_LABEL(ret, lfinish[1]);
6416 ADD_LABEL(ret, lfinish[0]);
6422 make_name_for_block(const rb_iseq_t *orig_iseq)
6427 if (ISEQ_BODY(orig_iseq)->parent_iseq != 0) {
6428 while (ISEQ_BODY(orig_iseq)->local_iseq != iseq) {
6429 if (ISEQ_BODY(iseq)->type == ISEQ_TYPE_BLOCK) {
6432 iseq = ISEQ_BODY(iseq)->parent_iseq;
6437 return rb_sprintf("block in %"PRIsVALUE, ISEQ_BODY(iseq)->location.label);
6440 return rb_sprintf("block (%d levels) in %"PRIsVALUE, level, ISEQ_BODY(iseq)->location.label);
6449 enl->ensure_node = node;
6450 enl->prev = ISEQ_COMPILE_DATA(iseq)->ensure_node_stack;
6452 ISEQ_COMPILE_DATA(iseq)->ensure_node_stack = enl;
6462 while (erange->next != 0) {
6463 erange = erange->next;
6467 ne->end = erange->end;
6468 erange->end = lstart;
6474 can_add_ensure_iseq(const rb_iseq_t *iseq)
6477 if (ISEQ_COMPILE_DATA(iseq)->in_rescue && (e = ISEQ_COMPILE_DATA(iseq)->ensure_node_stack) != NULL) {
6479 if (e->ensure_node) return false;
6492 ISEQ_COMPILE_DATA(iseq)->ensure_node_stack;
6494 DECL_ANCHOR(ensure);
6496 INIT_ANCHOR(ensure);
6498 if (enlp->erange != NULL) {
6499 DECL_ANCHOR(ensure_part);
6500 LABEL *lstart = NEW_LABEL(0);
6501 LABEL *lend = NEW_LABEL(0);
6502 INIT_ANCHOR(ensure_part);
6504 add_ensure_range(iseq, enlp->erange, lstart, lend);
6506 ISEQ_COMPILE_DATA(iseq)->ensure_node_stack = enlp->prev;
6507 ADD_LABEL(ensure_part, lstart);
6508 NO_CHECK(COMPILE_POPPED(ensure_part,
"ensure part", enlp->ensure_node));
6509 ADD_LABEL(ensure_part, lend);
6510 ADD_SEQ(ensure, ensure_part);
6519 ISEQ_COMPILE_DATA(iseq)->ensure_node_stack = prev_enlp;
6520 ADD_SEQ(ret, ensure);
6525check_keyword(
const NODE *node)
6529 if (nd_type_p(node, NODE_LIST)) {
6530 while (RNODE_LIST(node)->nd_next) {
6531 node = RNODE_LIST(node)->nd_next;
6533 node = RNODE_LIST(node)->nd_head;
6536 return keyword_node_p(node);
6541keyword_node_single_splat_p(
NODE *kwnode)
6545 NODE *node = RNODE_HASH(kwnode)->nd_head;
6546 return RNODE_LIST(node)->nd_head == NULL &&
6547 RNODE_LIST(RNODE_LIST(node)->nd_next)->nd_next == NULL;
6552 NODE *kwnode, unsigned int *flag_ptr)
6554 *flag_ptr |= VM_CALL_KW_SPLAT_MUT;
6555 ADD_INSN1(args, argn, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
6556 ADD_INSN1(args, argn, newhash, INT2FIX(0));
6557 compile_hash(iseq, args, kwnode, TRUE, FALSE);
6558 ADD_SEND(args, argn, id_core_hash_merge_kwd, INT2FIX(2));
6561#define SPLATARRAY_FALSE 0
6562#define SPLATARRAY_TRUE 1
6563#define DUP_SINGLE_KW_SPLAT 2
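/*
 * Argument-splat flags used by setup_args_core()/setup_args():
 * SPLATARRAY_TRUE forces `splatarray true` (the splatted array must be
 * duplicated because later argument handling may mutate it),
 * SPLATARRAY_FALSE allows the non-copying form, and DUP_SINGLE_KW_SPLAT
 * requests an explicit copy of a lone **kwargs splat before it is merged.
 */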
6567 unsigned int *dup_rest, unsigned int *flag_ptr, struct rb_callinfo_kwarg **kwarg_ptr)
6569 if (!argn) return 0;
6571 NODE *kwnode = NULL;
6573 switch (nd_type(argn)) {
6576 int len = compile_args(iseq, args, argn, &kwnode);
6577 RUBY_ASSERT(flag_ptr == NULL || (*flag_ptr & VM_CALL_ARGS_SPLAT) == 0);
6580 if (compile_keyword_arg(iseq, args, kwnode, kwarg_ptr, flag_ptr)) {
6584 if (keyword_node_single_splat_p(kwnode) && (*dup_rest & DUP_SINGLE_KW_SPLAT)) {
6585 compile_single_keyword_splat_mutable(iseq, args, argn, kwnode, flag_ptr);
6588 compile_hash(iseq, args, kwnode, TRUE, FALSE);
6597 NO_CHECK(COMPILE(args,
"args (splat)", RNODE_SPLAT(argn)->nd_head));
6598 ADD_INSN1(args, argn, splatarray, RBOOL(*dup_rest & SPLATARRAY_TRUE));
6599 if (*dup_rest & SPLATARRAY_TRUE) *dup_rest &= ~SPLATARRAY_TRUE;
6600 if (flag_ptr) *flag_ptr |= VM_CALL_ARGS_SPLAT;
6601 RUBY_ASSERT(flag_ptr == NULL || (*flag_ptr & VM_CALL_KW_SPLAT) == 0);
6604 case NODE_ARGSCAT: {
6605 if (flag_ptr) *flag_ptr |= VM_CALL_ARGS_SPLAT;
6606 int argc = setup_args_core(iseq, args, RNODE_ARGSCAT(argn)->nd_head, dup_rest, NULL, NULL);
6607 bool args_pushed =
false;
6609 if (nd_type_p(RNODE_ARGSCAT(argn)->nd_body, NODE_LIST)) {
6610 int rest_len = compile_args(iseq, args, RNODE_ARGSCAT(argn)->nd_body, &kwnode);
6611 if (kwnode) rest_len--;
6612 ADD_INSN1(args, argn, pushtoarray, INT2FIX(rest_len));
6616 RUBY_ASSERT(!check_keyword(RNODE_ARGSCAT(argn)->nd_body));
6617 NO_CHECK(COMPILE(args, "args (cat: splat)", RNODE_ARGSCAT(argn)->nd_body));
6620 if (nd_type_p(RNODE_ARGSCAT(argn)->nd_head, NODE_LIST)) {
6621 ADD_INSN1(args, argn, splatarray, RBOOL(*dup_rest & SPLATARRAY_TRUE));
6622 if (*dup_rest & SPLATARRAY_TRUE) *dup_rest &= ~SPLATARRAY_TRUE;
6625 else if (!args_pushed) {
6626 ADD_INSN(args, argn, concattoarray);
6632 *flag_ptr |= VM_CALL_KW_SPLAT;
6633 compile_hash(iseq, args, kwnode, TRUE, FALSE);
6639 case NODE_ARGSPUSH: {
6640 if (flag_ptr) *flag_ptr |= VM_CALL_ARGS_SPLAT;
6641 int argc = setup_args_core(iseq, args, RNODE_ARGSPUSH(argn)->nd_head, dup_rest, NULL, NULL);
6643 if (nd_type_p(RNODE_ARGSPUSH(argn)->nd_body, NODE_LIST)) {
6644 int rest_len = compile_args(iseq, args, RNODE_ARGSPUSH(argn)->nd_body, &kwnode);
6645 if (kwnode) rest_len--;
6646 ADD_INSN1(args, argn, newarray, INT2FIX(rest_len));
6647 ADD_INSN1(args, argn, pushtoarray, INT2FIX(1));
6650 if (keyword_node_p(RNODE_ARGSPUSH(argn)->nd_body)) {
6651 kwnode = RNODE_ARGSPUSH(argn)->nd_body;
6654 NO_CHECK(COMPILE(args, "args (cat: splat)", RNODE_ARGSPUSH(argn)->nd_body));
6655 ADD_INSN1(args, argn, pushtoarray, INT2FIX(1));
6661 *flag_ptr |= VM_CALL_KW_SPLAT;
6662 if (!keyword_node_single_splat_p(kwnode)) {
6663 *flag_ptr |= VM_CALL_KW_SPLAT_MUT;
6664 compile_hash(iseq, args, kwnode, TRUE, FALSE);
6666 else if (*dup_rest & DUP_SINGLE_KW_SPLAT) {
6667 compile_single_keyword_splat_mutable(iseq, args, argn, kwnode, flag_ptr);
6670 compile_hash(iseq, args, kwnode, TRUE, FALSE);
6678 UNKNOWN_NODE("setup_arg", argn, Qnil);
6684 setup_args_splat_mut(unsigned int *flag, int dup_rest, int initial_dup_rest)
6686 if ((*flag & VM_CALL_ARGS_SPLAT) && dup_rest != initial_dup_rest) {
6687 *flag |= VM_CALL_ARGS_SPLAT_MUT;
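/* Conservative side-effect check used when deciding whether a splatted array
 * must be copied: literal-like argument nodes cannot mutate the splat, so the
 * copy can be skipped for them. */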
6692 setup_args_dup_rest_p(const NODE *argn)
6694 switch(nd_type(argn)) {
6705 case NODE_IMAGINARY:
6718 return setup_args_dup_rest_p(RNODE_COLON2(argn)->nd_head);
6721 if (setup_args_dup_rest_p(RNODE_LIST(argn)->nd_head)) {
6724 argn = RNODE_LIST(argn)->nd_next;
6737 unsigned int dup_rest = SPLATARRAY_TRUE, initial_dup_rest;
6740 const NODE *check_arg = nd_type_p(argn, NODE_BLOCK_PASS) ?
6741 RNODE_BLOCK_PASS(argn)->nd_head : argn;
6744 switch(nd_type(check_arg)) {
6747 dup_rest = SPLATARRAY_FALSE;
6751 dup_rest = !nd_type_p(RNODE_ARGSCAT(check_arg)->nd_head, NODE_LIST);
6753 case(NODE_ARGSPUSH):
6755 dup_rest = !((nd_type_p(RNODE_ARGSPUSH(check_arg)->nd_head, NODE_SPLAT) ||
6756 (nd_type_p(RNODE_ARGSPUSH(check_arg)->nd_head, NODE_ARGSCAT) &&
6757 nd_type_p(RNODE_ARGSCAT(RNODE_ARGSPUSH(check_arg)->nd_head)->nd_head, NODE_LIST))) &&
6758 nd_type_p(RNODE_ARGSPUSH(check_arg)->nd_body, NODE_HASH) &&
6759 !RNODE_HASH(RNODE_ARGSPUSH(check_arg)->nd_body)->nd_brace);
6761 if (dup_rest == SPLATARRAY_FALSE) {
6763 NODE *node = RNODE_HASH(RNODE_ARGSPUSH(check_arg)->nd_body)->nd_head;
6765 NODE *key_node = RNODE_LIST(node)->nd_head;
6766 if (key_node && setup_args_dup_rest_p(key_node)) {
6767 dup_rest = SPLATARRAY_TRUE;
6771 node = RNODE_LIST(node)->nd_next;
6772 NODE *value_node = RNODE_LIST(node)->nd_head;
6773 if (setup_args_dup_rest_p(value_node)) {
6774 dup_rest = SPLATARRAY_TRUE;
6778 node = RNODE_LIST(node)->nd_next;
6787 if (check_arg != argn && setup_args_dup_rest_p(RNODE_BLOCK_PASS(argn)->nd_body)) {
6789 dup_rest = SPLATARRAY_TRUE | DUP_SINGLE_KW_SPLAT;
6792 initial_dup_rest = dup_rest;
6794 if (argn && nd_type_p(argn, NODE_BLOCK_PASS)) {
6795 DECL_ANCHOR(arg_block);
6796 INIT_ANCHOR(arg_block);
6798 if (RNODE_BLOCK_PASS(argn)->forwarding && ISEQ_BODY(ISEQ_BODY(iseq)->local_iseq)->param.flags.forwardable) {
6799 int idx = ISEQ_BODY(ISEQ_BODY(iseq)->local_iseq)->local_table_size;
6801 RUBY_ASSERT(nd_type_p(RNODE_BLOCK_PASS(argn)->nd_head, NODE_ARGSPUSH));
6802 const NODE * arg_node =
6803 RNODE_ARGSPUSH(RNODE_BLOCK_PASS(argn)->nd_head)->nd_head;
6810 if (nd_type_p(arg_node, NODE_ARGSCAT)) {
6811 argc += setup_args_core(iseq, args, RNODE_ARGSCAT(arg_node)->nd_head, &dup_rest, flag, keywords);
6814 *flag |= VM_CALL_FORWARDING;
6816 ADD_GETLOCAL(args, argn, idx, get_lvar_level(iseq));
6817 setup_args_splat_mut(flag, dup_rest, initial_dup_rest);
6821 *flag |= VM_CALL_ARGS_BLOCKARG;
6823 NO_CHECK(COMPILE(arg_block, "block", RNODE_BLOCK_PASS(argn)->nd_body));
6826 if (LIST_INSN_SIZE_ONE(arg_block)) {
6828 if (IS_INSN(elem)) {
6830 if (iobj->insn_id == BIN(getblockparam)) {
6831 iobj->insn_id = BIN(getblockparamproxy);
6835 ret = INT2FIX(setup_args_core(iseq, args, RNODE_BLOCK_PASS(argn)->nd_head, &dup_rest, flag, keywords));
6836 ADD_SEQ(args, arg_block);
6839 ret = INT2FIX(setup_args_core(iseq, args, argn, &dup_rest, flag, keywords));
6841 setup_args_splat_mut(flag, dup_rest, initial_dup_rest);
6848 const NODE *body = ptr;
6849 int line = nd_line(body);
6851 const rb_iseq_t *block = NEW_CHILD_ISEQ(body, make_name_for_block(ISEQ_BODY(iseq)->parent_iseq), ISEQ_TYPE_BLOCK, line);
6853 ADD_INSN1(ret, body, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
6854 ADD_CALL_WITH_BLOCK(ret, body, id_core_set_postexe, argc, block);
6856 iseq_set_local_table(iseq, 0, 0);
6864 int line = nd_line(node);
6865 const NODE *line_node = node;
6866 LABEL *fail_label = NEW_LABEL(line), *end_label = NEW_LABEL(line);
6868#if !(defined(NAMED_CAPTURE_BY_SVAR) && NAMED_CAPTURE_BY_SVAR-0)
6869 ADD_INSN1(ret, line_node, getglobal, ID2SYM(idBACKREF));
6873 ADD_INSN(ret, line_node, dup);
6874 ADD_INSNL(ret, line_node, branchunless, fail_label);
6876 for (vars = node; vars; vars = RNODE_BLOCK(vars)->nd_next) {
6878 if (RNODE_BLOCK(vars)->nd_next) {
6879 ADD_INSN(ret, line_node, dup);
6882 NO_CHECK(COMPILE_POPPED(ret, "capture", RNODE_BLOCK(vars)->nd_head));
6884 cap = new_insn_send(iseq, nd_line(line_node), nd_node_id(line_node), idAREF, INT2FIX(1),
6887#if !defined(NAMED_CAPTURE_SINGLE_OPT) || NAMED_CAPTURE_SINGLE_OPT-0
6888 if (!RNODE_BLOCK(vars)->nd_next && vars == node) {
6893 ADD_INSNL(nom, line_node, jump, end_label);
6894 ADD_LABEL(nom, fail_label);
6896 ADD_INSN(nom, line_node, pop);
6897 ADD_INSN(nom, line_node, putnil);
6899 ADD_LABEL(nom, end_label);
6900 (nom->last->next = cap->link.next)->prev = nom->last;
6901 (cap->link.next = nom->anchor.next)->prev = &cap->link;
6906 ADD_INSNL(ret, line_node, jump, end_label);
6907 ADD_LABEL(ret, fail_label);
6908 ADD_INSN(ret, line_node, pop);
6909 for (vars = node; vars; vars = RNODE_BLOCK(vars)->nd_next) {
6911 NO_CHECK(COMPILE_POPPED(ret, "capture", RNODE_BLOCK(vars)->nd_head));
6913 ((INSN*)last)->insn_id = BIN(putnil);
6914 ((INSN*)last)->operand_size = 0;
6916 ADD_LABEL(ret, end_label);
6920 optimizable_range_item_p(const NODE *n)
6922 if (!n) return FALSE;
6923 switch (nd_type(n)) {
6936 optimized_range_item(const NODE *n)
6938 switch (nd_type(n)) {
6940 return rb_node_line_lineno_val(n);
6942 return rb_node_integer_literal_val(n);
6944 return rb_node_float_literal_val(n);
6946 return rb_node_rational_literal_val(n);
6947 case NODE_IMAGINARY:
6948 return rb_node_imaginary_literal_val(n);
6952 rb_bug("unexpected node: %s", ruby_node_name(nd_type(n)));
6959 const NODE *const node_body = type == NODE_IF ? RNODE_IF(node)->nd_body : RNODE_UNLESS(node)->nd_else;
6960 const NODE *const node_else = type == NODE_IF ? RNODE_IF(node)->nd_else : RNODE_UNLESS(node)->nd_body;
6962 const int line = nd_line(node);
6963 const NODE *line_node = node;
6964 DECL_ANCHOR(cond_seq);
6965 LABEL *then_label, *else_label, *end_label;
6968 INIT_ANCHOR(cond_seq);
6969 then_label = NEW_LABEL(line);
6970 else_label = NEW_LABEL(line);
6973 NODE *cond = RNODE_IF(node)->nd_cond;
6974 if (nd_type(cond) == NODE_BLOCK) {
6975 cond = RNODE_BLOCK(cond)->nd_head;
6978 CHECK(compile_branch_condition(iseq, cond_seq, cond, then_label, else_label));
6979 ADD_SEQ(ret, cond_seq);
6981 if (then_label->refcnt && else_label->refcnt) {
6982 branches = decl_branch_base(iseq, PTR2NUM(node), nd_code_loc(node), type == NODE_IF ? "if" : "unless");
6985 if (then_label->refcnt) {
6986 ADD_LABEL(ret, then_label);
6988 DECL_ANCHOR(then_seq);
6989 INIT_ANCHOR(then_seq);
6990 CHECK(COMPILE_(then_seq, "then", node_body, popped));
6992 if (else_label->refcnt) {
6993 const NODE *const coverage_node = node_body ? node_body : node;
6994 add_trace_branch_coverage(
6997 nd_code_loc(coverage_node),
6998 nd_node_id(coverage_node),
7000 type == NODE_IF ? "then" : "else",
7002 end_label = NEW_LABEL(line);
7003 ADD_INSNL(then_seq, line_node, jump, end_label);
7005 ADD_INSN(then_seq, line_node, pop);
7008 ADD_SEQ(ret, then_seq);
7011 if (else_label->refcnt) {
7012 ADD_LABEL(ret, else_label);
7014 DECL_ANCHOR(else_seq);
7015 INIT_ANCHOR(else_seq);
7016 CHECK(COMPILE_(else_seq, "else", node_else, popped));
7018 if (then_label->refcnt) {
7019 const NODE *const coverage_node = node_else ? node_else : node;
7020 add_trace_branch_coverage(
7023 nd_code_loc(coverage_node),
7024 nd_node_id(coverage_node),
7026 type == NODE_IF ? "else" : "then",
7029 ADD_SEQ(ret, else_seq);
7033 ADD_LABEL(ret, end_label);
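/* case/when with an operand: bodies are collected into body_seq and the
 * conditions into cond_seq. When every `when` value is a static special
 * literal, the values go into a cdhash and are dispatched with
 * opt_case_dispatch; otherwise the conditions are tested in order. */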
7043 const NODE *node = orig_node;
7044 LABEL *endlabel, *elselabel;
7046 DECL_ANCHOR(body_seq);
7047 DECL_ANCHOR(cond_seq);
7048 int only_special_literals = 1;
7049 VALUE literals = rb_hash_new();
7051 enum node_type type;
7052 const NODE *line_node;
7057 INIT_ANCHOR(body_seq);
7058 INIT_ANCHOR(cond_seq);
7060 RHASH_TBL_RAW(literals)->type = &cdhash_type;
7062 CHECK(COMPILE(head, "case base", RNODE_CASE(node)->nd_head));
7064 branches = decl_branch_base(iseq, PTR2NUM(node), nd_code_loc(node), "case");
7066 node = RNODE_CASE(node)->nd_body;
7067 EXPECT_NODE("NODE_CASE", node, NODE_WHEN, COMPILE_NG);
7068 type = nd_type(node);
7069 line = nd_line(node);
7072 endlabel = NEW_LABEL(line);
7073 elselabel = NEW_LABEL(line);
7077 while (type == NODE_WHEN) {
7080 l1 = NEW_LABEL(line);
7081 ADD_LABEL(body_seq, l1);
7082 ADD_INSN(body_seq, line_node, pop);
7084 const NODE *const coverage_node = RNODE_WHEN(node)->nd_body ? RNODE_WHEN(node)->nd_body : node;
7085 add_trace_branch_coverage(
7088 nd_code_loc(coverage_node),
7089 nd_node_id(coverage_node),
7094 CHECK(COMPILE_(body_seq, "when body", RNODE_WHEN(node)->nd_body, popped));
7095 ADD_INSNL(body_seq, line_node, jump, endlabel);
7097 vals = RNODE_WHEN(node)->nd_head;
7099 switch (nd_type(vals)) {
7101 only_special_literals = when_vals(iseq, cond_seq, vals, l1, only_special_literals, literals);
7102 if (only_special_literals < 0) return COMPILE_NG;
7107 only_special_literals = 0;
7108 CHECK(when_splat_vals(iseq, cond_seq, vals, l1, only_special_literals, literals));
7111 UNKNOWN_NODE("NODE_CASE", vals, COMPILE_NG);
7115 EXPECT_NODE_NONULL("NODE_CASE", node, NODE_LIST, COMPILE_NG);
7118 node = RNODE_WHEN(node)->nd_next;
7122 type = nd_type(node);
7123 line = nd_line(node);
7128 ADD_LABEL(cond_seq, elselabel);
7129 ADD_INSN(cond_seq, line_node, pop);
7130 add_trace_branch_coverage(iseq, cond_seq, nd_code_loc(node), nd_node_id(node), branch_id, "else", branches);
7131 CHECK(COMPILE_(cond_seq, "else", node, popped));
7132 ADD_INSNL(cond_seq, line_node, jump, endlabel);
7135 debugs("== else (implicit)\n");
7136 ADD_LABEL(cond_seq, elselabel);
7137 ADD_INSN(cond_seq, orig_node, pop);
7138 add_trace_branch_coverage(iseq, cond_seq, nd_code_loc(orig_node), nd_node_id(orig_node), branch_id, "else", branches);
7140 ADD_INSN(cond_seq, orig_node, putnil);
7142 ADD_INSNL(cond_seq, orig_node, jump, endlabel);
7145 if (only_special_literals && ISEQ_COMPILE_DATA(iseq)->option->specialized_instruction) {
7146 ADD_INSN(ret, orig_node, dup);
7148 ADD_INSN2(ret, orig_node, opt_case_dispatch, literals, elselabel);
7150 LABEL_REF(elselabel);
7153 ADD_SEQ(ret, cond_seq);
7154 ADD_SEQ(ret, body_seq);
7155 ADD_LABEL(ret, endlabel);
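/* case without an operand: each `when` value is compiled as an ordinary
 * branch condition; splat values fall back to checkmatch in array mode. */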
7164 const NODE *node = RNODE_CASE2(orig_node)->nd_body;
7166 DECL_ANCHOR(body_seq);
7170 branches = decl_branch_base(iseq, PTR2NUM(orig_node), nd_code_loc(orig_node), "case");
7172 INIT_ANCHOR(body_seq);
7173 endlabel = NEW_LABEL(nd_line(node));
7175 while (node && nd_type_p(node, NODE_WHEN)) {
7176 const int line = nd_line(node);
7177 LABEL *l1 = NEW_LABEL(line);
7178 ADD_LABEL(body_seq, l1);
7180 const NODE *const coverage_node = RNODE_WHEN(node)->nd_body ? RNODE_WHEN(node)->nd_body : node;
7181 add_trace_branch_coverage(
7184 nd_code_loc(coverage_node),
7185 nd_node_id(coverage_node),
7190 CHECK(COMPILE_(body_seq, "when", RNODE_WHEN(node)->nd_body, popped));
7191 ADD_INSNL(body_seq, node, jump, endlabel);
7193 vals = RNODE_WHEN(node)->nd_head;
7195 EXPECT_NODE_NONULL("NODE_WHEN", node, NODE_LIST, COMPILE_NG);
7197 switch (nd_type(vals)) {
7201 val = RNODE_LIST(vals)->nd_head;
7202 lnext = NEW_LABEL(nd_line(val));
7203 debug_compile("== when2\n", (void)0);
7204 CHECK(compile_branch_condition(iseq, ret, val, l1, lnext));
7205 ADD_LABEL(ret, lnext);
7206 vals = RNODE_LIST(vals)->nd_next;
7212 ADD_INSN(ret, vals, putnil);
7213 CHECK(COMPILE(ret, "when2/cond splat", vals));
7214 ADD_INSN1(ret, vals, checkmatch, INT2FIX(VM_CHECKMATCH_TYPE_WHEN | VM_CHECKMATCH_ARRAY));
7215 ADD_INSNL(ret, vals, branchif, l1);
7218 UNKNOWN_NODE("NODE_WHEN", vals, COMPILE_NG);
7220 node = RNODE_WHEN(node)->nd_next;
7223 const NODE *const coverage_node = node ? node : orig_node;
7224 add_trace_branch_coverage(
7227 nd_code_loc(coverage_node),
7228 nd_node_id(coverage_node),
7232 CHECK(COMPILE_(ret, "else", node, popped));
7233 ADD_INSNL(ret, orig_node, jump, endlabel);
7235 ADD_SEQ(ret, body_seq);
7236 ADD_LABEL(ret, endlabel);
7240 static int iseq_compile_pattern_match(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, LABEL *unmatched, bool in_single_pattern, bool in_alt_pattern, int base_index, bool use_deconstructed_cache);
7242 static int iseq_compile_pattern_constant(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, LABEL *match_failed, bool in_single_pattern, int base_index);
7243 static int iseq_compile_array_deconstruct(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, LABEL *deconstruct, LABEL *deconstructed, LABEL *match_failed, LABEL *type_error, bool in_single_pattern, int base_index, bool use_deconstructed_cache);
7244 static int iseq_compile_pattern_set_general_errmsg(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, VALUE errmsg, int base_index);
7245 static int iseq_compile_pattern_set_length_errmsg(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, VALUE errmsg, VALUE pattern_length, int base_index);
7246 static int iseq_compile_pattern_set_eqq_errmsg(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, int base_index);
7248#define CASE3_BI_OFFSET_DECONSTRUCTED_CACHE 0
7249#define CASE3_BI_OFFSET_ERROR_STRING 1
7250#define CASE3_BI_OFFSET_KEY_ERROR_P 2
7251#define CASE3_BI_OFFSET_KEY_ERROR_MATCHEE 3
7252#define CASE3_BI_OFFSET_KEY_ERROR_KEY 4
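/* The CASE3_BI_* offsets name stack slots (relative to base_index) reserved by
 * a single-pattern `case/in`: the cached #deconstruct result, the formatted
 * error string, a "was it a key error" flag, and the matchee/key reported when
 * a hash pattern is missing a key. */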
7255 iseq_compile_pattern_each(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, LABEL *matched, LABEL *unmatched, bool in_single_pattern, bool in_alt_pattern, int base_index, bool use_deconstructed_cache)
7257 const int line = nd_line(node);
7258 const NODE *line_node = node;
7260 switch (nd_type(node)) {
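/* Array pattern (NODE_ARYPTN): after #deconstruct, check the length
 * (== without a rest, >= with one), match the pre elements by index, bind the
 * rest slice, then match the post elements counted from the end. */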
7314 const NODE *args = RNODE_ARYPTN(node)->pre_args;
7315 const int pre_args_num = RNODE_ARYPTN(node)->pre_args ? rb_long2int(RNODE_LIST(RNODE_ARYPTN(node)->pre_args)->as.nd_alen) : 0;
7316 const int post_args_num = RNODE_ARYPTN(node)->post_args ? rb_long2int(RNODE_LIST(RNODE_ARYPTN(node)->post_args)->as.nd_alen) : 0;
7318 const int min_argc = pre_args_num + post_args_num;
7319 const int use_rest_num = RNODE_ARYPTN(node)->rest_arg && (NODE_NAMED_REST_P(RNODE_ARYPTN(node)->rest_arg) ||
7320 (!NODE_NAMED_REST_P(RNODE_ARYPTN(node)->rest_arg) && post_args_num > 0));
7322 LABEL *match_failed, *type_error, *deconstruct, *deconstructed;
7324 match_failed = NEW_LABEL(line);
7325 type_error = NEW_LABEL(line);
7326 deconstruct = NEW_LABEL(line);
7327 deconstructed = NEW_LABEL(line);
7330 ADD_INSN1(ret, line_node, putobject, INT2FIX(0));
7331 ADD_INSN(ret, line_node, swap);
7337 CHECK(iseq_compile_pattern_constant(iseq, ret, node, match_failed, in_single_pattern, base_index));
7339 CHECK(iseq_compile_array_deconstruct(iseq, ret, node, deconstruct, deconstructed, match_failed, type_error, in_single_pattern, base_index, use_deconstructed_cache));
7341 ADD_INSN(ret, line_node, dup);
7342 ADD_SEND(ret, line_node, idLength, INT2FIX(0));
7343 ADD_INSN1(ret, line_node, putobject, INT2FIX(min_argc));
7344 ADD_SEND(ret, line_node, RNODE_ARYPTN(node)->rest_arg ? idGE : idEq, INT2FIX(1));
7345 if (in_single_pattern) {
7346 CHECK(iseq_compile_pattern_set_length_errmsg(iseq, ret, node,
7347 RNODE_ARYPTN(node)->rest_arg ? rb_fstring_lit("%p length mismatch (given %p, expected %p+)") :
7348 rb_fstring_lit("%p length mismatch (given %p, expected %p)"),
7349 INT2FIX(min_argc), base_index + 1));
7351 ADD_INSNL(ret, line_node, branchunless, match_failed);
7353 for (i = 0; i < pre_args_num; i++) {
7354 ADD_INSN(ret, line_node, dup);
7355 ADD_INSN1(ret, line_node, putobject, INT2FIX(i));
7356 ADD_SEND(ret, line_node, idAREF, INT2FIX(1));
7357 CHECK(iseq_compile_pattern_match(iseq, ret, RNODE_LIST(args)->nd_head, match_failed, in_single_pattern, in_alt_pattern, base_index + 1, false));
7358 args = RNODE_LIST(args)->nd_next;
7361 if (RNODE_ARYPTN(node)->rest_arg) {
7362 if (NODE_NAMED_REST_P(RNODE_ARYPTN(node)->rest_arg)) {
7363 ADD_INSN(ret, line_node, dup);
7364 ADD_INSN1(ret, line_node, putobject, INT2FIX(pre_args_num));
7365 ADD_INSN1(ret, line_node, topn, INT2FIX(1));
7366 ADD_SEND(ret, line_node, idLength, INT2FIX(0));
7367 ADD_INSN1(ret, line_node, putobject, INT2FIX(min_argc));
7368 ADD_SEND(ret, line_node, idMINUS, INT2FIX(1));
7369 ADD_INSN1(ret, line_node, setn, INT2FIX(4));
7370 ADD_SEND(ret, line_node, idAREF, INT2FIX(2));
7372 CHECK(iseq_compile_pattern_match(iseq, ret, RNODE_ARYPTN(node)->rest_arg, match_failed, in_single_pattern, in_alt_pattern, base_index + 1, false));
7375 if (post_args_num > 0) {
7376 ADD_INSN(ret, line_node, dup);
7377 ADD_SEND(ret, line_node, idLength, INT2FIX(0));
7378 ADD_INSN1(ret, line_node, putobject, INT2FIX(min_argc));
7379 ADD_SEND(ret, line_node, idMINUS, INT2FIX(1));
7380 ADD_INSN1(ret, line_node, setn, INT2FIX(2));
7381 ADD_INSN(ret, line_node, pop);
7386 args = RNODE_ARYPTN(node)->post_args;
7387 for (i = 0; i < post_args_num; i++) {
7388 ADD_INSN(ret, line_node, dup);
7390 ADD_INSN1(ret, line_node, putobject, INT2FIX(pre_args_num + i));
7391 ADD_INSN1(ret, line_node, topn, INT2FIX(3));
7392 ADD_SEND(ret, line_node, idPLUS, INT2FIX(1));
7394 ADD_SEND(ret, line_node, idAREF, INT2FIX(1));
7395 CHECK(iseq_compile_pattern_match(iseq, ret, RNODE_LIST(args)->nd_head, match_failed, in_single_pattern, in_alt_pattern, base_index + 1, false));
7396 args = RNODE_LIST(args)->nd_next;
7399 ADD_INSN(ret, line_node, pop);
7401 ADD_INSN(ret, line_node, pop);
7403 ADD_INSNL(ret, line_node, jump, matched);
7404 ADD_INSN(ret, line_node, putnil);
7406 ADD_INSN(ret, line_node, putnil);
7409 ADD_LABEL(ret, type_error);
7410 ADD_INSN1(ret, line_node, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
7412 ADD_INSN1(ret, line_node, putobject, rb_fstring_lit("deconstruct must return Array"));
7413 ADD_SEND(ret, line_node, id_core_raise, INT2FIX(2));
7414 ADD_INSN(ret, line_node, pop);
7416 ADD_LABEL(ret, match_failed);
7417 ADD_INSN(ret, line_node, pop);
7419 ADD_INSN(ret, line_node, pop);
7421 ADD_INSNL(ret, line_node, jump, unmatched);
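/* Find pattern (NODE_FNDPTN): after #deconstruct, try every starting offset
 * from 0 to length - args_num; at each offset match the fixed elements and, on
 * success, bind the leading and trailing rest slices. */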
7474 const NODE *args = RNODE_FNDPTN(node)->args;
7475 const int args_num = RNODE_FNDPTN(node)->args ? rb_long2int(RNODE_LIST(RNODE_FNDPTN(node)->args)->as.nd_alen) : 0;
7477 LABEL *match_failed, *type_error, *deconstruct, *deconstructed;
7478 match_failed = NEW_LABEL(line);
7479 type_error = NEW_LABEL(line);
7480 deconstruct = NEW_LABEL(line);
7481 deconstructed = NEW_LABEL(line);
7483 CHECK(iseq_compile_pattern_constant(iseq, ret, node, match_failed, in_single_pattern, base_index));
7485 CHECK(iseq_compile_array_deconstruct(iseq, ret, node, deconstruct, deconstructed, match_failed, type_error, in_single_pattern, base_index, use_deconstructed_cache));
7487 ADD_INSN(ret, line_node, dup);
7488 ADD_SEND(ret, line_node, idLength, INT2FIX(0));
7489 ADD_INSN1(ret, line_node, putobject, INT2FIX(args_num));
7490 ADD_SEND(ret, line_node, idGE, INT2FIX(1));
7491 if (in_single_pattern) {
7492 CHECK(iseq_compile_pattern_set_length_errmsg(iseq, ret, node, rb_fstring_lit("%p length mismatch (given %p, expected %p+)"), INT2FIX(args_num), base_index + 1));
7494 ADD_INSNL(ret, line_node, branchunless, match_failed);
7497 LABEL *while_begin = NEW_LABEL(nd_line(node));
7498 LABEL *next_loop = NEW_LABEL(nd_line(node));
7499 LABEL *find_succeeded = NEW_LABEL(line);
7500 LABEL *find_failed = NEW_LABEL(nd_line(node));
7503 ADD_INSN(ret, line_node, dup);
7504 ADD_SEND(ret, line_node, idLength, INT2FIX(0));
7506 ADD_INSN(ret, line_node, dup);
7507 ADD_INSN1(ret, line_node, putobject, INT2FIX(args_num));
7508 ADD_SEND(ret, line_node, idMINUS, INT2FIX(1));
7510 ADD_INSN1(ret, line_node, putobject, INT2FIX(0));
7512 ADD_LABEL(ret, while_begin);
7514 ADD_INSN(ret, line_node, dup);
7515 ADD_INSN1(ret, line_node, topn, INT2FIX(2));
7516 ADD_SEND(ret, line_node, idLE, INT2FIX(1));
7517 ADD_INSNL(ret, line_node, branchunless, find_failed);
7519 for (j = 0; j < args_num; j++) {
7520 ADD_INSN1(ret, line_node, topn, INT2FIX(3));
7521 ADD_INSN1(ret, line_node, topn, INT2FIX(1));
7523 ADD_INSN1(ret, line_node, putobject, INT2FIX(j));
7524 ADD_SEND(ret, line_node, idPLUS, INT2FIX(1));
7526 ADD_SEND(ret, line_node, idAREF, INT2FIX(1));
7528 CHECK(iseq_compile_pattern_match(iseq, ret, RNODE_LIST(args)->nd_head, next_loop, in_single_pattern, in_alt_pattern, base_index + 4, false));
7529 args = RNODE_LIST(args)->nd_next;
7532 if (NODE_NAMED_REST_P(RNODE_FNDPTN(node)->pre_rest_arg)) {
7533 ADD_INSN1(ret, line_node, topn, INT2FIX(3));
7534 ADD_INSN1(ret, line_node, putobject, INT2FIX(0));
7535 ADD_INSN1(ret, line_node, topn, INT2FIX(2));
7536 ADD_SEND(ret, line_node, idAREF, INT2FIX(2));
7537 CHECK(iseq_compile_pattern_match(iseq, ret, RNODE_FNDPTN(node)->pre_rest_arg, find_failed, in_single_pattern, in_alt_pattern, base_index + 4, false));
7539 if (NODE_NAMED_REST_P(RNODE_FNDPTN(node)->post_rest_arg)) {
7540 ADD_INSN1(ret, line_node, topn, INT2FIX(3));
7541 ADD_INSN1(ret, line_node, topn, INT2FIX(1));
7542 ADD_INSN1(ret, line_node, putobject, INT2FIX(args_num));
7543 ADD_SEND(ret, line_node, idPLUS, INT2FIX(1));
7544 ADD_INSN1(ret, line_node, topn, INT2FIX(3));
7545 ADD_SEND(ret, line_node, idAREF, INT2FIX(2));
7546 CHECK(iseq_compile_pattern_match(iseq, ret, RNODE_FNDPTN(node)->post_rest_arg, find_failed, in_single_pattern, in_alt_pattern, base_index + 4, false));
7548 ADD_INSNL(ret, line_node, jump, find_succeeded);
7550 ADD_LABEL(ret, next_loop);
7551 ADD_INSN1(ret, line_node, putobject, INT2FIX(1));
7552 ADD_SEND(ret, line_node, idPLUS, INT2FIX(1));
7553 ADD_INSNL(ret, line_node, jump, while_begin);
7555 ADD_LABEL(ret, find_failed);
7556 ADD_INSN1(ret, line_node, adjuststack, INT2FIX(3));
7557 if (in_single_pattern) {
7558 ADD_INSN1(ret, line_node, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
7559 ADD_INSN1(ret, line_node, putobject, rb_fstring_lit("%p does not match to find pattern"));
7560 ADD_INSN1(ret, line_node, topn, INT2FIX(2));
7561 ADD_SEND(ret, line_node, id_core_sprintf, INT2FIX(2));
7562 ADD_INSN1(ret, line_node, setn, INT2FIX(base_index + CASE3_BI_OFFSET_ERROR_STRING + 1));
7564 ADD_INSN1(ret, line_node, putobject, Qfalse);
7565 ADD_INSN1(ret, line_node, setn, INT2FIX(base_index + CASE3_BI_OFFSET_KEY_ERROR_P + 2));
7567 ADD_INSN(ret, line_node, pop);
7568 ADD_INSN(ret, line_node, pop);
7570 ADD_INSNL(ret, line_node, jump, match_failed);
7571 ADD_INSN1(ret, line_node, dupn, INT2FIX(3));
7573 ADD_LABEL(ret, find_succeeded);
7574 ADD_INSN1(ret, line_node, adjuststack, INT2FIX(3));
7577 ADD_INSN(ret, line_node, pop);
7578 ADD_INSNL(ret, line_node, jump, matched);
7579 ADD_INSN(ret, line_node, putnil);
7581 ADD_LABEL(ret, type_error);
7582 ADD_INSN1(ret, line_node, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
7584 ADD_INSN1(ret, line_node, putobject, rb_fstring_lit("deconstruct must return Array"));
7585 ADD_SEND(ret, line_node, id_core_raise, INT2FIX(2));
7586 ADD_INSN(ret, line_node, pop);
7588 ADD_LABEL(ret, match_failed);
7589 ADD_INSN(ret, line_node, pop);
7590 ADD_INSNL(ret, line_node, jump, unmatched);
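/* Hash pattern (NODE_HSHPTN): require #deconstruct_keys (passing the literal
 * key list when there is no **rest), then check key? and match each value;
 * with a **rest the hash is dup'ed and keys are removed with #delete so the
 * leftovers can be matched against the rest pattern. */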
7654 LABEL *match_failed, *type_error;
7657 match_failed = NEW_LABEL(line);
7658 type_error = NEW_LABEL(line);
7660 if (RNODE_HSHPTN(node)->nd_pkwargs && !RNODE_HSHPTN(node)->nd_pkwrestarg) {
7661 const NODE *kw_args = RNODE_HASH(RNODE_HSHPTN(node)->nd_pkwargs)->nd_head;
7662 keys = rb_ary_new_capa(kw_args ? RNODE_LIST(kw_args)->as.nd_alen/2 : 0);
7664 rb_ary_push(keys, get_symbol_value(iseq, RNODE_LIST(kw_args)->nd_head));
7665 kw_args = RNODE_LIST(RNODE_LIST(kw_args)->nd_next)->nd_next;
7669 CHECK(iseq_compile_pattern_constant(iseq, ret, node, match_failed, in_single_pattern, base_index));
7671 ADD_INSN(ret, line_node, dup);
7672 ADD_INSN1(ret, line_node, putobject, ID2SYM(rb_intern("deconstruct_keys")));
7673 ADD_SEND(ret, line_node, idRespond_to, INT2FIX(1));
7674 if (in_single_pattern) {
7675 CHECK(iseq_compile_pattern_set_general_errmsg(iseq, ret, node, rb_fstring_lit("%p does not respond to #deconstruct_keys"), base_index + 1));
7677 ADD_INSNL(ret, line_node, branchunless, match_failed);
7680 ADD_INSN(ret, line_node, putnil);
7683 RB_OBJ_SET_FROZEN_SHAREABLE(keys);
7684 ADD_INSN1(ret, line_node, duparray, keys);
7687 ADD_SEND(ret, line_node, rb_intern("deconstruct_keys"), INT2FIX(1));
7689 ADD_INSN(ret, line_node, dup);
7691 ADD_INSNL(ret, line_node, branchunless, type_error);
7693 if (RNODE_HSHPTN(node)->nd_pkwrestarg) {
7694 ADD_SEND(ret, line_node, rb_intern("dup"), INT2FIX(0));
7697 if (RNODE_HSHPTN(node)->nd_pkwargs) {
7701 args = RNODE_HASH(RNODE_HSHPTN(node)->nd_pkwargs)->nd_head;
7703 DECL_ANCHOR(match_values);
7704 INIT_ANCHOR(match_values);
7705 keys_num = rb_long2int(RNODE_LIST(args)->as.nd_alen) / 2;
7706 for (i = 0; i < keys_num; i++) {
7707 NODE *key_node = RNODE_LIST(args)->nd_head;
7708 NODE *value_node = RNODE_LIST(RNODE_LIST(args)->nd_next)->nd_head;
7709 VALUE key = get_symbol_value(iseq, key_node);
7711 ADD_INSN(ret, line_node, dup);
7712 ADD_INSN1(ret, line_node, putobject, key);
7713 ADD_SEND(ret, line_node, rb_intern("key?"), INT2FIX(1));
7714 if (in_single_pattern) {
7715 LABEL *match_succeeded;
7716 match_succeeded = NEW_LABEL(line);
7718 ADD_INSN(ret, line_node, dup);
7719 ADD_INSNL(ret, line_node, branchif, match_succeeded);
7722 ADD_INSN1(ret, line_node, putobject, RB_OBJ_SET_SHAREABLE(str));
7723 ADD_INSN1(ret, line_node, setn, INT2FIX(base_index + CASE3_BI_OFFSET_ERROR_STRING + 2));
7724 ADD_INSN1(ret, line_node, putobject, Qtrue);
7725 ADD_INSN1(ret, line_node, setn, INT2FIX(base_index + CASE3_BI_OFFSET_KEY_ERROR_P + 3));
7726 ADD_INSN1(ret, line_node, topn, INT2FIX(3));
7727 ADD_INSN1(ret, line_node, setn, INT2FIX(base_index + CASE3_BI_OFFSET_KEY_ERROR_MATCHEE + 4));
7728 ADD_INSN1(ret, line_node, putobject, key);
7729 ADD_INSN1(ret, line_node, setn, INT2FIX(base_index + CASE3_BI_OFFSET_KEY_ERROR_KEY + 5));
7731 ADD_INSN1(ret, line_node, adjuststack, INT2FIX(4));
7733 ADD_LABEL(ret, match_succeeded);
7735 ADD_INSNL(ret, line_node, branchunless, match_failed);
7737 ADD_INSN(match_values, line_node, dup);
7738 ADD_INSN1(match_values, line_node, putobject, key);
7739 ADD_SEND(match_values, line_node, RNODE_HSHPTN(node)->nd_pkwrestarg ? rb_intern("delete") : idAREF, INT2FIX(1));
7740 CHECK(iseq_compile_pattern_match(iseq, match_values, value_node, match_failed, in_single_pattern, in_alt_pattern, base_index + 1, false));
7741 args = RNODE_LIST(RNODE_LIST(args)->nd_next)->nd_next;
7743 ADD_SEQ(ret, match_values);
7747 ADD_INSN(ret, line_node, dup);
7748 ADD_SEND(ret, line_node, idEmptyP, INT2FIX(0));
7749 if (in_single_pattern) {
7750 CHECK(iseq_compile_pattern_set_general_errmsg(iseq, ret, node, rb_fstring_lit("%p is not empty"), base_index + 1));
7752 ADD_INSNL(ret, line_node, branchunless, match_failed);
7755 if (RNODE_HSHPTN(node)->nd_pkwrestarg) {
7756 if (RNODE_HSHPTN(node)->nd_pkwrestarg == NODE_SPECIAL_NO_REST_KEYWORD) {
7757 ADD_INSN(ret, line_node, dup);
7758 ADD_SEND(ret, line_node, idEmptyP, INT2FIX(0));
7759 if (in_single_pattern) {
7760 CHECK(iseq_compile_pattern_set_general_errmsg(iseq, ret, node, rb_fstring_lit("rest of %p is not empty"), base_index + 1));
7762 ADD_INSNL(ret, line_node, branchunless, match_failed);
7765 ADD_INSN(ret, line_node, dup);
7766 CHECK(iseq_compile_pattern_match(iseq, ret, RNODE_HSHPTN(node)->nd_pkwrestarg, match_failed, in_single_pattern, in_alt_pattern, base_index + 1, false));
7770 ADD_INSN(ret, line_node, pop);
7771 ADD_INSNL(ret, line_node, jump, matched);
7772 ADD_INSN(ret, line_node, putnil);
7774 ADD_LABEL(ret, type_error);
7775 ADD_INSN1(ret, line_node, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
7777 ADD_INSN1(ret, line_node, putobject, rb_fstring_lit("deconstruct_keys must return Hash"));
7778 ADD_SEND(ret, line_node, id_core_raise, INT2FIX(2));
7779 ADD_INSN(ret, line_node, pop);
7781 ADD_LABEL(ret, match_failed);
7782 ADD_INSN(ret, line_node, pop);
7783 ADD_INSNL(ret, line_node, jump, unmatched);
7792 case NODE_IMAGINARY:
7820 CHECK(COMPILE(ret, "case in literal", node));
7821 if (in_single_pattern) {
7822 ADD_INSN1(ret, line_node, dupn, INT2FIX(2));
7824 ADD_INSN1(ret, line_node, checkmatch, INT2FIX(VM_CHECKMATCH_TYPE_CASE));
7825 if (in_single_pattern) {
7826 CHECK(iseq_compile_pattern_set_eqq_errmsg(iseq, ret, node, base_index + 2));
7828 ADD_INSNL(ret, line_node, branchif, matched);
7829 ADD_INSNL(ret, line_node, jump, unmatched);
7833 ID id = RNODE_LASGN(node)->nd_vid;
7834 int idx = ISEQ_BODY(body->local_iseq)->local_table_size - get_local_var_idx(iseq, id);
7836 if (in_alt_pattern) {
7837 const char *name = rb_id2name(id);
7838 if (name && strlen(name) > 0 && name[0] != '_') {
7839 COMPILE_ERROR(ERROR_ARGS "illegal variable in alternative pattern (%"PRIsVALUE")",
7845 ADD_SETLOCAL(ret, line_node, idx, get_lvar_level(iseq));
7846 ADD_INSNL(ret, line_node, jump, matched);
7851 ID id = RNODE_DASGN(node)->nd_vid;
7853 idx = get_dyna_var_idx(iseq, id, &lv, &ls);
7855 if (in_alt_pattern) {
7856 const char *name = rb_id2name(id);
7857 if (name && strlen(name) > 0 && name[0] != '_') {
7858 COMPILE_ERROR(ERROR_ARGS "illegal variable in alternative pattern (%"PRIsVALUE")",
7865 COMPILE_ERROR(ERROR_ARGS "NODE_DASGN: unknown id (%"PRIsVALUE")",
7869 ADD_SETLOCAL(ret, line_node, ls - idx, lv);
7870 ADD_INSNL(ret, line_node, jump, matched);
7875 LABEL *match_failed;
7876 match_failed = unmatched;
7877 CHECK(iseq_compile_pattern_match(iseq, ret, RNODE_IF(node)->nd_body, unmatched, in_single_pattern, in_alt_pattern, base_index, use_deconstructed_cache));
7878 CHECK(COMPILE(ret, "case in if", RNODE_IF(node)->nd_cond));
7879 if (in_single_pattern) {
7880 LABEL *match_succeeded;
7881 match_succeeded = NEW_LABEL(line);
7883 ADD_INSN(ret, line_node, dup);
7884 if (nd_type_p(node, NODE_IF)) {
7885 ADD_INSNL(ret, line_node, branchif, match_succeeded);
7888 ADD_INSNL(ret, line_node, branchunless, match_succeeded);
7891 ADD_INSN1(ret, line_node, putobject, rb_fstring_lit("guard clause does not return true"));
7892 ADD_INSN1(ret, line_node, setn, INT2FIX(base_index + CASE3_BI_OFFSET_ERROR_STRING + 1));
7893 ADD_INSN1(ret, line_node, putobject, Qfalse);
7894 ADD_INSN1(ret, line_node, setn, INT2FIX(base_index + CASE3_BI_OFFSET_KEY_ERROR_P + 2));
7896 ADD_INSN(ret, line_node, pop);
7897 ADD_INSN(ret, line_node, pop);
7899 ADD_LABEL(ret, match_succeeded);
7901 if (nd_type_p(node, NODE_IF)) {
7902 ADD_INSNL(ret, line_node, branchunless, match_failed);
7905 ADD_INSNL(ret, line_node, branchif, match_failed);
7907 ADD_INSNL(ret, line_node, jump, matched);
7912 LABEL *match_failed;
7913 match_failed = NEW_LABEL(line);
7915 n = RNODE_HASH(node)->nd_head;
7916 if (! (nd_type_p(n, NODE_LIST) && RNODE_LIST(n)->as.nd_alen == 2)) {
7917 COMPILE_ERROR(ERROR_ARGS "unexpected node");
7921 ADD_INSN(ret, line_node, dup);
7922 CHECK(iseq_compile_pattern_match(iseq, ret, RNODE_LIST(n)->nd_head, match_failed, in_single_pattern, in_alt_pattern, base_index + 1, use_deconstructed_cache));
7923 CHECK(iseq_compile_pattern_each(iseq, ret, RNODE_LIST(RNODE_LIST(n)->nd_next)->nd_head, matched, match_failed, in_single_pattern, in_alt_pattern, base_index, false));
7924 ADD_INSN(ret, line_node, putnil);
7926 ADD_LABEL(ret, match_failed);
7927 ADD_INSN(ret, line_node, pop);
7928 ADD_INSNL(ret, line_node, jump, unmatched);
7932 LABEL *match_succeeded, *fin;
7933 match_succeeded = NEW_LABEL(line);
7934 fin = NEW_LABEL(line);
7936 ADD_INSN(ret, line_node, dup);
7937 CHECK(iseq_compile_pattern_each(iseq, ret, RNODE_OR(node)->nd_1st, match_succeeded, fin, in_single_pattern, true, base_index + 1, use_deconstructed_cache));
7938 ADD_LABEL(ret, match_succeeded);
7939 ADD_INSN(ret, line_node, pop);
7940 ADD_INSNL(ret, line_node, jump, matched);
7941 ADD_INSN(ret, line_node, putnil);
7942 ADD_LABEL(ret, fin);
7943 CHECK(iseq_compile_pattern_each(iseq, ret, RNODE_OR(node)->nd_2nd, matched, unmatched, in_single_pattern, true, base_index, use_deconstructed_cache));
7947 UNKNOWN_NODE("NODE_IN", node, COMPILE_NG);
7953 iseq_compile_pattern_match(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, LABEL *unmatched, bool in_single_pattern, bool in_alt_pattern, int base_index, bool use_deconstructed_cache)
7955 LABEL *fin = NEW_LABEL(nd_line(node));
7956 CHECK(iseq_compile_pattern_each(iseq, ret, node, fin, unmatched, in_single_pattern, in_alt_pattern, base_index, use_deconstructed_cache));
7957 ADD_LABEL(ret, fin);
7962 iseq_compile_pattern_constant(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, LABEL *match_failed, bool in_single_pattern, int base_index)
7964 const NODE *line_node = node;
7966 if (RNODE_ARYPTN(node)->nd_pconst) {
7967 ADD_INSN(ret, line_node, dup);
7968 CHECK(COMPILE(ret, "constant", RNODE_ARYPTN(node)->nd_pconst));
7969 if (in_single_pattern) {
7970 ADD_INSN1(ret, line_node, dupn, INT2FIX(2));
7972 ADD_INSN1(ret, line_node, checkmatch, INT2FIX(VM_CHECKMATCH_TYPE_CASE));
7973 if (in_single_pattern) {
7974 CHECK(iseq_compile_pattern_set_eqq_errmsg(iseq, ret, node, base_index + 3));
7976 ADD_INSNL(ret, line_node, branchunless, match_failed);
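/* Call #deconstruct on the matchee (verifying it responds to it and returns an
 * Array), optionally caching the result in the reserved stack slot so nested
 * array/find patterns do not deconstruct the same object twice. */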
7983 iseq_compile_array_deconstruct(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, LABEL *deconstruct, LABEL *deconstructed, LABEL *match_failed, LABEL *type_error, bool in_single_pattern, int base_index, bool use_deconstructed_cache)
7985 const NODE *line_node = node;
7989 if (use_deconstructed_cache) {
7991 ADD_INSN1(ret, line_node, topn, INT2FIX(base_index + CASE3_BI_OFFSET_DECONSTRUCTED_CACHE));
7992 ADD_INSNL(ret, line_node, branchnil, deconstruct);
7995 ADD_INSN1(ret, line_node, topn, INT2FIX(base_index + CASE3_BI_OFFSET_DECONSTRUCTED_CACHE));
7996 ADD_INSNL(ret, line_node, branchunless, match_failed);
7999 ADD_INSN(ret, line_node, pop);
8000 ADD_INSN1(ret, line_node, topn, INT2FIX(base_index + CASE3_BI_OFFSET_DECONSTRUCTED_CACHE - 1));
8001 ADD_INSNL(ret, line_node, jump, deconstructed);
8004 ADD_INSNL(ret, line_node, jump, deconstruct);
8007 ADD_LABEL(ret, deconstruct);
8008 ADD_INSN(ret, line_node, dup);
8009 ADD_INSN1(ret, line_node, putobject, ID2SYM(rb_intern("deconstruct")));
8010 ADD_SEND(ret, line_node, idRespond_to, INT2FIX(1));
8013 if (use_deconstructed_cache) {
8014 ADD_INSN1(ret, line_node, setn, INT2FIX(base_index + CASE3_BI_OFFSET_DECONSTRUCTED_CACHE + 1));
8017 if (in_single_pattern) {
8018 CHECK(iseq_compile_pattern_set_general_errmsg(iseq, ret, node, rb_fstring_lit("%p does not respond to #deconstruct"), base_index + 1));
8021 ADD_INSNL(ret, line_node, branchunless, match_failed);
8023 ADD_SEND(ret, line_node, rb_intern("deconstruct"), INT2FIX(0));
8026 if (use_deconstructed_cache) {
8027 ADD_INSN1(ret, line_node, setn, INT2FIX(base_index + CASE3_BI_OFFSET_DECONSTRUCTED_CACHE));
8030 ADD_INSN(ret, line_node, dup);
8032 ADD_INSNL(ret, line_node, branchunless, type_error);
8034 ADD_LABEL(ret, deconstructed);
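/* The iseq_compile_pattern_set_*_errmsg helpers run only for single-pattern
 * `case/in`: on failure they sprintf a diagnostic into the reserved error
 * string slot and clear the key-error flag so the raise site can build a
 * precise exception message. */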
8050 const int line = nd_line(node);
8051 const NODE *line_node = node;
8052 LABEL *match_succeeded = NEW_LABEL(line);
8054 ADD_INSN(ret, line_node, dup);
8055 ADD_INSNL(ret, line_node, branchif, match_succeeded);
8057 ADD_INSN1(ret, line_node, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
8058 ADD_INSN1(ret, line_node, putobject, errmsg);
8059 ADD_INSN1(ret, line_node, topn, INT2FIX(3));
8060 ADD_SEND(ret, line_node, id_core_sprintf, INT2FIX(2));
8061 ADD_INSN1(ret, line_node, setn, INT2FIX(base_index + CASE3_BI_OFFSET_ERROR_STRING + 1));
8063 ADD_INSN1(ret, line_node, putobject, Qfalse);
8064 ADD_INSN1(ret, line_node, setn, INT2FIX(base_index + CASE3_BI_OFFSET_KEY_ERROR_P + 2));
8066 ADD_INSN(ret, line_node, pop);
8067 ADD_INSN(ret, line_node, pop);
8068 ADD_LABEL(ret, match_succeeded);
8084 const int line = nd_line(node);
8085 const NODE *line_node = node;
8086 LABEL *match_succeeded = NEW_LABEL(line);
8088 ADD_INSN(ret, line_node, dup);
8089 ADD_INSNL(ret, line_node, branchif, match_succeeded);
8091 ADD_INSN1(ret, line_node, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
8092 ADD_INSN1(ret, line_node, putobject, errmsg);
8093 ADD_INSN1(ret, line_node, topn, INT2FIX(3));
8094 ADD_INSN(ret, line_node, dup);
8095 ADD_SEND(ret, line_node, idLength, INT2FIX(0));
8096 ADD_INSN1(ret, line_node, putobject, pattern_length);
8097 ADD_SEND(ret, line_node, id_core_sprintf, INT2FIX(4));
8098 ADD_INSN1(ret, line_node, setn, INT2FIX(base_index + CASE3_BI_OFFSET_ERROR_STRING + 1));
8100 ADD_INSN1(ret, line_node, putobject, Qfalse);
8101 ADD_INSN1(ret, line_node, setn, INT2FIX(base_index + CASE3_BI_OFFSET_KEY_ERROR_P + 2));
8103 ADD_INSN(ret, line_node, pop);
8104 ADD_INSN(ret, line_node, pop);
8105 ADD_LABEL(ret, match_succeeded);
8111 iseq_compile_pattern_set_eqq_errmsg(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, int base_index)
8121 const int line = nd_line(node);
8122 const NODE *line_node = node;
8123 LABEL *match_succeeded = NEW_LABEL(line);
8125 ADD_INSN(ret, line_node, dup);
8126 ADD_INSNL(ret, line_node, branchif, match_succeeded);
8128 ADD_INSN1(ret, line_node, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
8129 ADD_INSN1(ret, line_node, putobject, rb_fstring_lit("%p === %p does not return true"));
8130 ADD_INSN1(ret, line_node, topn, INT2FIX(3));
8131 ADD_INSN1(ret, line_node, topn, INT2FIX(5));
8132 ADD_SEND(ret, line_node, id_core_sprintf, INT2FIX(3));
8133 ADD_INSN1(ret, line_node, setn, INT2FIX(base_index + CASE3_BI_OFFSET_ERROR_STRING + 1));
8135 ADD_INSN1(ret, line_node, putobject, Qfalse);
8136 ADD_INSN1(ret, line_node, setn, INT2FIX(base_index + CASE3_BI_OFFSET_KEY_ERROR_P + 2));
8138 ADD_INSN(ret, line_node, pop);
8139 ADD_INSN(ret, line_node, pop);
8141 ADD_LABEL(ret, match_succeeded);
8142 ADD_INSN1(ret, line_node, setn, INT2FIX(2));
8143 ADD_INSN(ret, line_node, pop);
8144 ADD_INSN(ret, line_node, pop);
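/* case/in: a single-pattern case reserves extra stack slots for error
 * reporting (see the CASE3_BI_* offsets above). Without an else clause, a
 * failed match raises the no-matching-pattern error, or a key error carrying
 * the matchee and key when a hash pattern reported a missing key. */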
8152 const NODE *pattern;
8153 const NODE *node = orig_node;
8154 LABEL *endlabel, *elselabel;
8156 DECL_ANCHOR(body_seq);
8157 DECL_ANCHOR(cond_seq);
8159 enum node_type type;
8160 const NODE *line_node;
8163 bool single_pattern;
8166 INIT_ANCHOR(body_seq);
8167 INIT_ANCHOR(cond_seq);
8169 branches = decl_branch_base(iseq, PTR2NUM(node), nd_code_loc(node), "case");
8171 node = RNODE_CASE3(node)->nd_body;
8172 EXPECT_NODE("NODE_CASE3", node, NODE_IN, COMPILE_NG);
8173 type = nd_type(node);
8174 line = nd_line(node);
8176 single_pattern = !RNODE_IN(node)->nd_next;
8178 endlabel = NEW_LABEL(line);
8179 elselabel = NEW_LABEL(line);
8181 if (single_pattern) {
8183 ADD_INSN(head, line_node, putnil);
8184 ADD_INSN(head, line_node, putnil);
8185 ADD_INSN1(head, line_node, putobject, Qfalse);
8186 ADD_INSN(head, line_node, putnil);
8188 ADD_INSN(head, line_node, putnil);
8190 CHECK(COMPILE(head, "case base", RNODE_CASE3(orig_node)->nd_head));
8194 while (type == NODE_IN) {
8198 ADD_INSN(body_seq, line_node, putnil);
8200 l1 = NEW_LABEL(line);
8201 ADD_LABEL(body_seq, l1);
8202 ADD_INSN1(body_seq, line_node, adjuststack, INT2FIX(single_pattern ? 6 : 2));
8204 const NODE *const coverage_node = RNODE_IN(node)->nd_body ? RNODE_IN(node)->nd_body : node;
8205 add_trace_branch_coverage(
8208 nd_code_loc(coverage_node),
8209 nd_node_id(coverage_node),
8214 CHECK(COMPILE_(body_seq, "in body", RNODE_IN(node)->nd_body, popped));
8215 ADD_INSNL(body_seq, line_node, jump, endlabel);
8217 pattern = RNODE_IN(node)->nd_head;
8219 int pat_line = nd_line(pattern);
8220 LABEL *next_pat = NEW_LABEL(pat_line);
8221 ADD_INSN (cond_seq, pattern, dup);
8223 CHECK(iseq_compile_pattern_each(iseq, cond_seq, pattern, l1, next_pat, single_pattern, false, 2, true));
8224 ADD_LABEL(cond_seq, next_pat);
8225 LABEL_UNREMOVABLE(next_pat);
8228 COMPILE_ERROR(ERROR_ARGS "unexpected node");
8232 node = RNODE_IN(node)->nd_next;
8236 type = nd_type(node);
8237 line = nd_line(node);
8242 ADD_LABEL(cond_seq, elselabel);
8243 ADD_INSN(cond_seq, line_node, pop);
8244 ADD_INSN(cond_seq, line_node, pop);
8245 add_trace_branch_coverage(iseq, cond_seq, nd_code_loc(node), nd_node_id(node), branch_id, "else", branches);
8246 CHECK(COMPILE_(cond_seq, "else", node, popped));
8247 ADD_INSNL(cond_seq, line_node, jump, endlabel);
8248 ADD_INSN(cond_seq, line_node, putnil);
8250 ADD_INSN(cond_seq, line_node, putnil);
8254 debugs("== else (implicit)\n");
8255 ADD_LABEL(cond_seq, elselabel);
8256 add_trace_branch_coverage(iseq, cond_seq, nd_code_loc(orig_node), nd_node_id(orig_node), branch_id, "else", branches);
8257 ADD_INSN1(cond_seq, orig_node, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
8259 if (single_pattern) {
8267 LABEL *key_error, *fin;
8270 key_error = NEW_LABEL(line);
8271 fin = NEW_LABEL(line);
8274 kw_arg->references = 0;
8275 kw_arg->keyword_len = 2;
8276 kw_arg->keywords[0] = ID2SYM(rb_intern("matchee"));
8277 kw_arg->keywords[1] = ID2SYM(rb_intern("key"));
8279 ADD_INSN1(cond_seq, orig_node, topn, INT2FIX(CASE3_BI_OFFSET_KEY_ERROR_P + 2));
8280 ADD_INSNL(cond_seq, orig_node, branchif, key_error);
8282 ADD_INSN1(cond_seq, orig_node, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
8283 ADD_INSN1(cond_seq, orig_node, putobject, rb_fstring_lit("%p: %s"));
8284 ADD_INSN1(cond_seq, orig_node, topn, INT2FIX(4));
8285 ADD_INSN1(cond_seq, orig_node, topn, INT2FIX(CASE3_BI_OFFSET_ERROR_STRING + 6));
8286 ADD_SEND(cond_seq, orig_node, id_core_sprintf, INT2FIX(3));
8287 ADD_SEND(cond_seq, orig_node, id_core_raise, INT2FIX(2));
8288 ADD_INSNL(cond_seq, orig_node, jump, fin);
8290 ADD_LABEL(cond_seq, key_error);
8292 ADD_INSN1(cond_seq, orig_node, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
8293 ADD_INSN1(cond_seq, orig_node, putobject, rb_fstring_lit("%p: %s"));
8294 ADD_INSN1(cond_seq, orig_node, topn, INT2FIX(4));
8295 ADD_INSN1(cond_seq, orig_node, topn, INT2FIX(CASE3_BI_OFFSET_ERROR_STRING + 6));
8296 ADD_SEND(cond_seq, orig_node, id_core_sprintf, INT2FIX(3));
8297 ADD_INSN1(cond_seq, orig_node, topn, INT2FIX(CASE3_BI_OFFSET_KEY_ERROR_MATCHEE + 4));
8298 ADD_INSN1(cond_seq, orig_node, topn, INT2FIX(CASE3_BI_OFFSET_KEY_ERROR_KEY + 5));
8299 ADD_SEND_R(cond_seq, orig_node, rb_intern("new"), INT2FIX(1), NULL, INT2FIX(VM_CALL_KWARG), kw_arg);
8300 ADD_SEND(cond_seq, orig_node, id_core_raise, INT2FIX(1));
8302 ADD_LABEL(cond_seq, fin);
8306 ADD_INSN1(cond_seq, orig_node, topn, INT2FIX(2));
8307 ADD_SEND(cond_seq, orig_node, id_core_raise, INT2FIX(2));
8309 ADD_INSN1(cond_seq, orig_node, adjuststack, INT2FIX(single_pattern ? 7 : 3));
8311 ADD_INSN(cond_seq, orig_node, putnil);
8313 ADD_INSNL(cond_seq, orig_node, jump, endlabel);
8314 ADD_INSN1(cond_seq, orig_node, dupn, INT2FIX(single_pattern ? 5 : 1));
8316 ADD_INSN(cond_seq, line_node, putnil);
8320 ADD_SEQ(ret, cond_seq);
8321 ADD_SEQ(ret, body_seq);
8322 ADD_LABEL(ret, endlabel);
8326#undef CASE3_BI_OFFSET_DECONSTRUCTED_CACHE
8327#undef CASE3_BI_OFFSET_ERROR_STRING
8328#undef CASE3_BI_OFFSET_KEY_ERROR_P
8329#undef CASE3_BI_OFFSET_KEY_ERROR_MATCHEE
8330#undef CASE3_BI_OFFSET_KEY_ERROR_KEY
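/* while/until: the current start/end/redo labels and loopval_popped are saved
 * and restored around the loop, and catch table entries for BREAK/NEXT/REDO
 * are registered over the loop range so those jumps work from nested frames. */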
8335 const int line = (int)nd_line(node);
8336 const NODE *line_node = node;
8338 LABEL *prev_start_label = ISEQ_COMPILE_DATA(iseq)->start_label;
8339 LABEL *prev_end_label = ISEQ_COMPILE_DATA(iseq)->end_label;
8340 LABEL *prev_redo_label = ISEQ_COMPILE_DATA(iseq)->redo_label;
8341 int prev_loopval_popped = ISEQ_COMPILE_DATA(iseq)->loopval_popped;
8346 LABEL *next_label = ISEQ_COMPILE_DATA(iseq)->start_label = NEW_LABEL(line);
8347 LABEL *redo_label = ISEQ_COMPILE_DATA(iseq)->redo_label = NEW_LABEL(line);
8348 LABEL *break_label = ISEQ_COMPILE_DATA(iseq)->end_label = NEW_LABEL(line);
8349 LABEL *end_label = NEW_LABEL(line);
8350 LABEL *adjust_label = NEW_LABEL(line);
8352 LABEL *next_catch_label = NEW_LABEL(line);
8353 LABEL *tmp_label = NULL;
8355 ISEQ_COMPILE_DATA(iseq)->loopval_popped = 0;
8356 push_ensure_entry(iseq, &enl, NULL, NULL);
8358 if (RNODE_WHILE(node)->nd_state == 1) {
8359 ADD_INSNL(ret, line_node, jump, next_label);
8362 tmp_label = NEW_LABEL(line);
8363 ADD_INSNL(ret, line_node, jump, tmp_label);
8365 ADD_LABEL(ret, adjust_label);
8366 ADD_INSN(ret, line_node, putnil);
8367 ADD_LABEL(ret, next_catch_label);
8368 ADD_INSN(ret, line_node, pop);
8369 ADD_INSNL(ret, line_node, jump, next_label);
8370 if (tmp_label) ADD_LABEL(ret, tmp_label);
8372 ADD_LABEL(ret, redo_label);
8373 branches = decl_branch_base(iseq, PTR2NUM(node), nd_code_loc(node), type == NODE_WHILE ? "while" : "until");
8375 const NODE *const coverage_node = RNODE_WHILE(node)->nd_body ? RNODE_WHILE(node)->nd_body : node;
8376 add_trace_branch_coverage(
8379 nd_code_loc(coverage_node),
8380 nd_node_id(coverage_node),
8385 CHECK(COMPILE_POPPED(ret, "while body", RNODE_WHILE(node)->nd_body));
8386 ADD_LABEL(ret, next_label);
8388 if (type == NODE_WHILE) {
8389 CHECK(compile_branch_condition(iseq, ret, RNODE_WHILE(node)->nd_cond,
8390 redo_label, end_label));
8394 CHECK(compile_branch_condition(iseq, ret, RNODE_WHILE(node)->nd_cond,
8395 end_label, redo_label));
8398 ADD_LABEL(ret, end_label);
8399 ADD_ADJUST_RESTORE(ret, adjust_label);
8401 if (UNDEF_P(RNODE_WHILE(node)->nd_state)) {
8403 COMPILE_ERROR(ERROR_ARGS "unsupported: putundef");
8407 ADD_INSN(ret, line_node, putnil);
8410 ADD_LABEL(ret, break_label);
8413 ADD_INSN(ret, line_node, pop);
8416 ADD_CATCH_ENTRY(CATCH_TYPE_BREAK, redo_label, break_label, NULL,
8418 ADD_CATCH_ENTRY(CATCH_TYPE_NEXT, redo_label, break_label, NULL,
8420 ADD_CATCH_ENTRY(CATCH_TYPE_REDO, redo_label, break_label, NULL,
8421 ISEQ_COMPILE_DATA(iseq)->redo_label);
8423 ISEQ_COMPILE_DATA(iseq)->start_label = prev_start_label;
8424 ISEQ_COMPILE_DATA(iseq)->end_label = prev_end_label;
8425 ISEQ_COMPILE_DATA(iseq)->redo_label = prev_redo_label;
8426 ISEQ_COMPILE_DATA(iseq)->loopval_popped = prev_loopval_popped;
8427 ISEQ_COMPILE_DATA(iseq)->ensure_node_stack = ISEQ_COMPILE_DATA(iseq)->ensure_node_stack->prev;
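/* Block iteration (for/iter): the body becomes a child block iseq attached to
 * the call (`each` for `for`), and a BREAK catch entry spanning the call lets
 * `break` return a value to just after the send. */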
8434 const int line = nd_line(node);
8435 const NODE *line_node = node;
8436 const rb_iseq_t *prevblock = ISEQ_COMPILE_DATA(iseq)->current_block;
8437 LABEL *retry_label = NEW_LABEL(line);
8438 LABEL *retry_end_l = NEW_LABEL(line);
8441 ADD_LABEL(ret, retry_label);
8442 if (nd_type_p(node, NODE_FOR)) {
8443 CHECK(COMPILE(ret, "iter caller (for)", RNODE_FOR(node)->nd_iter));
8445 ISEQ_COMPILE_DATA(iseq)->current_block = child_iseq =
8446 NEW_CHILD_ISEQ(RNODE_FOR(node)->nd_body, make_name_for_block(iseq),
8447 ISEQ_TYPE_BLOCK, line);
8448 ADD_SEND_WITH_BLOCK(ret, line_node, idEach, INT2FIX(0), child_iseq);
8451 ISEQ_COMPILE_DATA(iseq)->current_block = child_iseq =
8452 NEW_CHILD_ISEQ(RNODE_ITER(node)->nd_body, make_name_for_block(iseq),
8453 ISEQ_TYPE_BLOCK, line);
8454 CHECK(COMPILE(ret, "iter caller", RNODE_ITER(node)->nd_iter));
8468 iobj = IS_INSN(last_elem) ? (INSN*) last_elem : (INSN*) get_prev_insn((INSN*) last_elem);
8469 while (!IS_INSN_ID(iobj, send) && !IS_INSN_ID(iobj, invokesuper) && !IS_INSN_ID(iobj, sendforward) && !IS_INSN_ID(iobj, invokesuperforward)) {
8470 iobj = (INSN*) get_prev_insn(iobj);
8472 ELEM_INSERT_NEXT(&iobj->link, (LINK_ELEMENT*) retry_end_l);
8476 if (&iobj->link == LAST_ELEMENT(ret)) {
8482 ADD_INSN(ret, line_node, pop);
8485 ISEQ_COMPILE_DATA(iseq)->current_block = prevblock;
8487 ADD_CATCH_ENTRY(CATCH_TYPE_BREAK, retry_label, retry_end_l, child_iseq, retry_end_l);
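/* for with a multiple-assignment target: if exactly one value was yielded and
 * it can be converted with Array.try_convert, destructure that array instead
 * of the single value. */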
8497 const NODE *line_node = node;
8498 const NODE *var = RNODE_FOR_MASGN(node)->nd_var;
8499 LABEL *not_single = NEW_LABEL(nd_line(var));
8500 LABEL *not_ary = NEW_LABEL(nd_line(var));
8501 CHECK(COMPILE(ret, "for var", var));
8502 ADD_INSN(ret, line_node, dup);
8503 ADD_CALL(ret, line_node, idLength, INT2FIX(0));
8504 ADD_INSN1(ret, line_node, putobject, INT2FIX(1));
8505 ADD_CALL(ret, line_node, idEq, INT2FIX(1));
8506 ADD_INSNL(ret, line_node, branchunless, not_single);
8507 ADD_INSN(ret, line_node, dup);
8508 ADD_INSN1(ret, line_node, putobject, INT2FIX(0));
8509 ADD_CALL(ret, line_node, idAREF, INT2FIX(1));
8510 ADD_INSN1(ret, line_node, putobject, rb_cArray);
8511 ADD_INSN(ret, line_node, swap);
8512 ADD_CALL(ret, line_node, rb_intern("try_convert"), INT2FIX(1));
8513 ADD_INSN(ret, line_node, dup);
8514 ADD_INSNL(ret, line_node, branchunless, not_ary);
8515 ADD_INSN(ret, line_node, swap);
8516 ADD_LABEL(ret, not_ary);
8517 ADD_INSN(ret, line_node, pop);
8518 ADD_LABEL(ret, not_single);
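/* break: inside a while/until loop it jumps straight to the loop's end label
 * (inlining ensure bodies when permitted); inside a block it emits
 * throw TAG_BREAK; anywhere else it is a compile error. */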
8525 const NODE *line_node = node;
8526 unsigned long throw_flag = 0;
8528 if (ISEQ_COMPILE_DATA(iseq)->redo_label != 0 && can_add_ensure_iseq(iseq)) {
8530 LABEL *splabel = NEW_LABEL(0);
8531 ADD_LABEL(ret, splabel);
8532 ADD_ADJUST(ret, line_node, ISEQ_COMPILE_DATA(iseq)->redo_label);
8533 CHECK(COMPILE_(ret, "break val (while/until)", RNODE_BREAK(node)->nd_stts,
8534 ISEQ_COMPILE_DATA(iseq)->loopval_popped));
8535 add_ensure_iseq(ret, iseq, 0);
8536 ADD_INSNL(ret, line_node, jump, ISEQ_COMPILE_DATA(iseq)->end_label);
8537 ADD_ADJUST_RESTORE(ret, splabel);
8540 ADD_INSN(ret, line_node, putnil);
8547 if (!ISEQ_COMPILE_DATA(ip)) {
8552 if (ISEQ_COMPILE_DATA(ip)->redo_label != 0) {
8553 throw_flag = VM_THROW_NO_ESCAPE_FLAG;
8555 else if (ISEQ_BODY(ip)->type == ISEQ_TYPE_BLOCK) {
8558 else if (ISEQ_BODY(ip)->type == ISEQ_TYPE_EVAL) {
8559 COMPILE_ERROR(ERROR_ARGS "Can't escape from eval with break");
8563 ip = ISEQ_BODY(ip)->parent_iseq;
8568 CHECK(COMPILE(ret, "break val (block)", RNODE_BREAK(node)->nd_stts));
8569 ADD_INSN1(ret, line_node, throw, INT2FIX(throw_flag | TAG_BREAK));
8571 ADD_INSN(ret, line_node, pop);
8575 COMPILE_ERROR(ERROR_ARGS "Invalid break");
8584 const NODE *line_node = node;
8585 unsigned long throw_flag = 0;
8587 if (ISEQ_COMPILE_DATA(iseq)->redo_label != 0 && can_add_ensure_iseq(iseq)) {
8588 LABEL *splabel = NEW_LABEL(0);
8589 debugs("next in while loop\n");
8590 ADD_LABEL(ret, splabel);
8591 CHECK(COMPILE(ret, "next val/valid syntax?", RNODE_NEXT(node)->nd_stts));
8592 add_ensure_iseq(ret, iseq, 0);
8593 ADD_ADJUST(ret, line_node, ISEQ_COMPILE_DATA(iseq)->redo_label);
8594 ADD_INSNL(ret, line_node, jump, ISEQ_COMPILE_DATA(iseq)->start_label);
8595 ADD_ADJUST_RESTORE(ret, splabel);
8597 ADD_INSN(ret, line_node, putnil);
8600 else if (ISEQ_COMPILE_DATA(iseq)->end_label && can_add_ensure_iseq(iseq)) {
8601 LABEL *splabel = NEW_LABEL(0);
8602 debugs("next in block\n");
8603 ADD_LABEL(ret, splabel);
8604 ADD_ADJUST(ret, line_node, ISEQ_COMPILE_DATA(iseq)->start_label);
8605 CHECK(COMPILE(ret, "next val", RNODE_NEXT(node)->nd_stts));
8606 add_ensure_iseq(ret, iseq, 0);
8607 ADD_INSNL(ret, line_node, jump, ISEQ_COMPILE_DATA(iseq)->end_label);
8608 ADD_ADJUST_RESTORE(ret, splabel);
8611 ADD_INSN(ret, line_node, putnil);
8618 if (!ISEQ_COMPILE_DATA(ip)) {
8623 throw_flag = VM_THROW_NO_ESCAPE_FLAG;
8624 if (ISEQ_COMPILE_DATA(ip)->redo_label != 0) {
8628 else if (ISEQ_BODY(ip)->type == ISEQ_TYPE_BLOCK) {
8631 else if (ISEQ_BODY(ip)->type == ISEQ_TYPE_EVAL) {
8632 COMPILE_ERROR(ERROR_ARGS "Can't escape from eval with next");
8636 ip = ISEQ_BODY(ip)->parent_iseq;
8639 CHECK(COMPILE(ret, "next val", RNODE_NEXT(node)->nd_stts));
8640 ADD_INSN1(ret, line_node, throw, INT2FIX(throw_flag | TAG_NEXT));
8643 ADD_INSN(ret, line_node, pop);
8647 COMPILE_ERROR(ERROR_ARGS "Invalid next");
8657 const NODE *line_node = node;
8659 if (ISEQ_COMPILE_DATA(iseq)->redo_label && can_add_ensure_iseq(iseq)) {
8660 LABEL *splabel = NEW_LABEL(0);
8661 debugs("redo in while");
8662 ADD_LABEL(ret, splabel);
8663 ADD_ADJUST(ret, line_node, ISEQ_COMPILE_DATA(iseq)->redo_label);
8664 add_ensure_iseq(ret, iseq, 0);
8665 ADD_INSNL(ret, line_node, jump, ISEQ_COMPILE_DATA(iseq)->redo_label);
8666 ADD_ADJUST_RESTORE(ret, splabel);
8668 ADD_INSN(ret, line_node, putnil);
8671 else if (ISEQ_BODY(iseq)->type != ISEQ_TYPE_EVAL && ISEQ_COMPILE_DATA(iseq)->start_label && can_add_ensure_iseq(iseq)) {
8672 LABEL *splabel = NEW_LABEL(0);
8674 debugs("redo in block");
8675 ADD_LABEL(ret, splabel);
8676 add_ensure_iseq(ret, iseq, 0);
8677 ADD_ADJUST(ret, line_node, ISEQ_COMPILE_DATA(iseq)->start_label);
8678 ADD_INSNL(ret, line_node, jump, ISEQ_COMPILE_DATA(iseq)->start_label);
8679 ADD_ADJUST_RESTORE(ret, splabel);
8682 ADD_INSN(ret, line_node, putnil);
8689 if (!ISEQ_COMPILE_DATA(ip)) {
8694 if (ISEQ_COMPILE_DATA(ip)->redo_label != 0) {
8697 else if (ISEQ_BODY(ip)->type == ISEQ_TYPE_BLOCK) {
8700 else if (ISEQ_BODY(ip)->type == ISEQ_TYPE_EVAL) {
8701 COMPILE_ERROR(ERROR_ARGS "Can't escape from eval with redo");
8705 ip = ISEQ_BODY(ip)->parent_iseq;
8708 ADD_INSN(ret, line_node, putnil);
8709 ADD_INSN1(ret, line_node, throw, INT2FIX(VM_THROW_NO_ESCAPE_FLAG | TAG_REDO));
8712 ADD_INSN(ret, line_node, pop);
8716 COMPILE_ERROR(ERROR_ARGS "Invalid redo");
8726 const NODE *line_node = node;
8728 if (ISEQ_BODY(iseq)->type == ISEQ_TYPE_RESCUE) {
8729 ADD_INSN(ret, line_node, putnil);
8730 ADD_INSN1(ret, line_node, throw, INT2FIX(TAG_RETRY));
8733 ADD_INSN(ret, line_node, pop);
8737 COMPILE_ERROR(ERROR_ARGS "Invalid retry");
8746 const int line = nd_line(node);
8747 const NODE *line_node = node;
8748 LABEL *lstart = NEW_LABEL(line);
8749 LABEL *lend = NEW_LABEL(line);
8750 LABEL *lcont = NEW_LABEL(line);
8751 const rb_iseq_t *rescue = NEW_CHILD_ISEQ(RNODE_RESCUE(node)->nd_resq,
8753 ISEQ_BODY(iseq)->location.label),
8754 ISEQ_TYPE_RESCUE, line);
8756 lstart->rescued = LABEL_RESCUE_BEG;
8757 lend->rescued = LABEL_RESCUE_END;
8758 ADD_LABEL(ret, lstart);
8760 bool prev_in_rescue = ISEQ_COMPILE_DATA(iseq)->in_rescue;
8761 ISEQ_COMPILE_DATA(iseq)->in_rescue = true;
8763 CHECK(COMPILE(ret, "rescue head", RNODE_RESCUE(node)->nd_head));
8765 ISEQ_COMPILE_DATA(iseq)->in_rescue = prev_in_rescue;
8767 ADD_LABEL(ret, lend);
8768 if (RNODE_RESCUE(node)->nd_else) {
8769 ADD_INSN(ret, line_node, pop);
8770 CHECK(COMPILE(ret, "rescue else", RNODE_RESCUE(node)->nd_else));
8772 ADD_INSN(ret, line_node, nop);
8773 ADD_LABEL(ret, lcont);
8776 ADD_INSN(ret, line_node, pop);
8780 ADD_CATCH_ENTRY(CATCH_TYPE_RESCUE, lstart, lend, rescue, lcont);
8781 ADD_CATCH_ENTRY(CATCH_TYPE_RETRY, lend, lcont, NULL, lstart);
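/* Rescue clause bodies: each exception class listed (StandardError when the
 * list is omitted) is tested with checkmatch against the raised exception held
 * in the LVAR_ERRINFO local; on a hit the optional exception variable is
 * assigned and the clause body runs, otherwise control falls through to the
 * next resbody. */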
8788 const int line = nd_line(node);
8789 const NODE *line_node = node;
8790 const NODE *resq = node;
8792 LABEL *label_miss, *label_hit;
8795 label_miss = NEW_LABEL(line);
8796 label_hit = NEW_LABEL(line);
8798 narg = RNODE_RESBODY(resq)->nd_args;
8800 switch (nd_type(narg)) {
8803 ADD_GETLOCAL(ret, line_node, LVAR_ERRINFO, 0);
8804 CHECK(COMPILE(ret, "rescue arg", RNODE_LIST(narg)->nd_head));
8805 ADD_INSN1(ret, line_node, checkmatch, INT2FIX(VM_CHECKMATCH_TYPE_RESCUE));
8806 ADD_INSNL(ret, line_node, branchif, label_hit);
8807 narg = RNODE_LIST(narg)->nd_next;
8813 ADD_GETLOCAL(ret, line_node, LVAR_ERRINFO, 0);
8814 CHECK(COMPILE(ret, "rescue/cond splat", narg));
8815 ADD_INSN1(ret, line_node, checkmatch, INT2FIX(VM_CHECKMATCH_TYPE_RESCUE | VM_CHECKMATCH_ARRAY));
8816 ADD_INSNL(ret, line_node, branchif, label_hit);
8819 UNKNOWN_NODE("NODE_RESBODY", narg, COMPILE_NG);
8823 ADD_GETLOCAL(ret, line_node, LVAR_ERRINFO, 0);
8825 ADD_INSN1(ret, line_node, checkmatch, INT2FIX(VM_CHECKMATCH_TYPE_RESCUE));
8826 ADD_INSNL(ret, line_node, branchif, label_hit);
8828 ADD_INSNL(ret, line_node, jump, label_miss);
8829 ADD_LABEL(ret, label_hit);
8832 if (RNODE_RESBODY(resq)->nd_exc_var) {
8833 CHECK(COMPILE_POPPED(ret, "resbody exc_var", RNODE_RESBODY(resq)->nd_exc_var));
8836 if (nd_type(RNODE_RESBODY(resq)->nd_body) == NODE_BEGIN && RNODE_BEGIN(RNODE_RESBODY(resq)->nd_body)->nd_body == NULL && !RNODE_RESBODY(resq)->nd_exc_var) {
8838 ADD_SYNTHETIC_INSN(ret, nd_line(RNODE_RESBODY(resq)->nd_body), -1, putnil);
8841 CHECK(COMPILE(ret, "resbody body", RNODE_RESBODY(resq)->nd_body));
8844 if (ISEQ_COMPILE_DATA(iseq)->option->tailcall_optimization) {
8845 ADD_INSN(ret, line_node, nop);
8847 ADD_INSN(ret, line_node, leave);
8848 ADD_LABEL(ret, label_miss);
8849 resq = RNODE_RESBODY(resq)->nd_next;
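/* ensure: the ensure body is compiled both as an ISEQ_TYPE_ENSURE child iseq,
 * reached via CATCH_TYPE_ENSURE entries when the region unwinds, and into the
 * ensr anchor for the normal fall-through path; it is also pushed on
 * ensure_node_stack so break/next/return can inline it. */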
8857 const int line = nd_line(RNODE_ENSURE(node)->nd_ensr);
8858 const NODE *line_node = node;
8860 const rb_iseq_t *ensure = NEW_CHILD_ISEQ(RNODE_ENSURE(node)->nd_ensr,
8862 ISEQ_TYPE_ENSURE, line);
8863 LABEL *lstart = NEW_LABEL(line);
8864 LABEL *lend = NEW_LABEL(line);
8865 LABEL *lcont = NEW_LABEL(line);
8873 CHECK(COMPILE_POPPED(ensr, "ensure ensr", RNODE_ENSURE(node)->nd_ensr));
8875 last_leave = last && IS_INSN(last) && IS_INSN_ID(last, leave);
8880 push_ensure_entry(iseq, &enl, &er, RNODE_ENSURE(node)->nd_ensr);
8882 ADD_LABEL(ret, lstart);
8883 CHECK(COMPILE_(ret, "ensure head", RNODE_ENSURE(node)->nd_head, (popped | last_leave)));
8884 ADD_LABEL(ret, lend);
8886 if (!popped && last_leave) ADD_INSN(ret, line_node, putnil);
8887 ADD_LABEL(ret, lcont);
8888 if (last_leave) ADD_INSN(ret, line_node, pop);
8890 erange = ISEQ_COMPILE_DATA(iseq)->ensure_node_stack->erange;
8891 if (lstart->link.next != &lend->link) {
8893 ADD_CATCH_ENTRY(CATCH_TYPE_ENSURE, erange->begin, erange->end,
8895 erange = erange->next;
8899 ISEQ_COMPILE_DATA(iseq)->ensure_node_stack = enl.prev;
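/*
 * compile_return(): inside a method body a plain `leave` is emitted (after
 * running any pending ensure ISeqs); in other scopes the return value is
 * thrown with TAG_RETURN so the VM unwinds to the enclosing method frame.
 */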
8906 const NODE *line_node = node;
8909 enum rb_iseq_type type = ISEQ_BODY(iseq)->type;
8911 enum rb_iseq_type t = type;
8912 const NODE *retval = RNODE_RETURN(node)->nd_stts;
8915 while (t == ISEQ_TYPE_RESCUE || t == ISEQ_TYPE_ENSURE) {
8916 if (!(is = ISEQ_BODY(is)->parent_iseq)) break;
8917 t = ISEQ_BODY(is)->type;
8921 case ISEQ_TYPE_MAIN:
8923 rb_warn("argument of top-level return is ignored");
8927 type = ISEQ_TYPE_METHOD;
8934 if (type == ISEQ_TYPE_METHOD) {
8935 splabel = NEW_LABEL(0);
8936 ADD_LABEL(ret, splabel);
8937 ADD_ADJUST(ret, line_node, 0);
8940 CHECK(COMPILE(ret, "return nd_stts (return val)", retval));
8942 if (type == ISEQ_TYPE_METHOD && can_add_ensure_iseq(iseq)) {
8943 add_ensure_iseq(ret, iseq, 1);
8945 ADD_INSN(ret, line_node, leave);
8946 ADD_ADJUST_RESTORE(ret, splabel);
8949 ADD_INSN(ret, line_node, putnil);
8953 ADD_INSN1(ret, line_node, throw, INT2FIX(TAG_RETURN));
8955 ADD_INSN(ret, line_node, pop);
8966 if (!i) return false;
8967 if (IS_TRACE(i)) i = i->prev;
8968 if (!IS_INSN(i) || !IS_INSN_ID(i, putnil)) return false;
8970 if (IS_ADJUST(i)) i = i->prev;
8971 if (!IS_INSN(i)) return false;
8972 switch (INSN_OF(i)) {
8979 (ret->last = last->prev)->next = NULL;
8986 CHECK(COMPILE_(ret, "nd_body", node, popped));
8988 if (!popped && !all_string_result_p(node)) {
8989 const NODE *line_node = node;
8990 const unsigned int flag = VM_CALL_FCALL;
8994 ADD_INSN(ret, line_node, dup);
8995 ADD_INSN1(ret, line_node, objtostring, new_callinfo(iseq, idTo_s, 0, flag, NULL, FALSE));
8996 ADD_INSN(ret, line_node, anytostring);
9004 int idx = ISEQ_BODY(ISEQ_BODY(iseq)->local_iseq)->local_table_size - get_local_var_idx(iseq, id);
9006 debugs("id: %s idx: %d\n", rb_id2name(id), idx);
9007 ADD_GETLOCAL(ret, line_node, idx, get_lvar_level(iseq));
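/*
 * qcall_branch_start()/qcall_branch_end(): safe navigation (`&.`).  The
 * receiver is dup'ed and branchnil jumps to else_label when it is nil;
 * both arms record "then"/"else" branch coverage.
 */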
9013 LABEL *else_label = NEW_LABEL(nd_line(line_node));
9016 br = decl_branch_base(iseq, PTR2NUM(node), nd_code_loc(node), "&.");
9018 ADD_INSN(recv, line_node, dup);
9019 ADD_INSNL(recv, line_node, branchnil, else_label);
9020 add_trace_branch_coverage(iseq, recv, nd_code_loc(node), nd_node_id(node), 0, "then", br);
9028 if (!else_label) return;
9029 end_label = NEW_LABEL(nd_line(line_node));
9030 ADD_INSNL(ret, line_node, jump, end_label);
9031 ADD_LABEL(ret, else_label);
9032 add_trace_branch_coverage(iseq, ret, nd_code_loc(node), nd_node_id(node), 1, "else", branches);
9033 ADD_LABEL(ret, end_label);
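/*
 * compile_call_precheck_freeze(): `"lit".freeze` and `-"lit"` on a string
 * literal with no arguments or block compile to the specialized
 * opt_str_freeze / opt_str_uminus instructions when specialized
 * instructions are enabled.
 */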
9042 if (get_nd_recv(node) &&
9043 (nd_type_p(get_nd_recv(node), NODE_STR) || nd_type_p(get_nd_recv(node), NODE_FILE)) &&
9044 (get_node_call_nd_mid(node) == idFreeze || get_node_call_nd_mid(node) == idUMinus) &&
9045 get_nd_args(node) == NULL &&
9046 ISEQ_COMPILE_DATA(iseq)->current_block == NULL &&
9047 ISEQ_COMPILE_DATA(iseq)->option->specialized_instruction) {
9048 VALUE str = get_string_value(get_nd_recv(node));
9049 if (get_node_call_nd_mid(node) == idUMinus) {
9050 ADD_INSN2(ret, line_node, opt_str_uminus, str,
9051 new_callinfo(iseq, idUMinus, 0, 0, NULL, FALSE));
9054 ADD_INSN2(ret, line_node, opt_str_freeze, str,
9055 new_callinfo(iseq, idFreeze, 0, 0, NULL, FALSE));
9059 ADD_INSN(ret, line_node, pop);
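/*
 * Builtin-function call support: inside core library sources a call spelled
 * __builtin.foo, Primitive.foo or __builtin_foo is treated as a builtin.
 * iseq_builtin_function_name() returns the bare name ("foo"), or NULL for
 * an ordinary method call.
 */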
9067 iseq_has_builtin_function_table(const rb_iseq_t *iseq)
9069 return ISEQ_COMPILE_DATA(iseq)->builtin_function_table != NULL;
9073 iseq_builtin_function_lookup(const rb_iseq_t *iseq, const char *name)
9076 const struct rb_builtin_function *table = ISEQ_COMPILE_DATA(iseq)->builtin_function_table;
9077 for (i=0; table[i].index != -1; i++) {
9078 if (strcmp(table[i].name, name) == 0) {
9086 iseq_builtin_function_name(const enum node_type type, const NODE *recv, ID mid)
9088 const char *name = rb_id2name(mid);
9089 static const char prefix[] = "__builtin_";
9090 const size_t prefix_len = sizeof(prefix) - 1;
9095 switch (nd_type(recv)) {
9097 if (RNODE_VCALL(recv)->nd_mid == rb_intern("__builtin")) {
9102 if (RNODE_CONST(recv)->nd_vid == rb_intern("Primitive")) {
9112 if (UNLIKELY(strncmp(prefix, name, prefix_len) == 0)) {
9113 return &name[prefix_len];
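/*
 * delegate_call_p(): true when the compiled argument list is nothing but
 * consecutive level-0 getlocal instructions, so invokebuiltin can be
 * replaced by opt_invokebuiltin_delegate, which reads the arguments
 * directly from the local table starting at *pstart_index.
 */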
9122 delegate_call_p(const rb_iseq_t *iseq, unsigned int argc, const LINK_ANCHOR *args, unsigned int *pstart_index)
9129 else if (argc <= ISEQ_BODY(iseq)->local_table_size) {
9130 unsigned int start=0;
9135 argc + start <= ISEQ_BODY(iseq)->local_table_size;
9139 for (unsigned int i=start; i-start<argc; i++) {
9140 if (IS_INSN(elem) &&
9141 INSN_OF(elem) == BIN(getlocal)) {
9142 int local_index = FIX2INT(OPERAND_AT(elem, 0));
9143 int local_level = FIX2INT(OPERAND_AT(elem, 1));
9145 if (local_level == 0) {
9146 unsigned int index = ISEQ_BODY(iseq)->local_table_size - (local_index - VM_ENV_DATA_SIZE + 1);
9148 fprintf(stderr, "lvar:%s (%d), id:%s (%d) local_index:%d, local_size:%d\n",
9149 rb_id2name(ISEQ_BODY(iseq)->local_table[i]), i,
9150 rb_id2name(ISEQ_BODY(iseq)->local_table[index]), index,
9151 local_index, (int)ISEQ_BODY(iseq)->local_table_size);
9175 *pstart_index = start;
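/*
 * compile_builtin_attr(): Primitive.attr! :leaf, :inline_block, :use_block,
 * :c_trace - each symbol argument sets the corresponding BUILTIN_ATTR_*
 * flag on the ISeq (or marks it as using a block).
 */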
9189 if (!node) goto no_arg;
9191 if (!nd_type_p(node, NODE_LIST)) goto bad_arg;
9192 const NODE *next = RNODE_LIST(node)->nd_next;
9194 node = RNODE_LIST(node)->nd_head;
9195 if (!node) goto no_arg;
9196 switch (nd_type(node)) {
9198 symbol = rb_node_sym_string_val(node);
9204 if (!SYMBOL_P(symbol)) goto non_symbol_arg;
9207 if (strcmp(RSTRING_PTR(string), "leaf") == 0) {
9208 ISEQ_BODY(iseq)->builtin_attrs |= BUILTIN_ATTR_LEAF;
9210 else if (strcmp(RSTRING_PTR(string), "inline_block") == 0) {
9211 ISEQ_BODY(iseq)->builtin_attrs |= BUILTIN_ATTR_INLINE_BLOCK;
9213 else if (strcmp(RSTRING_PTR(string), "use_block") == 0) {
9214 iseq_set_use_block(iseq);
9216 else if (strcmp(RSTRING_PTR(string), "c_trace") == 0) {
9218 ISEQ_BODY(iseq)->builtin_attrs |= BUILTIN_ATTR_C_TRACE;
9227 COMPILE_ERROR(ERROR_ARGS "attr!: no argument");
9230 COMPILE_ERROR(ERROR_ARGS "non symbol argument to attr!: %s", rb_builtin_class_name(symbol));
9233 COMPILE_ERROR(ERROR_ARGS "unknown argument to attr!: %s", RSTRING_PTR(string));
9236 UNKNOWN_NODE("attr!", node, COMPILE_NG);
9244 if (!node) goto no_arg;
9245 if (!nd_type_p(node, NODE_LIST)) goto bad_arg;
9246 if (RNODE_LIST(node)->nd_next) goto too_many_arg;
9247 node = RNODE_LIST(node)->nd_head;
9248 if (!node) goto no_arg;
9249 switch (nd_type(node)) {
9251 name = rb_node_sym_string_val(node);
9256 if (!SYMBOL_P(name)) goto non_symbol_arg;
9258 compile_lvar(iseq, ret, line_node, SYM2ID(name));
9262 COMPILE_ERROR(ERROR_ARGS "arg!: no argument");
9265 COMPILE_ERROR(ERROR_ARGS "arg!: too many argument");
9268 COMPILE_ERROR(ERROR_ARGS "non symbol argument to arg!: %s",
9269 rb_builtin_class_name(name));
9272 UNKNOWN_NODE("arg!", node, COMPILE_NG);
9278 const NODE *node = ISEQ_COMPILE_DATA(iseq)->root_node;
9279 if (nd_type(node) == NODE_IF && RNODE_IF(node)->nd_cond == cond_node) {
9280 return RNODE_IF(node)->nd_body;
9283 rb_bug("mandatory_node: can't find mandatory node");
9288 compile_builtin_mandatory_only_method(rb_iseq_t *iseq, const NODE *node, const NODE *line_node)
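/*
 * For a core method written as `if Primitive.mandatory_only? ... else ...
 * end`, the mandatory-only branch is compiled into a separate ISeq that
 * takes only the leading parameters and is stored in mandatory_only_iseq.
 */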
9292 .pre_args_num = ISEQ_BODY(iseq)->param.lead_num,
9295 rb_node_init(RNODE(&args_node), NODE_ARGS);
9296 args_node.nd_ainfo = args;
9299 const int skip_local_size = ISEQ_BODY(iseq)->param.size - ISEQ_BODY(iseq)->param.lead_num;
9300 const int table_size = ISEQ_BODY(iseq)->local_table_size - skip_local_size;
9304 tbl->size = table_size;
9309 for (i=0; i<ISEQ_BODY(iseq)->param.lead_num; i++) {
9310 tbl->ids[i] = ISEQ_BODY(iseq)->local_table[i];
9313 for (; i<table_size; i++) {
9314 tbl->ids[i] = ISEQ_BODY(iseq)->local_table[i + skip_local_size];
9318 rb_node_init(RNODE(&scope_node), NODE_SCOPE);
9319 scope_node.nd_tbl = tbl;
9320 scope_node.nd_body = mandatory_node(iseq, node);
9321 scope_node.nd_parent = NULL;
9322 scope_node.nd_args = &args_node;
9324 VALUE ast_value = rb_ruby_ast_new(RNODE(&scope_node));
9327 rb_iseq_new_with_opt(ast_value, rb_iseq_base_label(iseq),
9328 rb_iseq_path(iseq), rb_iseq_realpath(iseq),
9329 nd_line(line_node), NULL, 0,
9330 ISEQ_TYPE_METHOD, ISEQ_COMPILE_DATA(iseq)->option,
9331 ISEQ_BODY(iseq)->variable.script_lines);
9332 RB_OBJ_WRITE(iseq, &ISEQ_BODY(iseq)->mandatory_only_iseq, (VALUE)mandatory_only_iseq);
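/*
 * compile_builtin_function_call(): dispatches Primitive.cstmt!/cexpr!/
 * cconst!/cinit!/attr!/arg!/mandatory_only?.  C-inlined forms get a
 * generated "_bi<line>" name looked up in the builtin function table, and
 * the call is emitted as invokebuiltin (or opt_invokebuiltin_delegate when
 * the arguments are plain locals).
 */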
9342 NODE *args_node = get_nd_args(node);
9344 if (parent_block != NULL) {
9345 COMPILE_ERROR(ERROR_ARGS_AT(line_node) "should not call builtins here.");
9349 # define BUILTIN_INLINE_PREFIX "_bi"
9350 char inline_func[sizeof(BUILTIN_INLINE_PREFIX) + DECIMAL_SIZE_OF(int)];
9351 bool cconst = false;
9356 if (strcmp("cstmt!", builtin_func) == 0 ||
9357 strcmp("cexpr!", builtin_func) == 0) {
9360 else if (strcmp("cconst!", builtin_func) == 0) {
9363 else if (strcmp("cinit!", builtin_func) == 0) {
9367 else if (strcmp("attr!", builtin_func) == 0) {
9368 return compile_builtin_attr(iseq, args_node);
9370 else if (strcmp("arg!", builtin_func) == 0) {
9371 return compile_builtin_arg(iseq, ret, args_node, line_node, popped);
9373 else if (strcmp("mandatory_only?", builtin_func) == 0) {
9375 rb_bug("mandatory_only? should be in if condition");
9377 else if (!LIST_INSN_SIZE_ZERO(ret)) {
9378 rb_bug("mandatory_only? should be put on top");
9381 ADD_INSN1(ret, line_node, putobject, Qfalse);
9382 return compile_builtin_mandatory_only_method(iseq, node, line_node);
9385 rb_bug("can't find builtin function:%s", builtin_func);
9388 COMPILE_ERROR(ERROR_ARGS "can't find builtin function:%s", builtin_func);
9392 int inline_index = nd_line(node);
9393 snprintf(inline_func, sizeof(inline_func), BUILTIN_INLINE_PREFIX "%d", inline_index);
9394 builtin_func = inline_func;
9400 typedef VALUE(*builtin_func0)(void *, VALUE);
9401 VALUE const_val = (*(builtin_func0)(uintptr_t)bf->func_ptr)(NULL, Qnil);
9402 ADD_INSN1(ret, line_node, putobject, const_val);
9408 unsigned int flag = 0;
9410 VALUE argc = setup_args(iseq, args, args_node, &flag, &keywords);
9412 if (FIX2INT(argc) != bf->argc) {
9413 COMPILE_ERROR(ERROR_ARGS "argc is not match for builtin function:%s (expect %d but %d)",
9414 builtin_func, bf->argc, FIX2INT(argc));
9418 unsigned int start_index;
9419 if (delegate_call_p(iseq, FIX2INT(argc), args, &start_index)) {
9420 ADD_INSN2(ret, line_node, opt_invokebuiltin_delegate, bf, INT2FIX(start_index));
9424 ADD_INSN1(ret, line_node, invokebuiltin, bf);
9427 if (popped) ADD_INSN(ret, line_node, pop);
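/*
 * compile_call(): NODE_CALL/FCALL/VCALL/QCALL/OPCALL.  Pushes the receiver
 * (putself for function calls, getblockparamproxy for `block.call`), sets
 * up the arguments, then emits send - with special handling for `&.`,
 * the optional goto/label extension, builtin functions and Class#new.
 */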
9433 compile_call(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, const enum node_type type, const NODE *const line_node, int popped, bool assume_receiver)
9441 ID mid = get_node_call_nd_mid(node);
9443 unsigned int flag = 0;
9445 const rb_iseq_t *parent_block = ISEQ_COMPILE_DATA(iseq)->current_block;
9446 LABEL *else_label = NULL;
9449 ISEQ_COMPILE_DATA(iseq)->current_block = NULL;
9455 if (nd_type_p(node, NODE_VCALL)) {
9460 CONST_ID(id_answer, "the_answer_to_life_the_universe_and_everything");
9462 if (mid == id_bitblt) {
9463 ADD_INSN(ret, line_node, bitblt);
9466 else if (mid == id_answer) {
9467 ADD_INSN(ret, line_node, answer);
9479 if (nd_type_p(node, NODE_FCALL) &&
9480 (mid == goto_id || mid == label_id)) {
9483 st_table *labels_table = ISEQ_COMPILE_DATA(iseq)->labels_table;
9486 if (!labels_table) {
9487 labels_table = st_init_numtable();
9488 ISEQ_COMPILE_DATA(iseq)->labels_table = labels_table;
9491 COMPILE_ERROR(ERROR_ARGS "invalid goto/label format");
9495 if (mid == goto_id) {
9496 ADD_INSNL(ret, line_node, jump, label);
9499 ADD_LABEL(ret, label);
9506 const char *builtin_func;
9507 if (UNLIKELY(iseq_has_builtin_function_table(iseq)) &&
9508 (builtin_func = iseq_builtin_function_name(type, get_nd_recv(node), mid)) != NULL) {
9509 return compile_builtin_function_call(iseq, ret, node, line_node, popped, parent_block, args, builtin_func);
9513 if (!assume_receiver) {
9514 if (type == NODE_CALL || type == NODE_OPCALL || type == NODE_QCALL) {
9517 if (mid == idCall &&
9518 nd_type_p(get_nd_recv(node), NODE_LVAR) &&
9519 iseq_block_param_id_p(iseq, RNODE_LVAR(get_nd_recv(node))->nd_vid, &idx, &level)) {
9520 ADD_INSN2(recv, get_nd_recv(node), getblockparamproxy, INT2FIX(idx + VM_ENV_DATA_SIZE - 1), INT2FIX(level));
9522 else if (private_recv_p(node)) {
9523 ADD_INSN(recv, node, putself);
9524 flag |= VM_CALL_FCALL;
9527 CHECK(COMPILE(recv, "recv", get_nd_recv(node)));
9530 if (type == NODE_QCALL) {
9531 else_label = qcall_branch_start(iseq, recv, &branches, node, line_node);
9534 else if (type == NODE_FCALL || type == NODE_VCALL) {
9535 ADD_CALL_RECEIVER(recv, line_node);
9540 if (type != NODE_VCALL) {
9541 argc = setup_args(iseq, args, get_nd_args(node), &flag, &keywords);
9542 CHECK(!NIL_P(argc));
9550 bool inline_new = ISEQ_COMPILE_DATA(iseq)->option->specialized_instruction &&
9551 mid == rb_intern("new") &&
9552 parent_block == NULL &&
9553 !(flag & VM_CALL_ARGS_BLOCKARG);
9556 ADD_INSN(ret, node, putnil);
9557 ADD_INSN(ret, node, swap);
9562 debugp_param("call args argc", argc);
9563 debugp_param("call method", ID2SYM(mid));
9565 switch ((int)type) {
9567 flag |= VM_CALL_VCALL;
9570 flag |= VM_CALL_FCALL;
9573 if ((flag & VM_CALL_ARGS_BLOCKARG) && (flag & VM_CALL_KW_SPLAT) && !(flag & VM_CALL_KW_SPLAT_MUT)) {
9574 ADD_INSN(ret, line_node, splatkw);
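/*
 * Class#new inlining (inline_new): opt_new allocates the object and calls
 * #initialize on the fast path; not_basic_new falls back to a regular
 * `new` send.  The earlier putnil/swap reserves the stack slot that
 * receives the allocated object.
 */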
9577 LABEL *not_basic_new = NEW_LABEL(nd_line(node));
9578 LABEL *not_basic_new_finish = NEW_LABEL(nd_line(node));
9583 if (flag & VM_CALL_FORWARDING) {
9584 ci = (VALUE)new_callinfo(iseq, mid, NUM2INT(argc) + 1, flag, keywords, 0);
9587 ci = (VALUE)new_callinfo(iseq, mid, NUM2INT(argc), flag, keywords, 0);
9589 ADD_INSN2(ret, node, opt_new, ci, not_basic_new);
9590 LABEL_REF(not_basic_new);
9593 ADD_SEND_R(ret, line_node, rb_intern("initialize"), argc, parent_block, INT2FIX(flag | VM_CALL_FCALL), keywords);
9594 ADD_INSNL(ret, line_node, jump, not_basic_new_finish);
9596 ADD_LABEL(ret, not_basic_new);
9598 ADD_SEND_R(ret, line_node, mid, argc, parent_block, INT2FIX(flag), keywords);
9599 ADD_INSN(ret, line_node, swap);
9601 ADD_LABEL(ret, not_basic_new_finish);
9602 ADD_INSN(ret, line_node, pop);
9605 ADD_SEND_R(ret, line_node, mid, argc, parent_block, INT2FIX(flag), keywords);
9608 qcall_branch_end(iseq, ret, else_label, branches, node, line_node);
9610 ADD_INSN(ret, line_node, pop);
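/*
 * compile_op_asgn1(): `recv[idx] op= rvalue` (including ||= / &&=).
 * Receiver and index are preserved with dupn, the element is read with
 * [] (idAREF), the operator is applied or short-circuited, and the result
 * is written back with []= (idASET).
 */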
9618 const int line = nd_line(node);
9620 unsigned int flag = 0;
9622 ID id = RNODE_OP_ASGN1(node)->nd_mid;
9648 ADD_INSN(ret, node, putnil);
9650 asgnflag = COMPILE_RECV(ret, "NODE_OP_ASGN1 recv", node, RNODE_OP_ASGN1(node)->nd_recv);
9651 CHECK(asgnflag != -1);
9652 switch (nd_type(RNODE_OP_ASGN1(node)->nd_index)) {
9657 argc = setup_args(iseq, ret, RNODE_OP_ASGN1(node)->nd_index, &flag, NULL);
9658 CHECK(!NIL_P(argc));
9660 int dup_argn = FIX2INT(argc) + 1;
9661 ADD_INSN1(ret, node, dupn, INT2FIX(dup_argn));
9663 ADD_SEND_R(ret, node, idAREF, argc, NULL, INT2FIX(flag & ~VM_CALL_ARGS_SPLAT_MUT), NULL);
9665 if (id == idOROP || id == idANDOP) {
9674 LABEL *label = NEW_LABEL(line);
9675 LABEL *lfin = NEW_LABEL(line);
9677 ADD_INSN(ret, node, dup);
9679 ADD_INSNL(ret, node, branchif, label);
9682 ADD_INSNL(ret, node, branchunless, label);
9684 ADD_INSN(ret, node, pop);
9686 CHECK(COMPILE(ret, "NODE_OP_ASGN1 nd_rvalue: ", RNODE_OP_ASGN1(node)->nd_rvalue));
9688 ADD_INSN1(ret, node, setn, INT2FIX(dup_argn+1));
9690 if (flag & VM_CALL_ARGS_SPLAT) {
9691 if (!(flag & VM_CALL_ARGS_SPLAT_MUT)) {
9692 ADD_INSN(ret, node, swap);
9693 ADD_INSN1(ret, node, splatarray, Qtrue);
9694 ADD_INSN(ret, node, swap);
9695 flag |= VM_CALL_ARGS_SPLAT_MUT;
9697 ADD_INSN1(ret, node, pushtoarray, INT2FIX(1));
9698 ADD_SEND_R(ret, node, idASET, argc, NULL, INT2FIX(flag), NULL);
9701 ADD_SEND_R(ret, node, idASET, FIXNUM_INC(argc, 1), NULL, INT2FIX(flag), NULL);
9703 ADD_INSN(ret, node, pop);
9704 ADD_INSNL(ret, node, jump, lfin);
9705 ADD_LABEL(ret, label);
9707 ADD_INSN1(ret, node, setn, INT2FIX(dup_argn+1));
9709 ADD_INSN1(ret, node, adjuststack, INT2FIX(dup_argn+1));
9710 ADD_LABEL(ret, lfin);
9713 CHECK(COMPILE(ret, "NODE_OP_ASGN1 nd_rvalue: ", RNODE_OP_ASGN1(node)->nd_rvalue));
9714 ADD_SEND(ret, node, id, INT2FIX(1));
9716 ADD_INSN1(ret, node, setn, INT2FIX(dup_argn+1));
9718 if (flag & VM_CALL_ARGS_SPLAT) {
9719 if (flag & VM_CALL_KW_SPLAT) {
9720 ADD_INSN1(ret, node, topn, INT2FIX(2));
9721 if (!(flag & VM_CALL_ARGS_SPLAT_MUT)) {
9722 ADD_INSN1(ret, node, splatarray, Qtrue);
9723 flag |= VM_CALL_ARGS_SPLAT_MUT;
9725 ADD_INSN(ret, node, swap);
9726 ADD_INSN1(ret, node, pushtoarray, INT2FIX(1));
9727 ADD_INSN1(ret, node, setn, INT2FIX(2));
9728 ADD_INSN(ret, node, pop);
9731 if (!(flag & VM_CALL_ARGS_SPLAT_MUT)) {
9732 ADD_INSN(ret, node, swap);
9733 ADD_INSN1(ret, node, splatarray, Qtrue);
9734 ADD_INSN(ret, node, swap);
9735 flag |= VM_CALL_ARGS_SPLAT_MUT;
9737 ADD_INSN1(ret, node, pushtoarray, INT2FIX(1));
9739 ADD_SEND_R(ret, node, idASET, argc, NULL, INT2FIX(flag), NULL);
9742 ADD_SEND_R(ret, node, idASET, FIXNUM_INC(argc, 1), NULL, INT2FIX(flag), NULL);
9744 ADD_INSN(ret, node, pop);
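/*
 * compile_op_asgn2(): `recv.attr op= value` and `recv&.attr op= value`.
 * The getter `vid` is sent, the operator applied (or short-circuited for
 * ||= / &&=), then the setter `aid` (attr=) is sent; branchnil to lskip
 * implements the &. form.
 */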
9752 const int line = nd_line(node);
9753 ID atype = RNODE_OP_ASGN2(node)->nd_mid;
9754 ID vid = RNODE_OP_ASGN2(node)->nd_vid, aid = rb_id_attrset(vid);
9756 LABEL *lfin = NEW_LABEL(line);
9757 LABEL *lcfin = NEW_LABEL(line);
9812 asgnflag = COMPILE_RECV(ret, "NODE_OP_ASGN2#recv", node, RNODE_OP_ASGN2(node)->nd_recv);
9813 CHECK(asgnflag != -1);
9814 if (RNODE_OP_ASGN2(node)->nd_aid) {
9815 lskip = NEW_LABEL(line);
9816 ADD_INSN(ret, node, dup);
9817 ADD_INSNL(ret, node, branchnil, lskip);
9819 ADD_INSN(ret, node, dup);
9820 ADD_SEND_WITH_FLAG(ret, node, vid, INT2FIX(0), INT2FIX(asgnflag));
9822 if (atype == idOROP || atype == idANDOP) {
9824 ADD_INSN(ret, node, dup);
9826 if (atype == idOROP) {
9827 ADD_INSNL(ret, node, branchif, lcfin);
9830 ADD_INSNL(ret, node, branchunless, lcfin);
9833 ADD_INSN(ret, node, pop);
9835 CHECK(COMPILE(ret, "NODE_OP_ASGN2 val", RNODE_OP_ASGN2(node)->nd_value));
9837 ADD_INSN(ret, node, swap);
9838 ADD_INSN1(ret, node, topn, INT2FIX(1));
9840 ADD_SEND_WITH_FLAG(ret, node, aid, INT2FIX(1), INT2FIX(asgnflag));
9841 ADD_INSNL(ret, node, jump, lfin);
9843 ADD_LABEL(ret, lcfin);
9845 ADD_INSN(ret, node, swap);
9848 ADD_LABEL(ret, lfin);
9851 CHECK(COMPILE(ret, "NODE_OP_ASGN2 val", RNODE_OP_ASGN2(node)->nd_value));
9852 ADD_SEND(ret, node, atype, INT2FIX(1));
9854 ADD_INSN(ret, node, swap);
9855 ADD_INSN1(ret, node, topn, INT2FIX(1));
9857 ADD_SEND_WITH_FLAG(ret, node, aid, INT2FIX(1), INT2FIX(asgnflag));
9859 if (lskip && popped) {
9860 ADD_LABEL(ret, lskip);
9862 ADD_INSN(ret, node, pop);
9863 if (lskip && !popped) {
9864 ADD_LABEL(ret, lskip);
9869 static int compile_shareable_constant_value(rb_iseq_t *iseq, LINK_ANCHOR *ret, enum rb_parser_shareability shareable, const NODE *lhs, const NODE *value);
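/*
 * compile_op_cdecl(): `Base::CONST op= value`.  The cbase is resolved, the
 * constant read with getconstant (guarded by `defined` for ||= so a missing
 * constant assigns instead of raising), the operator applied or
 * short-circuited, and the result written back with setconstant.
 */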
9874 const int line = nd_line(node);
9879 switch (nd_type(RNODE_OP_CDECL(node)->nd_head)) {
9881 ADD_INSN1(ret, node, putobject, rb_cObject);
9884 CHECK(COMPILE(ret, "NODE_OP_CDECL/colon2#nd_head", RNODE_COLON2(RNODE_OP_CDECL(node)->nd_head)->nd_head));
9887 COMPILE_ERROR(ERROR_ARGS "%s: invalid node in NODE_OP_CDECL",
9888 ruby_node_name(nd_type(RNODE_OP_CDECL(node)->nd_head)));
9891 mid = get_node_colon_nd_mid(RNODE_OP_CDECL(node)->nd_head);
9893 if (RNODE_OP_CDECL(node)->nd_aid == idOROP) {
9894 lassign = NEW_LABEL(line);
9895 ADD_INSN(ret, node, dup);
9896 ADD_INSN3(ret, node, defined, INT2FIX(DEFINED_CONST_FROM),
9898 ADD_INSNL(ret, node, branchunless, lassign);
9900 ADD_INSN(ret, node, dup);
9901 ADD_INSN1(ret, node, putobject, Qtrue);
9902 ADD_INSN1(ret, node, getconstant, ID2SYM(mid));
9904 if (RNODE_OP_CDECL(node)->nd_aid == idOROP || RNODE_OP_CDECL(node)->nd_aid == idANDOP) {
9905 lfin = NEW_LABEL(line);
9906 if (!popped) ADD_INSN(ret, node, dup);
9907 if (RNODE_OP_CDECL(node)->nd_aid == idOROP)
9908 ADD_INSNL(ret, node, branchif, lfin);
9910 ADD_INSNL(ret, node, branchunless, lfin);
9912 if (!popped) ADD_INSN(ret, node, pop);
9913 if (lassign) ADD_LABEL(ret, lassign);
9914 CHECK(compile_shareable_constant_value(iseq, ret, RNODE_OP_CDECL(node)->shareability, RNODE_OP_CDECL(node)->nd_head, RNODE_OP_CDECL(node)->nd_value));
9917 ADD_INSN1(ret, node, topn, INT2FIX(1));
9919 ADD_INSN1(ret, node, dupn, INT2FIX(2));
9920 ADD_INSN(ret, node, swap);
9922 ADD_INSN1(ret, node, setconstant, ID2SYM(mid));
9923 ADD_LABEL(ret, lfin);
9924 if (!popped) ADD_INSN(ret, node, swap);
9925 ADD_INSN(ret, node, pop);
9928 CHECK(compile_shareable_constant_value(iseq, ret, RNODE_OP_CDECL(node)->shareability, RNODE_OP_CDECL(node)->nd_head, RNODE_OP_CDECL(node)->nd_value));
9930 ADD_CALL(ret, node, RNODE_OP_CDECL(node)->nd_aid, INT2FIX(1));
9932 ADD_INSN(ret, node, swap);
9934 ADD_INSN1(ret, node, topn, INT2FIX(1));
9935 ADD_INSN(ret, node, swap);
9937 ADD_INSN1(ret, node, setconstant, ID2SYM(mid));
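/*
 * compile_op_log(): `lhs ||= value` / `lhs &&= value` for plain variables.
 * For ||= on anything but an instance variable a `defined` check is emitted
 * first so an unset variable assigns rather than raising; nd_value is only
 * compiled on the assigning path.
 */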
9945 const int line = nd_line(node);
9946 LABEL *lfin = NEW_LABEL(line);
9949 if (type == NODE_OP_ASGN_OR && !nd_type_p(RNODE_OP_ASGN_OR(node)->nd_head, NODE_IVAR)) {
9953 defined_expr(iseq, ret, RNODE_OP_ASGN_OR(node)->nd_head, lfinish, Qfalse, false);
9954 lassign = lfinish[1];
9956 lassign = NEW_LABEL(line);
9958 ADD_INSNL(ret, node, branchunless, lassign);
9961 lassign = NEW_LABEL(line);
9964 CHECK(COMPILE(ret, "NODE_OP_ASGN_AND/OR#nd_head", RNODE_OP_ASGN_OR(node)->nd_head));
9967 ADD_INSN(ret, node, dup);
9970 if (type == NODE_OP_ASGN_AND) {
9971 ADD_INSNL(ret, node, branchunless, lfin);
9974 ADD_INSNL(ret, node, branchif, lfin);
9978 ADD_INSN(ret, node, pop);
9981 ADD_LABEL(ret, lassign);
9982 CHECK(COMPILE_(ret, "NODE_OP_ASGN_AND/OR#nd_value", RNODE_OP_ASGN_OR(node)->nd_value, popped));
9983 ADD_LABEL(ret, lfin);
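/*
 * compile_super(): super / zsuper.  For bare `super` the current method's
 * parameters (lead, opt, rest, post, keyword) are re-pushed from the local
 * table so the superclass method receives the same arguments; the call is
 * emitted as invokesuper, or invokesuperforward under `...` forwarding.
 */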
9993 unsigned int flag = 0;
9995 const rb_iseq_t *parent_block = ISEQ_COMPILE_DATA(iseq)->current_block;
9999 ISEQ_COMPILE_DATA(iseq)->current_block = NULL;
10001 if (type == NODE_SUPER) {
10002 VALUE vargc = setup_args(iseq, args, RNODE_SUPER(node)->nd_args, &flag, &keywords);
10003 CHECK(!NIL_P(vargc));
10005 if ((flag & VM_CALL_ARGS_BLOCKARG) && (flag & VM_CALL_KW_SPLAT) && !(flag & VM_CALL_KW_SPLAT_MUT)) {
10006 ADD_INSN(args, node, splatkw);
10009 if (flag & VM_CALL_ARGS_BLOCKARG) {
10016 const rb_iseq_t *liseq = body->local_iseq;
10018 const struct rb_iseq_param_keyword *const local_kwd = local_body->param.keyword;
10019 int lvar_level = get_lvar_level(iseq);
10021 argc = local_body->param.lead_num;
10024 for (i = 0; i < local_body->param.lead_num; i++) {
10025 int idx = local_body->local_table_size - i;
10026 ADD_GETLOCAL(args, node, idx, lvar_level);
10030 if (local_body->param.flags.forwardable) {
10031 flag |= VM_CALL_FORWARDING;
10032 int idx = local_body->local_table_size - get_local_var_idx(liseq, idDot3);
10033 ADD_GETLOCAL(args, node, idx, lvar_level);
10036 if (local_body->param.flags.has_opt) {
10039 for (j = 0; j < local_body->param.opt_num; j++) {
10040 int idx = local_body->local_table_size - (i + j);
10041 ADD_GETLOCAL(args, node, idx, lvar_level);
10046 if (local_body->param.flags.has_rest) {
10048 int idx = local_body->local_table_size - local_body->param.rest_start;
10049 ADD_GETLOCAL(args, node, idx, lvar_level);
10050 ADD_INSN1(args, node, splatarray, RBOOL(local_body->param.flags.has_post));
10052 argc = local_body->param.rest_start + 1;
10053 flag |= VM_CALL_ARGS_SPLAT;
10055 if (local_body->param.flags.has_post) {
10057 int post_len = local_body->param.post_num;
10058 int post_start = local_body->param.post_start;
10060 if (local_body->param.flags.has_rest) {
10062 for (j=0; j<post_len; j++) {
10063 int idx = local_body->local_table_size - (post_start + j);
10064 ADD_GETLOCAL(args, node, idx, lvar_level);
10066 ADD_INSN1(args, node, pushtoarray, INT2FIX(j));
10067 flag |= VM_CALL_ARGS_SPLAT_MUT;
10072 for (j=0; j<post_len; j++) {
10073 int idx = local_body->local_table_size - (post_start + j);
10074 ADD_GETLOCAL(args, node, idx, lvar_level);
10076 argc = post_len + post_start;
10080 if (local_body->param.flags.has_kw) {
10081 int local_size = local_body->local_table_size;
10084 ADD_INSN1(args, node, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
10086 if (local_body->param.flags.has_kwrest) {
10087 int idx = local_body->local_table_size - local_kwd->rest_start;
10088 ADD_GETLOCAL(args, node, idx, lvar_level);
10090 ADD_SEND (args, node, rb_intern("dup"), INT2FIX(0));
10093 ADD_INSN1(args, node, newhash, INT2FIX(0));
10095 for (i = 0; i < local_kwd->num; ++i) {
10096 ID id = local_kwd->table[i];
10097 int idx = local_size - get_local_var_idx(liseq, id);
10098 ADD_INSN1(args, node, putobject, ID2SYM(id));
10099 ADD_GETLOCAL(args, node, idx, lvar_level);
10101 ADD_SEND(args, node, id_core_hash_merge_ptr, INT2FIX(i * 2 + 1));
10102 flag |= VM_CALL_KW_SPLAT| VM_CALL_KW_SPLAT_MUT;
10104 else if (local_body->param.flags.has_kwrest) {
10105 int idx = local_body->local_table_size - local_kwd->rest_start;
10106 ADD_GETLOCAL(args, node, idx, lvar_level);
10108 flag |= VM_CALL_KW_SPLAT;
10112 if (use_block && parent_block == NULL) {
10113 iseq_set_use_block(ISEQ_BODY(iseq)->local_iseq);
10116 flag |= VM_CALL_SUPER | VM_CALL_FCALL;
10117 if (type == NODE_ZSUPER) flag |= VM_CALL_ZSUPER;
10118 ADD_INSN(ret, node, putself);
10119 ADD_SEQ(ret, args);
10121 const struct rb_callinfo * ci = new_callinfo(iseq, 0, argc, flag, keywords, parent_block != NULL);
10123 if (vm_ci_flag(ci) & VM_CALL_FORWARDING) {
10124 ADD_INSN2(ret, node, invokesuperforward, ci, parent_block);
10127 ADD_INSN2(ret, node, invokesuper, ci, parent_block);
10131 ADD_INSN(ret, node, pop);
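/*
 * compile_yield(): rejected in top-level, main and class bodies; otherwise
 * the arguments are set up, invokeblock is emitted, and the enclosing
 * method ISeq is marked as using a block.
 */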
10141 unsigned int flag = 0;
10146 switch (ISEQ_BODY(ISEQ_BODY(iseq)->local_iseq)->type) {
10147 case ISEQ_TYPE_TOP:
10148 case ISEQ_TYPE_MAIN:
10149 case ISEQ_TYPE_CLASS:
10150 COMPILE_ERROR(ERROR_ARGS "Invalid yield");
10155 if (RNODE_YIELD(node)->nd_head) {
10156 argc = setup_args(iseq, args, RNODE_YIELD(node)->nd_head, &flag, &keywords);
10157 CHECK(!NIL_P(argc));
10163 ADD_SEQ(ret, args);
10164 ADD_INSN1(ret, node, invokeblock, new_callinfo(iseq, 0, FIX2INT(argc), flag, keywords, FALSE));
10165 iseq_set_use_block(ISEQ_BODY(iseq)->local_iseq);
10168 ADD_INSN(ret, node, pop);
10173 for (; tmp_iseq != ISEQ_BODY(iseq)->local_iseq; level++ ) {
10174 tmp_iseq = ISEQ_BODY(tmp_iseq)->parent_iseq;
10176 if (level > 0) access_outer_variables(iseq, level, rb_intern("yield"), true);
10189 switch ((int)type) {
10192 VALUE re = rb_node_regx_string_val(node);
10193 RB_OBJ_SET_FROZEN_SHAREABLE(re);
10194 ADD_INSN1(recv, node, putobject, re);
10195 ADD_INSN2(val, node, getspecial, INT2FIX(0),
10200 CHECK(COMPILE(recv, "receiver", RNODE_MATCH2(node)->nd_recv));
10201 CHECK(COMPILE(val, "value", RNODE_MATCH2(node)->nd_value));
10204 CHECK(COMPILE(recv, "receiver", RNODE_MATCH3(node)->nd_value));
10205 CHECK(COMPILE(val, "value", RNODE_MATCH3(node)->nd_recv));
10209 ADD_SEQ(ret, recv);
10211 ADD_SEND(ret, node, idEqTilde, INT2FIX(1));
10213 if (nd_type_p(node, NODE_MATCH2) && RNODE_MATCH2(node)->nd_args) {
10214 compile_named_capture_assign(iseq, ret, RNODE_MATCH2(node)->nd_args);
10218 ADD_INSN(ret, node, pop);
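/*
 * compile_colon2()/compile_colon3(): `Foo::Bar` and `::Foo`.  With inline
 * constant caches enabled the whole path is emitted as a single
 * opt_getconstant_path with its segment list; otherwise the prefix is
 * compiled and getconstant is used.
 */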
10229 if (ISEQ_COMPILE_DATA(iseq)->option->inline_const_cache &&
10230 (segments = collect_const_segments(iseq, node))) {
10231 ISEQ_BODY(iseq)->ic_size++;
10232 ADD_INSN1(ret, node, opt_getconstant_path, segments);
10242 CHECK(compile_const_prefix(iseq, node, pref, body));
10243 if (LIST_INSN_SIZE_ZERO(pref)) {
10244 ADD_INSN(ret, node, putnil);
10245 ADD_SEQ(ret, body);
10248 ADD_SEQ(ret, pref);
10249 ADD_SEQ(ret, body);
10255 ADD_CALL_RECEIVER(ret, node);
10256 CHECK(COMPILE(ret, "colon2#nd_head", RNODE_COLON2(node)->nd_head));
10257 ADD_CALL(ret, node, RNODE_COLON2(node)->nd_mid, INT2FIX(1));
10260 ADD_INSN(ret, node, pop);
10268 debugi("colon3#nd_mid", RNODE_COLON3(node)->nd_mid);
10271 if (ISEQ_COMPILE_DATA(iseq)->option->inline_const_cache) {
10272 ISEQ_BODY(iseq)->ic_size++;
10273 VALUE segments = rb_ary_new_from_args(2, ID2SYM(idNULL), ID2SYM(RNODE_COLON3(node)->nd_mid));
10274 RB_OBJ_SET_FROZEN_SHAREABLE(segments);
10275 ADD_INSN1(ret, node, opt_getconstant_path, segments);
10279 ADD_INSN1(ret, node, putobject, rb_cObject);
10280 ADD_INSN1(ret, node, putobject, Qtrue);
10281 ADD_INSN1(ret, node, getconstant, ID2SYM(RNODE_COLON3(node)->nd_mid));
10285 ADD_INSN(ret, node, pop);
10294 const NODE *b = RNODE_DOT2(node)->nd_beg;
10295 const NODE *e = RNODE_DOT2(node)->nd_end;
10297 if (optimizable_range_item_p(b) && optimizable_range_item_p(e)) {
10299 VALUE bv = optimized_range_item(b);
10300 VALUE ev = optimized_range_item(e);
10303 ADD_INSN1(ret, node, putobject, val);
10308 CHECK(COMPILE_(ret, "min", b, popped));
10309 CHECK(COMPILE_(ret, "max", e, popped));
10311 ADD_INSN1(ret, node, newrange, flag);
10321 if (ISEQ_BODY(iseq)->type == ISEQ_TYPE_RESCUE) {
10322 ADD_GETLOCAL(ret, node, LVAR_ERRINFO, 0);
10328 if (ISEQ_BODY(ip)->type == ISEQ_TYPE_RESCUE) {
10331 ip = ISEQ_BODY(ip)->parent_iseq;
10335 ADD_GETLOCAL(ret, node, LVAR_ERRINFO, level);
10338 ADD_INSN(ret, node, putnil);
10349 LABEL *end_label = NEW_LABEL(nd_line(node));
10350 const NODE *default_value = get_nd_value(RNODE_KW_ARG(node)->nd_body);
10352 if (default_value == NODE_SPECIAL_REQUIRED_KEYWORD) {
10354 COMPILE_ERROR(ERROR_ARGS "unreachable");
10357 else if (nd_type_p(default_value, NODE_SYM) ||
10358 nd_type_p(default_value, NODE_REGX) ||
10359 nd_type_p(default_value, NODE_LINE) ||
10360 nd_type_p(default_value, NODE_INTEGER) ||
10361 nd_type_p(default_value, NODE_FLOAT) ||
10362 nd_type_p(default_value, NODE_RATIONAL) ||
10363 nd_type_p(default_value, NODE_IMAGINARY) ||
10364 nd_type_p(default_value, NODE_NIL) ||
10365 nd_type_p(default_value, NODE_TRUE) ||
10366 nd_type_p(default_value, NODE_FALSE)) {
10367 COMPILE_ERROR(ERROR_ARGS "unreachable");
10375 int kw_bits_idx = body->local_table_size - body->param.keyword->bits_start;
10376 int keyword_idx = body->param.keyword->num;
10378 ADD_INSN2(ret, node, checkkeyword, INT2FIX(kw_bits_idx + VM_ENV_DATA_SIZE - 1), INT2FIX(keyword_idx));
10379 ADD_INSNL(ret, node, branchif, end_label);
10380 CHECK(COMPILE_POPPED(ret, "keyword default argument", RNODE_KW_ARG(node)->nd_body));
10381 ADD_LABEL(ret, end_label);
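/*
 * compile_attrasgn(): `recv.attr = val` / `recv[i] = val`.  When the result
 * is needed, the assigned value is kept on the stack with setn before the
 * setter send, whose own return value is popped; &. forms go through the
 * qcall branch helpers.
 */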
10391 unsigned int flag = 0;
10392 ID mid = RNODE_ATTRASGN(node)->nd_mid;
10394 LABEL *else_label = NULL;
10399 argc = setup_args(iseq, args, RNODE_ATTRASGN(node)->nd_args, &flag, NULL);
10400 CHECK(!NIL_P(argc));
10402 int asgnflag = COMPILE_RECV(recv, "recv", node, RNODE_ATTRASGN(node)->nd_recv);
10403 CHECK(asgnflag != -1);
10404 flag |= (unsigned int)asgnflag;
10406 debugp_param("argc", argc);
10407 debugp_param("nd_mid", ID2SYM(mid));
10411 mid = rb_id_attrset(mid);
10412 else_label = qcall_branch_start(iseq, recv, &branches, node, node);
10415 ADD_INSN(ret, node, putnil);
10416 ADD_SEQ(ret, recv);
10417 ADD_SEQ(ret, args);
10419 if (flag & VM_CALL_ARGS_SPLAT) {
10420 ADD_INSN(ret, node, dup);
10421 ADD_INSN1(ret, node, putobject, INT2FIX(-1));
10422 ADD_SEND_WITH_FLAG(ret, node, idAREF, INT2FIX(1), INT2FIX(asgnflag));
10423 ADD_INSN1(ret, node, setn, FIXNUM_INC(argc, 2));
10424 ADD_INSN (ret, node, pop);
10427 ADD_INSN1(ret, node, setn, FIXNUM_INC(argc, 1));
10431 ADD_SEQ(ret, recv);
10432 ADD_SEQ(ret, args);
10434 ADD_SEND_WITH_FLAG(ret, node, mid, argc, INT2FIX(flag));
10435 qcall_branch_end(iseq, ret, else_label, branches, node, node);
10436 ADD_INSN(ret, node, pop);
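/*
 * Support for `# shareable_constant_value:` magic comments.
 * compile_make_shareable_node() emits a call to the frozen-core
 * make_shareable / make_shareable_copy on the constant's value, and
 * compile_ensure_shareable_node() emits ensure_shareable together with the
 * constant path used in error messages.
 */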
10443 ADD_INSN1(ret, value, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
10451 ADD_SEND_WITH_FLAG(ret, value, rb_intern("make_shareable_copy"), INT2FIX(1), INT2FIX(VM_CALL_ARGS_SIMPLE));
10458 ADD_SEND_WITH_FLAG(ret, value, rb_intern("make_shareable"), INT2FIX(1), INT2FIX(VM_CALL_ARGS_SIMPLE));
10465 node_const_decl_val(const NODE *node)
10468 switch (nd_type(node)) {
10470 if (RNODE_CDECL(node)->nd_vid) {
10471 path = rb_id2str(RNODE_CDECL(node)->nd_vid);
10475 node = RNODE_CDECL(node)->nd_else;
10483 rb_str_append(path, rb_id2str(RNODE_COLON3(node)->nd_mid));
10486 rb_bug("unexpected node: %s", ruby_node_name(nd_type(node)));
10492 for (; node && nd_type_p(node, NODE_COLON2); node = RNODE_COLON2(node)->nd_head) {
10493 rb_ary_push(path, rb_id2str(RNODE_COLON2(node)->nd_mid));
10495 if (node && nd_type_p(node, NODE_CONST)) {
10497 rb_ary_push(path, rb_id2str(RNODE_CONST(node)->nd_vid));
10499 else if (node && nd_type_p(node, NODE_COLON3)) {
10501 rb_ary_push(path, rb_id2str(RNODE_COLON3(node)->nd_mid));
10511 path = rb_fstring(path);
10516 const_decl_path(NODE *dest)
10519 if (!nd_type_p(dest, NODE_CALL)) {
10520 path = node_const_decl_val(dest);
10531 VALUE path = const_decl_path(dest);
10532 ADD_INSN1(ret, value, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
10533 CHECK(COMPILE(ret, "compile_ensure_shareable_node", value));
10534 ADD_INSN1(ret, value, putobject, path);
10536 ADD_SEND_WITH_FLAG(ret, value, rb_intern("ensure_shareable"), INT2FIX(2), INT2FIX(VM_CALL_ARGS_SIMPLE));
10541#ifndef SHAREABLE_BARE_EXPRESSION
10542#define SHAREABLE_BARE_EXPRESSION 1
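/*
 * compile_shareable_literal_constant(): recursively compiles the right-hand
 * side of a constant assignment under a shareable_constant_value mode.
 * Plain literals (including arrays and hashes of literals) are folded into
 * a frozen shareable object at compile time; anything else falls back to
 * the make_shareable / ensure_shareable calls above.
 */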
10546 compile_shareable_literal_constant(rb_iseq_t *iseq, LINK_ANCHOR *ret, enum rb_parser_shareability shareable, NODE *dest, const NODE *node, size_t level, VALUE *value_p, int *shareable_literal_p)
10548# define compile_shareable_literal_constant_next(node, anchor, value_p, shareable_literal_p) \
10549 compile_shareable_literal_constant(iseq, anchor, shareable, dest, node, level+1, value_p, shareable_literal_p)
10551 DECL_ANCHOR(anchor);
10553 enum node_type type = node ? nd_type(node) : NODE_NIL;
10565 *value_p = rb_node_sym_string_val(node);
10568 *value_p = rb_node_regx_string_val(node);
10571 *value_p = rb_node_line_lineno_val(node);
10574 *value_p = rb_node_integer_literal_val(node);
10577 *value_p = rb_node_float_literal_val(node);
10579 case NODE_RATIONAL:
10580 *value_p = rb_node_rational_literal_val(node);
10582 case NODE_IMAGINARY:
10583 *value_p = rb_node_imaginary_literal_val(node);
10585 case NODE_ENCODING:
10586 *value_p = rb_node_encoding_val(node);
10589 CHECK(COMPILE(ret, "shareable_literal_constant", node));
10590 *shareable_literal_p = 1;
10594 CHECK(COMPILE(ret, "shareable_literal_constant", node));
10595 if (shareable == rb_parser_shareable_literal) {
10601 ADD_SEND_WITH_FLAG(ret, node, idUMinus, INT2FIX(0), INT2FIX(VM_CALL_ARGS_SIMPLE));
10604 *shareable_literal_p = 1;
10608 VALUE lit = rb_node_str_string_val(node);
10609 ADD_INSN1(ret, node, putobject, lit);
10612 *shareable_literal_p = 1;
10618 VALUE lit = rb_node_file_path_val(node);
10619 ADD_INSN1(ret, node, putobject, lit);
10622 *shareable_literal_p = 1;
10630 ADD_INSN1(ret, node, putobject, lit);
10633 *shareable_literal_p = 1;
10639 INIT_ANCHOR(anchor);
10641 for (NODE *n = (NODE *)node; n; n = RNODE_LIST(n)->nd_next) {
10643 int shareable_literal_p2;
10644 NODE *elt = RNODE_LIST(n)->nd_head;
10646 CHECK(compile_shareable_literal_constant_next(elt, anchor, &val, &shareable_literal_p2));
10647 if (shareable_literal_p2) {
10650 else if (RTEST(lit)) {
10656 if (!UNDEF_P(val)) {
10668 if (!RNODE_HASH(node)->nd_brace) {
10670 *shareable_literal_p = 0;
10673 for (NODE *n = RNODE_HASH(node)->nd_head; n; n = RNODE_LIST(RNODE_LIST(n)->nd_next)->nd_next) {
10674 if (!RNODE_LIST(n)->nd_head) {
10676 goto compile_shareable;
10680 INIT_ANCHOR(anchor);
10681 lit = rb_hash_new();
10682 for (NODE *n = RNODE_HASH(node)->nd_head; n; n = RNODE_LIST(RNODE_LIST(n)->nd_next)->nd_next) {
10684 VALUE value_val = 0;
10685 int shareable_literal_p2;
10686 NODE *key = RNODE_LIST(n)->nd_head;
10687 NODE *val = RNODE_LIST(RNODE_LIST(n)->nd_next)->nd_head;
10688 CHECK(compile_shareable_literal_constant_next(key, anchor, &key_val, &shareable_literal_p2));
10689 if (shareable_literal_p2) {
10692 else if (RTEST(lit)) {
10693 rb_hash_clear(lit);
10696 CHECK(compile_shareable_literal_constant_next(val, anchor, &value_val, &shareable_literal_p2));
10697 if (shareable_literal_p2) {
10700 else if (RTEST(lit)) {
10701 rb_hash_clear(lit);
10705 if (!UNDEF_P(key_val) && !UNDEF_P(value_val)) {
10706 rb_hash_aset(lit, key_val, value_val);
10709 rb_hash_clear(lit);
10720 if (shareable == rb_parser_shareable_literal &&
10721 (SHAREABLE_BARE_EXPRESSION || level > 0)) {
10722 CHECK(compile_ensure_shareable_node(iseq, ret, dest, node));
10724 *shareable_literal_p = 1;
10727 CHECK(COMPILE(ret, "shareable_literal_constant", node));
10729 *shareable_literal_p = 0;
10735 if (nd_type(node) == NODE_LIST) {
10736 ADD_INSN1(anchor, node, newarray, INT2FIX(RNODE_LIST(node)->as.nd_alen));
10738 else if (nd_type(node) == NODE_HASH) {
10739 int len = (int)RNODE_LIST(RNODE_HASH(node)->nd_head)->as.nd_alen;
10740 ADD_INSN1(anchor, node, newhash, INT2FIX(len));
10743 *shareable_literal_p = 0;
10744 ADD_SEQ(ret, anchor);
10750 if (nd_type(node) == NODE_LIST) {
10751 ADD_INSN1(anchor, node, newarray, INT2FIX(RNODE_LIST(node)->as.nd_alen));
10753 else if (nd_type(node) == NODE_HASH) {
10754 int len = (int)RNODE_LIST(RNODE_HASH(node)->nd_head)->as.nd_alen;
10755 ADD_INSN1(anchor, node, newhash, INT2FIX(len));
10757 CHECK(compile_make_shareable_node(iseq, ret, anchor, node, false));
10759 *shareable_literal_p = 1;
10763 ADD_INSN1(ret, node, putobject, val);
10766 *shareable_literal_p = 1;
10773 compile_shareable_constant_value(rb_iseq_t *iseq, LINK_ANCHOR *ret, enum rb_parser_shareability shareable, const NODE *lhs, const NODE *value)
10777 DECL_ANCHOR(anchor);
10778 INIT_ANCHOR(anchor);
10780 switch (shareable) {
10781 case rb_parser_shareable_none:
10782 CHECK(COMPILE(ret, "compile_shareable_constant_value", value));
10785 case rb_parser_shareable_literal:
10786 CHECK(compile_shareable_literal_constant(iseq, anchor, shareable, (NODE *)lhs, value, 0, &val, &literal_p));
10787 ADD_SEQ(ret, anchor);
10790 case rb_parser_shareable_copy:
10791 case rb_parser_shareable_everything:
10792 CHECK(compile_shareable_literal_constant(iseq, anchor, shareable, (NODE *)lhs, value, 0, &val, &literal_p));
10794 CHECK(compile_make_shareable_node(iseq, ret, anchor, value, shareable == rb_parser_shareable_copy));
10797 ADD_SEQ(ret, anchor);
10801 rb_bug("unexpected rb_parser_shareability: %d", shareable);
10818 int lineno = ISEQ_COMPILE_DATA(iseq)->last_line;
10819 if (lineno == 0) lineno = FIX2INT(rb_iseq_first_lineno(iseq));
10820 debugs("node: NODE_NIL(implicit)\n");
10821 ADD_SYNTHETIC_INSN(ret, lineno, -1, putnil);
10825 return iseq_compile_each0(iseq, ret, node, popped);
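/*
 * iseq_compile_each0(): the main AST dispatch.  When a node starts a new
 * source line it emits line/coverage trace events, then switches on
 * nd_type(node) and delegates to the compile_* helpers above.
 */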
10831 const int line = (int)nd_line(node);
10832 const enum node_type type = nd_type(node);
10835 if (ISEQ_COMPILE_DATA(iseq)->last_line == line) {
10839 if (nd_fl_newline(node)) {
10841 ISEQ_COMPILE_DATA(iseq)->last_line = line;
10842 if (line > 0 && ISEQ_COVERAGE(iseq) && ISEQ_LINE_COVERAGE(iseq)) {
10843 event |= RUBY_EVENT_COVERAGE_LINE;
10845 ADD_TRACE(ret, event);
10849 debug_node_start(node);
10850#undef BEFORE_RETURN
10851#define BEFORE_RETURN debug_node_end()
10855 CHECK(compile_block(iseq, ret, node, popped));
10859 CHECK(compile_if(iseq, ret, node, popped, type));
10862 CHECK(compile_case(iseq, ret, node, popped));
10865 CHECK(compile_case2(iseq, ret, node, popped));
10868 CHECK(compile_case3(iseq, ret, node, popped));
10872 CHECK(compile_loop(iseq, ret, node, popped, type));
10876 CHECK(compile_iter(iseq, ret, node, popped));
10878 case NODE_FOR_MASGN:
10879 CHECK(compile_for_masgn(iseq, ret, node, popped));
10882 CHECK(compile_break(iseq, ret, node, popped));
10885 CHECK(compile_next(iseq, ret, node, popped));
10888 CHECK(compile_redo(iseq, ret, node, popped));
10891 CHECK(compile_retry(iseq, ret, node, popped));
10894 CHECK(COMPILE_(ret, "NODE_BEGIN", RNODE_BEGIN(node)->nd_body, popped));
10898 CHECK(compile_rescue(iseq, ret, node, popped));
10901 CHECK(compile_resbody(iseq, ret, node, popped));
10904 CHECK(compile_ensure(iseq, ret, node, popped));
10909 LABEL *end_label = NEW_LABEL(line);
10910 CHECK(COMPILE(ret, "nd_1st", RNODE_OR(node)->nd_1st));
10912 ADD_INSN(ret, node, dup);
10914 if (type == NODE_AND) {
10915 ADD_INSNL(ret, node, branchunless, end_label);
10918 ADD_INSNL(ret, node, branchif, end_label);
10921 ADD_INSN(ret, node, pop);
10923 CHECK(COMPILE_(ret, "nd_2nd", RNODE_OR(node)->nd_2nd, popped));
10924 ADD_LABEL(ret, end_label);
10929 bool prev_in_masgn = ISEQ_COMPILE_DATA(iseq)->in_masgn;
10930 ISEQ_COMPILE_DATA(iseq)->in_masgn = true;
10931 compile_massign(iseq, ret, node, popped);
10932 ISEQ_COMPILE_DATA(iseq)->in_masgn = prev_in_masgn;
10937 ID id = RNODE_LASGN(node)->nd_vid;
10938 int idx = ISEQ_BODY(body->local_iseq)->local_table_size - get_local_var_idx(iseq, id);
10940 debugs("lvar: %s idx: %d\n", rb_id2name(id), idx);
10941 CHECK(COMPILE(ret, "rvalue", RNODE_LASGN(node)->nd_value));
10944 ADD_INSN(ret, node, dup);
10946 ADD_SETLOCAL(ret, node, idx, get_lvar_level(iseq));
10951 ID id = RNODE_DASGN(node)->nd_vid;
10952 CHECK(COMPILE(ret, "dvalue", RNODE_DASGN(node)->nd_value));
10953 debugi("dassn id", rb_id2str(id) ? id : '*');
10956 ADD_INSN(ret, node, dup);
10959 idx = get_dyna_var_idx(iseq, id, &lv, &ls);
10962 COMPILE_ERROR(ERROR_ARGS "NODE_DASGN: unknown id (%"PRIsVALUE")",
10966 ADD_SETLOCAL(ret, node, ls - idx, lv);
10970 CHECK(COMPILE(ret, "lvalue", RNODE_GASGN(node)->nd_value));
10973 ADD_INSN(ret, node, dup);
10975 ADD_INSN1(ret, node, setglobal, ID2SYM(RNODE_GASGN(node)->nd_vid));
10979 CHECK(COMPILE(ret, "lvalue", RNODE_IASGN(node)->nd_value));
10981 ADD_INSN(ret, node, dup);
10983 ADD_INSN2(ret, node, setinstancevariable,
10984 ID2SYM(RNODE_IASGN(node)->nd_vid),
10985 get_ivar_ic_value(iseq,RNODE_IASGN(node)->nd_vid));
10989 if (RNODE_CDECL(node)->nd_vid) {
10990 CHECK(compile_shareable_constant_value(iseq, ret, RNODE_CDECL(node)->shareability, node, RNODE_CDECL(node)->nd_value));
10993 ADD_INSN(ret, node, dup);
10996 ADD_INSN1(ret, node, putspecialobject,
10997 INT2FIX(VM_SPECIAL_OBJECT_CONST_BASE));
10998 ADD_INSN1(ret, node, setconstant, ID2SYM(RNODE_CDECL(node)->nd_vid));
11001 compile_cpath(ret, iseq, RNODE_CDECL(node)->nd_else);
11002 CHECK(compile_shareable_constant_value(iseq, ret, RNODE_CDECL(node)->shareability, node, RNODE_CDECL(node)->nd_value));
11003 ADD_INSN(ret, node, swap);
11006 ADD_INSN1(ret, node, topn, INT2FIX(1));
11007 ADD_INSN(ret, node, swap);
11010 ADD_INSN1(ret, node, setconstant, ID2SYM(get_node_colon_nd_mid(RNODE_CDECL(node)->nd_else)));
11015 CHECK(COMPILE(ret, "cvasgn val", RNODE_CVASGN(node)->nd_value));
11017 ADD_INSN(ret, node, dup);
11019 ADD_INSN2(ret, node, setclassvariable,
11020 ID2SYM(RNODE_CVASGN(node)->nd_vid),
11021 get_cvar_ic_value(iseq, RNODE_CVASGN(node)->nd_vid));
11024 case NODE_OP_ASGN1:
11025 CHECK(compile_op_asgn1(iseq, ret, node, popped));
11027 case NODE_OP_ASGN2:
11028 CHECK(compile_op_asgn2(iseq, ret, node, popped));
11030 case NODE_OP_CDECL:
11031 CHECK(compile_op_cdecl(iseq, ret, node, popped));
11033 case NODE_OP_ASGN_AND:
11034 case NODE_OP_ASGN_OR:
11035 CHECK(compile_op_log(iseq, ret, node, popped, type));
11039 if (compile_call_precheck_freeze(iseq, ret, node, node, popped) == TRUE) {
11045 if (compile_call(iseq, ret, node, type, node, popped, false) == COMPILE_NG) {
11051 CHECK(compile_super(iseq, ret, node, popped, type));
11054 CHECK(compile_array(iseq, ret, node, popped, TRUE) >= 0);
11059 ADD_INSN1(ret, node, newarray, INT2FIX(0));
11064 CHECK(compile_hash(iseq, ret, node, FALSE, popped) >= 0);
11067 CHECK(compile_return(iseq, ret, node, popped));
11070 CHECK(compile_yield(iseq, ret, node, popped));
11074 compile_lvar(iseq, ret, node, RNODE_LVAR(node)->nd_vid);
11080 debugi("nd_vid", RNODE_DVAR(node)->nd_vid);
11082 idx = get_dyna_var_idx(iseq, RNODE_DVAR(node)->nd_vid, &lv, &ls);
11084 COMPILE_ERROR(ERROR_ARGS "unknown dvar (%"PRIsVALUE")",
11085 rb_id2str(RNODE_DVAR(node)->nd_vid));
11088 ADD_GETLOCAL(ret, node, ls - idx, lv);
11093 ADD_INSN1(ret, node, getglobal, ID2SYM(RNODE_GVAR(node)->nd_vid));
11095 ADD_INSN(ret, node, pop);
11100 debugi("nd_vid", RNODE_IVAR(node)->nd_vid);
11102 ADD_INSN2(ret, node, getinstancevariable,
11103 ID2SYM(RNODE_IVAR(node)->nd_vid),
11104 get_ivar_ic_value(iseq, RNODE_IVAR(node)->nd_vid));
11109 debugi("nd_vid", RNODE_CONST(node)->nd_vid);
11111 if (ISEQ_COMPILE_DATA(iseq)->option->inline_const_cache) {
11113 VALUE segments = rb_ary_new_from_args(1, ID2SYM(RNODE_CONST(node)->nd_vid));
11114 RB_OBJ_SET_FROZEN_SHAREABLE(segments);
11115 ADD_INSN1(ret, node, opt_getconstant_path, segments);
11119 ADD_INSN(ret, node, putnil);
11120 ADD_INSN1(ret, node, putobject, Qtrue);
11121 ADD_INSN1(ret, node, getconstant, ID2SYM(RNODE_CONST(node)->nd_vid));
11125 ADD_INSN(ret, node, pop);
11131 ADD_INSN2(ret, node, getclassvariable,
11132 ID2SYM(RNODE_CVAR(node)->nd_vid),
11133 get_cvar_ic_value(iseq, RNODE_CVAR(node)->nd_vid));
11137 case NODE_NTH_REF:{
11139 if (!RNODE_NTH_REF(node)->nd_nth) {
11140 ADD_INSN(ret, node, putnil);
11143 ADD_INSN2(ret, node, getspecial, INT2FIX(1),
11144 INT2FIX(RNODE_NTH_REF(node)->nd_nth << 1));
11148 case NODE_BACK_REF:{
11150 ADD_INSN2(ret, node, getspecial, INT2FIX(1),
11151 INT2FIX(0x01 | (RNODE_BACK_REF(node)->nd_nth << 1)));
11158 CHECK(compile_match(iseq, ret, node, popped, type));
11162 ADD_INSN1(ret, node, putobject, rb_node_sym_string_val(node));
11168 ADD_INSN1(ret, node, putobject, rb_node_line_lineno_val(node));
11172 case NODE_ENCODING:{
11174 ADD_INSN1(ret, node, putobject, rb_node_encoding_val(node));
11178 case NODE_INTEGER:{
11179 VALUE lit = rb_node_integer_literal_val(node);
11181 debugp_param("integer", lit);
11183 ADD_INSN1(ret, node, putobject, lit);
11189 VALUE lit = rb_node_float_literal_val(node);
11191 debugp_param("float", lit);
11193 ADD_INSN1(ret, node, putobject, lit);
11198 case NODE_RATIONAL:{
11199 VALUE lit = rb_node_rational_literal_val(node);
11201 debugp_param("rational", lit);
11203 ADD_INSN1(ret, node, putobject, lit);
11208 case NODE_IMAGINARY:{
11209 VALUE lit = rb_node_imaginary_literal_val(node);
11211 debugp_param("imaginary", lit);
11213 ADD_INSN1(ret, node, putobject, lit);
11220 debugp_param("nd_lit", get_string_value(node));
11222 VALUE lit = get_string_value(node);
11225 option->frozen_string_literal != ISEQ_FROZEN_STRING_LITERAL_DISABLED) {
11226 lit = rb_str_with_debug_created_info(lit, rb_iseq_path(iseq), line);
11227 RB_OBJ_SET_SHAREABLE(lit);
11229 switch (option->frozen_string_literal) {
11230 case ISEQ_FROZEN_STRING_LITERAL_UNSET:
11231 ADD_INSN1(ret, node, putchilledstring, lit);
11233 case ISEQ_FROZEN_STRING_LITERAL_DISABLED:
11234 ADD_INSN1(ret, node, putstring, lit);
11236 case ISEQ_FROZEN_STRING_LITERAL_ENABLED:
11237 ADD_INSN1(ret, node, putobject, lit);
11240 rb_bug("invalid frozen_string_literal");
11247 compile_dstr(iseq, ret, node);
11250 ADD_INSN(ret, node, pop);
11255 ADD_CALL_RECEIVER(ret, node);
11256 VALUE str = rb_node_str_string_val(node);
11257 ADD_INSN1(ret, node, putobject, str);
11259 ADD_CALL(ret, node, idBackquote, INT2FIX(1));
11262 ADD_INSN(ret, node, pop);
11267 ADD_CALL_RECEIVER(ret, node);
11268 compile_dstr(iseq, ret, node);
11269 ADD_CALL(ret, node, idBackquote, INT2FIX(1));
11272 ADD_INSN(ret, node, pop);
11277 CHECK(compile_evstr(iseq, ret, RNODE_EVSTR(node)->nd_body, popped));
11281 VALUE lit = rb_node_regx_string_val(node);
11282 RB_OBJ_SET_SHAREABLE(lit);
11283 ADD_INSN1(ret, node, putobject, lit);
11289 compile_dregx(iseq, ret, node, popped);
11292 int ic_index = body->ise_size++;
11294 block_iseq = NEW_CHILD_ISEQ(RNODE_ONCE(node)->nd_body, make_name_for_block(iseq), ISEQ_TYPE_PLAIN, line);
11296 ADD_INSN2(ret, node, once, block_iseq, INT2FIX(ic_index));
11300 ADD_INSN(ret, node, pop);
11304 case NODE_ARGSCAT:{
11306 CHECK(COMPILE(ret, "argscat head", RNODE_ARGSCAT(node)->nd_head));
11307 ADD_INSN1(ret, node, splatarray, Qfalse);
11308 ADD_INSN(ret, node, pop);
11309 CHECK(COMPILE(ret, "argscat body", RNODE_ARGSCAT(node)->nd_body));
11310 ADD_INSN1(ret, node, splatarray, Qfalse);
11311 ADD_INSN(ret, node, pop);
11314 CHECK(COMPILE(ret, "argscat head", RNODE_ARGSCAT(node)->nd_head));
11315 const NODE *body_node = RNODE_ARGSCAT(node)->nd_body;
11316 if (nd_type_p(body_node, NODE_LIST)) {
11317 CHECK(compile_array(iseq, ret, body_node, popped, FALSE) >= 0);
11320 CHECK(COMPILE(ret, "argscat body", body_node));
11321 ADD_INSN(ret, node, concattoarray);
11326 case NODE_ARGSPUSH:{
11328 CHECK(COMPILE(ret, "argspush head", RNODE_ARGSPUSH(node)->nd_head));
11329 ADD_INSN1(ret, node, splatarray, Qfalse);
11330 ADD_INSN(ret, node, pop);
11331 CHECK(COMPILE_(ret, "argspush body", RNODE_ARGSPUSH(node)->nd_body, popped));
11334 CHECK(COMPILE(ret, "argspush head", RNODE_ARGSPUSH(node)->nd_head));
11335 const NODE *body_node = RNODE_ARGSPUSH(node)->nd_body;
11336 if (keyword_node_p(body_node)) {
11337 CHECK(COMPILE_(ret, "array element", body_node, FALSE));
11338 ADD_INSN(ret, node, pushtoarraykwsplat);
11340 else if (static_literal_node_p(body_node, iseq, false)) {
11341 ADD_INSN1(ret, body_node, putobject, static_literal_value(body_node, iseq));
11342 ADD_INSN1(ret, node, pushtoarray, INT2FIX(1));
11345 CHECK(COMPILE_(ret, "array element", body_node, FALSE));
11346 ADD_INSN1(ret, node, pushtoarray, INT2FIX(1));
11352 CHECK(COMPILE(ret, "splat", RNODE_SPLAT(node)->nd_head));
11353 ADD_INSN1(ret, node, splatarray, Qtrue);
11356 ADD_INSN(ret, node, pop);
11361 ID mid = RNODE_DEFN(node)->nd_mid;
11362 const rb_iseq_t *method_iseq = NEW_ISEQ(RNODE_DEFN(node)->nd_defn,
11364 ISEQ_TYPE_METHOD, line);
11366 debugp_param("defn/iseq", rb_iseqw_new(method_iseq));
11367 ADD_INSN2(ret, node, definemethod, ID2SYM(mid), method_iseq);
11371 ADD_INSN1(ret, node, putobject, ID2SYM(mid));
11377 ID mid = RNODE_DEFS(node)->nd_mid;
11378 const rb_iseq_t * singleton_method_iseq = NEW_ISEQ(RNODE_DEFS(node)->nd_defn,
11380 ISEQ_TYPE_METHOD, line);
11382 debugp_param("defs/iseq", rb_iseqw_new(singleton_method_iseq));
11383 CHECK(COMPILE(ret, "defs: recv", RNODE_DEFS(node)->nd_recv));
11384 ADD_INSN2(ret, node, definesmethod, ID2SYM(mid), singleton_method_iseq);
11388 ADD_INSN1(ret, node, putobject, ID2SYM(mid));
11393 ADD_INSN1(ret, node, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
11394 ADD_INSN1(ret, node, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_CBASE));
11395 CHECK(COMPILE(ret, "alias arg1", RNODE_ALIAS(node)->nd_1st));
11396 CHECK(COMPILE(ret, "alias arg2", RNODE_ALIAS(node)->nd_2nd));
11397 ADD_SEND(ret, node, id_core_set_method_alias, INT2FIX(3));
11400 ADD_INSN(ret, node, pop);
11405 ADD_INSN1(ret, node, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
11406 ADD_INSN1(ret, node, putobject, ID2SYM(RNODE_VALIAS(node)->nd_alias));
11407 ADD_INSN1(ret, node, putobject, ID2SYM(RNODE_VALIAS(node)->nd_orig));
11408 ADD_SEND(ret, node, id_core_set_variable_alias, INT2FIX(2));
11411 ADD_INSN(ret, node, pop);
11418 for (long i = 0; i < ary->len; i++) {
11419 ADD_INSN1(ret, node, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
11420 ADD_INSN1(ret, node, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_CBASE));
11421 CHECK(COMPILE(ret, "undef arg", ary->data[i]));
11422 ADD_SEND(ret, node, id_core_undef_method, INT2FIX(2));
11424 if (i < ary->len - 1) {
11425 ADD_INSN(ret, node, pop);
11430 ADD_INSN(ret, node, pop);
11435 const rb_iseq_t *class_iseq = NEW_CHILD_ISEQ(RNODE_CLASS(node)->nd_body,
11436 rb_str_freeze(rb_sprintf("<class:%"PRIsVALUE">", rb_id2str(get_node_colon_nd_mid(RNODE_CLASS(node)->nd_cpath)))),
11437 ISEQ_TYPE_CLASS, line);
11438 const int flags = VM_DEFINECLASS_TYPE_CLASS |
11439 (RNODE_CLASS(node)->nd_super ? VM_DEFINECLASS_FLAG_HAS_SUPERCLASS : 0) |
11440 compile_cpath(ret, iseq, RNODE_CLASS(node)->nd_cpath);
11442 CHECK(COMPILE(ret, "super", RNODE_CLASS(node)->nd_super));
11443 ADD_INSN3(ret, node, defineclass, ID2SYM(get_node_colon_nd_mid(RNODE_CLASS(node)->nd_cpath)), class_iseq, INT2FIX(flags));
11447 ADD_INSN(ret, node, pop);
11452 const rb_iseq_t *module_iseq = NEW_CHILD_ISEQ(RNODE_MODULE(node)->nd_body,
11453 rb_str_freeze(rb_sprintf("<module:%"PRIsVALUE">", rb_id2str(get_node_colon_nd_mid(RNODE_MODULE(node)->nd_cpath)))),
11454 ISEQ_TYPE_CLASS, line);
11455 const int flags = VM_DEFINECLASS_TYPE_MODULE |
11456 compile_cpath(ret, iseq, RNODE_MODULE(node)->nd_cpath);
11458 ADD_INSN (ret, node, putnil);
11459 ADD_INSN3(ret, node, defineclass, ID2SYM(get_node_colon_nd_mid(RNODE_MODULE(node)->nd_cpath)), module_iseq, INT2FIX(flags));
11463 ADD_INSN(ret, node, pop);
11469 const rb_iseq_t *singleton_class = NEW_ISEQ(RNODE_SCLASS(node)->nd_body, rb_fstring_lit("singleton class"),
11470 ISEQ_TYPE_CLASS, line);
11472 CHECK(COMPILE(ret, "sclass#recv", RNODE_SCLASS(node)->nd_recv));
11473 ADD_INSN (ret, node, putnil);
11474 CONST_ID(singletonclass, "singletonclass");
11475 ADD_INSN3(ret, node, defineclass,
11476 ID2SYM(singletonclass), singleton_class,
11477 INT2FIX(VM_DEFINECLASS_TYPE_SINGLETON_CLASS));
11481 ADD_INSN(ret, node, pop);
11486 CHECK(compile_colon2(iseq, ret, node, popped));
11489 CHECK(compile_colon3(iseq, ret, node, popped));
11492 CHECK(compile_dots(iseq, ret, node, popped, FALSE));
11495 CHECK(compile_dots(iseq, ret, node, popped, TRUE));
11499 LABEL *lend = NEW_LABEL(line);
11500 LABEL *ltrue = NEW_LABEL(line);
11501 LABEL *lfalse = NEW_LABEL(line);
11502 CHECK(compile_flip_flop(iseq, ret, node, type == NODE_FLIP2,
11504 ADD_LABEL(ret, ltrue);
11505 ADD_INSN1(ret, node, putobject, Qtrue);
11506 ADD_INSNL(ret, node, jump, lend);
11507 ADD_LABEL(ret, lfalse);
11508 ADD_INSN1(ret, node, putobject, Qfalse);
11509 ADD_LABEL(ret, lend);
11514 ADD_INSN(ret, node, putself);
11520 ADD_INSN(ret, node, putnil);
11526 ADD_INSN1(ret, node, putobject, Qtrue);
11532 ADD_INSN1(ret, node, putobject, Qfalse);
11537 CHECK(compile_errinfo(iseq, ret, node, popped));
11541 CHECK(compile_defined_expr(iseq, ret, node, Qtrue, false));
11544 case NODE_POSTEXE:{
11548 int is_index = body->ise_size++;
11550 rb_iseq_new_with_callback_new_callback(build_postexe_iseq, RNODE_POSTEXE(node)->nd_body);
11552 NEW_CHILD_ISEQ_WITH_CALLBACK(ifunc, rb_fstring(make_name_for_block(iseq)), ISEQ_TYPE_BLOCK, line);
11554 ADD_INSN2(ret, node, once, once_iseq,
INT2FIX(is_index));
11558 ADD_INSN(ret, node, pop);
11563 CHECK(compile_kw_arg(iseq, ret, node, popped));
11566 compile_dstr(iseq, ret, node);
11568 ADD_INSN(ret, node, intern);
11571 ADD_INSN(ret, node, pop);
11575 case NODE_ATTRASGN:
11576 CHECK(compile_attrasgn(iseq, ret, node, popped));
11580 const rb_iseq_t *block = NEW_CHILD_ISEQ(RNODE_LAMBDA(node)->nd_body, make_name_for_block(iseq), ISEQ_TYPE_BLOCK, line);
11583 ADD_INSN1(ret, node, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
11584 ADD_CALL_WITH_BLOCK(ret, node, idLambda, argc, block);
11588 ADD_INSN(ret, node, pop);
11593 UNKNOWN_NODE("iseq_compile_each", node, COMPILE_NG);
11608 insn_data_length(INSN *iobj)
11610 return insn_len(iobj->insn_id);
11614 calc_sp_depth(int depth, INSN *insn)
11616 return comptime_insn_stack_increase(depth, insn->insn_id, insn->operands);
11620 opobj_inspect(VALUE obj)
11640 insn_data_to_s_detail(INSN *iobj)
11642 VALUE str = rb_sprintf("%-20s ", insn_name(iobj->insn_id));
11644 if (iobj->operands) {
11645 const char *types = insn_op_types(iobj->insn_id);
11648 for (j = 0; types[j]; j++) {
11649 char type = types[j];
11655 rb_str_catf(str, LABEL_FORMAT, lobj->label_no);
11673 VALUE v = OPERAND_AT(iobj, j);
11688 rb_str_catf(str, "<ivc:%d>", FIX2INT(OPERAND_AT(iobj, j)));
11691 rb_str_catf(str, "<icvarc:%d>", FIX2INT(OPERAND_AT(iobj, j)));
11694 rb_str_catf(str, "<ise:%d>", FIX2INT(OPERAND_AT(iobj, j)));
11700 if (vm_ci_mid(ci)) rb_str_catf(str, "%"PRIsVALUE, rb_id2str(vm_ci_mid(ci)));
11701 rb_str_catf(str, ", %d>", vm_ci_argc(ci));
11709 void *func = (void *)OPERAND_AT(iobj, j);
11712 if (dladdr(func, &info) && info.dli_sname) {
11717 rb_str_catf(str, "<%p>", func);
11727 if (types[j + 1]) {
11738 dump_disasm_list_with_cursor(link, NULL, NULL);
11749 printf("-- raw disasm--------\n");
11752 if (curr) printf(curr == link ? "*" : " ");
11753 switch (link->type) {
11754 case ISEQ_ELEMENT_INSN:
11756 iobj = (INSN *)link;
11757 str = insn_data_to_s_detail(iobj);
11758 printf(" %04d %-65s(%4u)\n", pos, StringValueCStr(str), iobj->insn_info.line_no);
11759 pos += insn_data_length(iobj);
11762 case ISEQ_ELEMENT_LABEL:
11764 lobj = (LABEL *)link;
11765 printf(LABEL_FORMAT" [sp: %d, unremovable: %d, refcnt: %d]%s\n", lobj->label_no, lobj->sp, lobj->unremovable, lobj->refcnt,
11766 dest == lobj ? " <---" : "");
11769 case ISEQ_ELEMENT_TRACE:
11772 printf(" trace: %0x\n", trace->event);
11775 case ISEQ_ELEMENT_ADJUST:
11778 printf(" adjust: [label: %d]\n", adjust->label ? adjust->label->label_no : -1);
11783 rb_raise(rb_eSyntaxError, "dump_disasm_list error: %d\n", (int)link->type);
11787 printf("---------------------\n");
11792rb_insn_len(VALUE insn)
11794 return insn_len(insn);
11798rb_insns_name(int i)
11800 return insn_name(i);
11804rb_insns_name_array(void)
11808 for (i = 0; i < VM_INSTRUCTION_SIZE; i++) {
11819 obj = rb_to_symbol_type(obj);
11821 if (st_lookup(labels_table, obj, &tmp) == 0) {
11822 label = NEW_LABEL(0);
11823 st_insert(labels_table, obj, (st_data_t)label);
11826 label = (LABEL *)tmp;
11833get_exception_sym2type(VALUE sym)
11835 static VALUE symRescue, symEnsure, symRetry;
11836 static VALUE symBreak, symRedo, symNext;
11838 if (symRescue == 0) {
11847 if (sym == symRescue) return CATCH_TYPE_RESCUE;
11848 if (sym == symEnsure) return CATCH_TYPE_ENSURE;
11849 if (sym == symRetry) return CATCH_TYPE_RETRY;
11850 if (sym == symBreak) return CATCH_TYPE_BREAK;
11851 if (sym == symRedo) return CATCH_TYPE_REDO;
11852 if (sym == symNext) return CATCH_TYPE_NEXT;
11853 rb_raise(rb_eSyntaxError, "invalid exception symbol: %+"PRIsVALUE, sym);
11866 LABEL *lstart, *lend, *lcont;
11881 lstart = register_label(iseq, labels_table, RARRAY_AREF(v, 2));
11882 lend = register_label(iseq, labels_table, RARRAY_AREF(v, 3));
11883 lcont = register_label(iseq, labels_table, RARRAY_AREF(v, 4));
11887 if (type == CATCH_TYPE_RESCUE ||
11888 type == CATCH_TYPE_BREAK ||
11889 type == CATCH_TYPE_NEXT) {
11895 ADD_CATCH_ENTRY(type, lstart, lend, eiseq, lcont);
11903insn_make_insn_table(void)
11907 table = st_init_numtable_with_size(VM_INSTRUCTION_SIZE);
11909 for (i=0; i<VM_INSTRUCTION_SIZE; i++) {
11923 iseqw = rb_iseq_load(op, (VALUE)iseq, Qnil);
11925 else if (CLASS_OF(op) == rb_cISeq) {
11932 loaded_iseq = rb_iseqw_to_iseq(iseqw);
11933 return loaded_iseq;
11941 unsigned int flag = 0;
11952 if (!NIL_P(vorig_argc)) orig_argc = FIX2INT(vorig_argc);
11954 if (!NIL_P(vkw_arg)) {
11957 size_t n = rb_callinfo_kwarg_bytes(len);
11960 kw_arg->references = 0;
11961 kw_arg->keyword_len = len;
11962 for (i = 0; i < len; i++) {
11965 kw_arg->keywords[i] = kw;
11970 const struct rb_callinfo *ci = new_callinfo(iseq, mid, orig_argc, flag, kw_arg, (flag & VM_CALL_ARGS_SIMPLE) == 0);
11976event_name_to_flag(VALUE sym)
11978#define CHECK_EVENT(ev) if (sym == ID2SYM(rb_intern_const(#ev))) return ev;
11999 int line_no = 0, node_id = -1, insn_idx = 0;
12000 int ret = COMPILE_OK;
12005 static struct st_table *insn_table;
12007 if (insn_table == 0) {
12008 insn_table = insn_make_insn_table();
12011 for (i=0; i<len; i++) {
12017 ADD_TRACE(anchor, event);
12020 LABEL *label = register_label(iseq, labels_table, obj);
12021 ADD_LABEL(anchor, label);
12038 if (st_lookup(insn_table, (st_data_t)insn, &insn_id) == 0) {
12040 COMPILE_ERROR(iseq, line_no,
12041 "unknown instruction: %+"PRIsVALUE, insn);
12046 if (argc != insn_len((VALUE)insn_id)-1) {
12047 COMPILE_ERROR(iseq, line_no,
12048 "operand size mismatch");
12054 argv = compile_data_calloc2(iseq, sizeof(VALUE), argc);
12059 (enum ruby_vminsn_type)insn_id, argc, argv));
12061 for (j=0; j<argc; j++) {
12063 switch (insn_op_type((VALUE)insn_id, j)) {
12065 LABEL *label = register_label(iseq, labels_table, op);
12066 argv[j] = (VALUE)label;
12081 VALUE v = (VALUE)iseq_build_load_iseq(iseq, op);
12092 if (NUM2UINT(op) >= ISEQ_BODY(iseq)->ise_size) {
12093 ISEQ_BODY(iseq)->ise_size = NUM2INT(op) + 1;
12099 op = rb_to_array_type(op);
12103 sym = rb_to_symbol_type(sym);
12108 argv[j] = segments;
12110 ISEQ_BODY(iseq)->ic_size++;
12115 if (NUM2UINT(op) >= ISEQ_BODY(iseq)->ivc_size) {
12116 ISEQ_BODY(iseq)->ivc_size = NUM2INT(op) + 1;
12121 if (NUM2UINT(op) >= ISEQ_BODY(iseq)->icvarc_size) {
12122 ISEQ_BODY(iseq)->icvarc_size = NUM2INT(op) + 1;
12126 argv[j] = iseq_build_callinfo_from_hash(iseq, op);
12129 argv[j] = rb_to_symbol_type(op);
12136 RHASH_TBL_RAW(map)->type = &cdhash_type;
12137 op = rb_to_array_type(op);
12142 register_label(iseq, labels_table, sym);
12143 rb_hash_aset(map, key, (VALUE)label | 1);
12153#if SIZEOF_VALUE <= SIZEOF_LONG
12158 argv[j] = (VALUE)funcptr;
12169 (enum ruby_vminsn_type)insn_id, argc, NULL));
12173 rb_raise(rb_eTypeError, "unexpected object for instruction");
12178 validate_labels(iseq, labels_table);
12179 if (!ret) return ret;
12180 return iseq_setup(iseq, anchor);
12183#define CHECK_ARRAY(v) rb_to_array_type(v)
12184#define CHECK_SYMBOL(v) rb_to_symbol_type(v)
12189 VALUE val = rb_hash_aref(param, sym);
12194 else if (!NIL_P(val)) {
12195 rb_raise(rb_eTypeError, "invalid %+"PRIsVALUE" Fixnum: %+"PRIsVALUE,
12201static const struct rb_iseq_param_keyword *
12207 VALUE key, sym, default_val;
12210 struct rb_iseq_param_keyword *keyword = ZALLOC(struct rb_iseq_param_keyword);
12212 ISEQ_BODY(iseq)->param.flags.has_kw = TRUE;
12214 keyword->num = len;
12215#define SYM(s) ID2SYM(rb_intern_const(#s))
12216 (void)int_param(&keyword->bits_start, params, SYM(kwbits));
12217 i = keyword->bits_start - keyword->num;
12218 ids = (ID *)&ISEQ_BODY(iseq)->local_table[i];
12222 for (i = 0; i < len; i++) {
12226 goto default_values;
12229 keyword->required_num++;
12233 default_len = len - i;
12234 if (default_len == 0) {
12235 keyword->table = ids;
12238 else if (default_len < 0) {
12244 for (j = 0; i < len; i++, j++) {
12258 rb_raise(rb_eTypeError, "keyword default has unsupported len %+"PRIsVALUE, key);
12264 keyword->table = ids;
12265 keyword->default_values = dvs;
12271iseq_insn_each_object_mark_and_move(VALUE *obj, VALUE _)
12273 rb_gc_mark_and_move(obj);
12280 size_t size = sizeof(INSN);
12281 unsigned int pos = 0;
12284#ifdef STRICT_ALIGNMENT
12285 size_t padding = calc_padding((void *)&storage->buff[pos], size);
12287 const size_t padding = 0;
12289 size_t offset = pos + size + padding;
12290 if (offset > storage->size || offset > storage->pos) {
12292 storage = storage->next;
12295#ifdef STRICT_ALIGNMENT
12296 pos += (int)padding;
12299 iobj = (INSN *)&storage->buff[pos];
12301 if (iobj->operands) {
12302 iseq_insn_each_markable_object(iobj, iseq_insn_each_object_mark_and_move, (VALUE)0);
12315 .flags = RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED,
12322#define SYM(s) ID2SYM(rb_intern_const(#s))
12324 unsigned int arg_size, local_size, stack_max;
12326 struct st_table *labels_table = st_init_numtable();
12328 VALUE arg_opt_labels = rb_hash_aref(params, SYM(opt));
12329 VALUE keywords = rb_hash_aref(params, SYM(keyword));
12331 DECL_ANCHOR(anchor);
12332 INIT_ANCHOR(anchor);
12335 ISEQ_BODY(iseq)->local_table_size = len;
12336 ISEQ_BODY(iseq)->local_table = tbl = len > 0 ? (ID *)ALLOC_N(ID, ISEQ_BODY(iseq)->local_table_size) : NULL;
12338 for (i = 0; i < len; i++) {
12341 if (sym_arg_rest == lv) {
12349#define INT_PARAM(F) int_param(&ISEQ_BODY(iseq)->param.F, params, SYM(F))
12350 if (INT_PARAM(lead_num)) {
12351 ISEQ_BODY(iseq)->param.flags.has_lead = TRUE;
12353 if (INT_PARAM(post_num)) ISEQ_BODY(iseq)->param.flags.has_post = TRUE;
12354 if (INT_PARAM(post_start)) ISEQ_BODY(iseq)->param.flags.has_post = TRUE;
12355 if (INT_PARAM(rest_start)) ISEQ_BODY(iseq)->param.flags.has_rest = TRUE;
12356 if (INT_PARAM(block_start)) ISEQ_BODY(iseq)->param.flags.has_block = TRUE;
12359#define INT_PARAM(F) F = (int_param(&x, misc, SYM(F)) ? (unsigned int)x : 0)
12361 INT_PARAM(arg_size);
12362 INT_PARAM(local_size);
12363 INT_PARAM(stack_max);
12368#ifdef USE_ISEQ_NODE_ID
12369 node_ids = rb_hash_aref(misc, ID2SYM(rb_intern("node_ids")));
12377 ISEQ_BODY(iseq)->param.flags.has_opt = !!(len - 1 >= 0);
12379 if (ISEQ_BODY(iseq)->param.flags.has_opt) {
12382 for (i = 0; i < len; i++) {
12384 LABEL *label = register_label(iseq, labels_table, ent);
12385 opt_table[i] = (VALUE)label;
12388 ISEQ_BODY(iseq)->param.opt_num = len - 1;
12389 ISEQ_BODY(iseq)->param.opt_table = opt_table;
12392 else if (!NIL_P(arg_opt_labels)) {
12393 rb_raise(rb_eTypeError, ":opt param is not an array: %+"PRIsVALUE,
12398 ISEQ_BODY(iseq)->param.keyword = iseq_build_kw(iseq, params, keywords);
12400 else if (!NIL_P(keywords)) {
12401 rb_raise(rb_eTypeError, ":keywords param is not an array: %+"PRIsVALUE,
12405 if (Qtrue == rb_hash_aref(params, SYM(ambiguous_param0))) {
12406 ISEQ_BODY(iseq)->param.flags.ambiguous_param0 = TRUE;
12409 if (Qtrue == rb_hash_aref(params, SYM(use_block))) {
12410 ISEQ_BODY(iseq)->param.flags.use_block = TRUE;
12413 if (int_param(&i, params, SYM(kwrest))) {
12414 struct rb_iseq_param_keyword *keyword = (struct rb_iseq_param_keyword *)ISEQ_BODY(iseq)->param.keyword;
12415 if (keyword == NULL) {
12416 ISEQ_BODY(iseq)->param.keyword = keyword = ZALLOC(struct rb_iseq_param_keyword);
12418 keyword->rest_start = i;
12419 ISEQ_BODY(iseq)->param.flags.has_kwrest = TRUE;
12422 iseq_calc_param_size(iseq);
12425 iseq_build_from_ary_exception(iseq, labels_table, exception);
12428 iseq_build_from_ary_body(iseq, anchor, body, node_ids, labels_wrapper);
12430 ISEQ_BODY(iseq)->param.size = arg_size;
12431 ISEQ_BODY(iseq)->local_table_size = local_size;
12432 ISEQ_BODY(iseq)->stack_max = stack_max;
12442 while (body->type == ISEQ_TYPE_BLOCK ||
12443 body->type == ISEQ_TYPE_RESCUE ||
12444 body->type == ISEQ_TYPE_ENSURE ||
12445 body->type == ISEQ_TYPE_EVAL ||
12446 body->type == ISEQ_TYPE_MAIN
12450 for (i = 0; i < body->local_table_size; i++) {
12451 if (body->local_table[i] == id) {
12455 iseq = body->parent_iseq;
12456 body = ISEQ_BODY(iseq);
12469 for (i=0; i<body->local_table_size; i++) {
12470 if (body->local_table[i] == id) {
12480#ifndef IBF_ISEQ_DEBUG
12481#define IBF_ISEQ_DEBUG 0
12484#ifndef IBF_ISEQ_ENABLE_LOCAL_BUFFER
12485#define IBF_ISEQ_ENABLE_LOCAL_BUFFER 0
12488typedef uint32_t ibf_offset_t;
12489#define IBF_OFFSET(ptr) ((ibf_offset_t)(VALUE)(ptr))
12491#define IBF_MAJOR_VERSION ISEQ_MAJOR_VERSION
12493#define IBF_DEVEL_VERSION 5
12494#define IBF_MINOR_VERSION (ISEQ_MINOR_VERSION * 10000 + IBF_DEVEL_VERSION)
12496#define IBF_MINOR_VERSION ISEQ_MINOR_VERSION
12499static const char IBF_ENDIAN_MARK =
12500#ifdef WORDS_BIGENDIAN
12509 uint32_t major_version;
12510 uint32_t minor_version;
12512 uint32_t extra_size;
12514 uint32_t iseq_list_size;
12515 uint32_t global_object_list_size;
12516 ibf_offset_t iseq_list_offset;
12517 ibf_offset_t global_object_list_offset;
12538 unsigned int obj_list_size;
12539 ibf_offset_t obj_list_offset;
12558pinned_list_mark(void *ptr)
12562 for (i = 0; i < list->size; i++) {
12563 if (list->buffer[i]) {
12564 rb_gc_mark(list->buffer[i]);
12576 0, 0, RUBY_TYPED_WB_PROTECTED | RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_EMBEDDABLE
12580pinned_list_fetch(VALUE list, long offset)
12586 if (offset >= ptr->size) {
12587 rb_raise(rb_eIndexError, "object index out of range: %ld", offset);
12590 return ptr->buffer[offset];
12594pinned_list_store(VALUE list, long offset, VALUE object)
12600 if (offset >= ptr->size) {
12601 rb_raise(rb_eIndexError, "object index out of range: %ld", offset);
12608pinned_list_new(long size)
12610 size_t memsize = offsetof(struct pinned_list, buffer) + size * sizeof(VALUE);
12611 VALUE obj_list = rb_data_typed_object_zalloc(0, memsize, &pinned_list_type);
12612 struct pinned_list * ptr = RTYPEDDATA_GET_DATA(obj_list);
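/* Explanatory note (added comment, not in the original source): a pinned_list is a
 * GC-visible array used while loading binary iseqs; every object materialized from
 * the dump is stored at its object-list index so later references can be resolved
 * with pinned_list_fetch(), and the entries stay marked (see pinned_list_mark()
 * above) until loading finishes. */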
12618ibf_dump_pos(struct ibf_dump *dump)
12620 long pos = RSTRING_LEN(dump->current_buffer->str);
12621#if SIZEOF_LONG > SIZEOF_INT
12622 if (pos >= UINT_MAX) {
12626 return (unsigned int)pos;
12630ibf_dump_align(struct ibf_dump *dump, size_t align)
12632 ibf_offset_t pos = ibf_dump_pos(dump);
12634 static const char padding[sizeof(VALUE)];
12635 size_t size = align - ((size_t)pos % align);
12636#if SIZEOF_LONG > SIZEOF_INT
12637 if (pos + size >= UINT_MAX) {
12641 for (; size > sizeof(padding); size -= sizeof(padding)) {
12642 rb_str_cat(dump->current_buffer->str, padding, sizeof(padding));
12644 rb_str_cat(dump->current_buffer->str, padding, size);
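/* Explanatory note (added comment, not in the original source): ibf_dump_align() pads
 * the dump buffer with zero bytes up to the requested alignment so that fixed-width
 * records written afterwards (via IBF_W/IBF_WP below) start at naturally aligned
 * offsets and can be read back with plain memcpy/pointer access. */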
12649ibf_dump_write(struct ibf_dump *dump, const void *buff, unsigned long size)
12651 ibf_offset_t pos = ibf_dump_pos(dump);
12652#if SIZEOF_LONG > SIZEOF_INT
12654 if (size >= UINT_MAX || pos + size >= UINT_MAX) {
12658 rb_str_cat(dump->current_buffer->str, (const char *)buff, size);
12663ibf_dump_write_byte(struct ibf_dump *dump, unsigned char byte)
12665 return ibf_dump_write(dump, &byte, sizeof(unsigned char));
12669ibf_dump_overwrite(struct ibf_dump *dump, void *buff, unsigned int size, long offset)
12671 VALUE str = dump->current_buffer->str;
12672 char *ptr = RSTRING_PTR(str);
12673 if ((unsigned long)(size + offset) > (unsigned long)RSTRING_LEN(str))
12674 rb_bug("ibf_dump_overwrite: overflow");
12675 memcpy(ptr + offset, buff, size);
12679ibf_load_ptr(const struct ibf_load *load, ibf_offset_t *offset, int size)
12681 ibf_offset_t beg = *offset;
12683 return load->current_buffer->buff + beg;
12687ibf_load_alloc(const struct ibf_load *load, ibf_offset_t offset, size_t x, size_t y)
12689 void *buff = ruby_xmalloc2(x, y);
12690 size_t size = x * y;
12691 memcpy(buff, load->current_buffer->buff + offset, size);
12695#define IBF_W_ALIGN(type) (RUBY_ALIGNOF(type) > 1 ? ibf_dump_align(dump, RUBY_ALIGNOF(type)) : (void)0)
12697#define IBF_W(b, type, n) (IBF_W_ALIGN(type), (type *)(VALUE)IBF_WP(b, type, n))
12698#define IBF_WV(variable) ibf_dump_write(dump, &(variable), sizeof(variable))
12699#define IBF_WP(b, type, n) ibf_dump_write(dump, (b), sizeof(type) * (n))
12700#define IBF_R(val, type, n) (type *)ibf_load_alloc(load, IBF_OFFSET(val), sizeof(type), (n))
12701#define IBF_ZERO(variable) memset(&(variable), 0, sizeof(variable))
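/* Explanatory note (added comment, not in the original source): IBF_W aligns the dump
 * and writes an array of `n` items of `type`, returning the write offset cast to a
 * pointer-typed value; IBF_R performs the inverse copy on load. A hypothetical usage
 * sketch, identifiers chosen for illustration only:
 *
 *   ID *dumped = IBF_W(ids, ID, count);    // write count IDs, remember their offset
 *   ID *loaded = IBF_R(dumped, ID, count); // on load: allocate and copy them back
 */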
12704ibf_table_lookup(struct st_table *table, st_data_t key)
12708 if (st_lookup(table, key, &val)) {
12717ibf_table_find_or_insert(struct st_table *table, st_data_t key)
12719 int index = ibf_table_lookup(table, key);
12722 index = (int)table->num_entries;
12723 st_insert(table, key, (st_data_t)index);
12731static void ibf_dump_object_list(struct ibf_dump *dump, ibf_offset_t *obj_list_offset, unsigned int *obj_list_size);
12737ibf_dump_object_table_new(void)
12739 st_table *obj_table = st_init_numtable();
12740 st_insert(obj_table, (st_data_t)Qnil, (st_data_t)0);
12748 return ibf_table_find_or_insert(dump->current_buffer->obj_table, (st_data_t)obj);
12754 if (id == 0 || rb_id2name(id) == NULL) {
12757 return ibf_dump_object(dump, rb_id2sym(id));
12761ibf_load_id(const struct ibf_load *load, const ID id_index)
12763 if (id_index == 0) {
12766 VALUE sym = ibf_load_object(load, id_index);
12776static ibf_offset_t ibf_dump_iseq_each(struct ibf_dump *dump, const rb_iseq_t *iseq);
12781 if (iseq == NULL) {
12785 return ibf_table_find_or_insert(dump->iseq_table, (st_data_t)iseq);
12789static unsigned char
12790ibf_load_byte(const struct ibf_load *load, ibf_offset_t *offset)
12792 if (*offset >= load->current_buffer->size) { rb_raise(rb_eRuntimeError, "invalid bytecode"); }
12793 return (unsigned char)load->current_buffer->buff[(*offset)++];
12809 if (sizeof(VALUE) > 8 || CHAR_BIT != 8) {
12810 ibf_dump_write(dump, &x, sizeof(VALUE));
12814 enum { max_byte_length = sizeof(VALUE) + 1 };
12816 unsigned char bytes[max_byte_length];
12819 for (n = 0; n < sizeof(VALUE) && (x >> (7 - n)); n++, x >>= 8) {
12820 bytes[max_byte_length - 1 - n] = (unsigned char)x;
12826 bytes[max_byte_length - 1 - n] = (unsigned char)x;
12829 ibf_dump_write(dump, bytes + max_byte_length - n, n);
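/* Explanatory note (added comment, not in the original source): the dumper above stores
 * a VALUE in 1..sizeof(VALUE)+1 bytes. The low bits of the first byte encode the total
 * length: a value that fits in 7 bits becomes the single byte (x << 1) | 1, while larger
 * values carry n-1 trailing zero bits followed by a 1 bit, so the loader below recovers
 * n with ntz_int32() (a first byte of 0 means the full 9-byte form). Worked example
 * under these rules: 300 (0x12C) is written as the two bytes 0x06 0x2C; decoding takes
 * 0x06 >> 2 == 1 as the high byte, then appends 0x2C to reconstruct 0x12C. */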
12833ibf_load_small_value(const struct ibf_load *load, ibf_offset_t *offset)
12835 if (sizeof(VALUE) > 8 || CHAR_BIT != 8) {
12836 union { char s[sizeof(VALUE)]; VALUE v; } x;
12838 memcpy(x.s, load->current_buffer->buff + *offset, sizeof(VALUE));
12839 *offset += sizeof(VALUE);
12844 enum { max_byte_length = sizeof(VALUE) + 1 };
12846 const unsigned char *buffer = (const unsigned char *)load->current_buffer->buff;
12847 const unsigned char c = buffer[*offset];
12851 c == 0 ? 9 : ntz_int32(c) + 1;
12854 if (*offset + n > load->current_buffer->size) {
12859 for (i = 1; i < n; i++) {
12861 x |= (VALUE)buffer[*offset + i];
12875 ibf_dump_write_small_value(dump, (VALUE)bf->index);
12877 size_t len = strlen(bf->name);
12878 ibf_dump_write_small_value(dump, (VALUE)len);
12879 ibf_dump_write(dump, bf->name, len);
12883ibf_load_builtin(const struct ibf_load *load, ibf_offset_t *offset)
12885 int i = (int)ibf_load_small_value(load, offset);
12886 int len = (int)ibf_load_small_value(load, offset);
12887 const char *name = (char *)ibf_load_ptr(load, offset, len);
12890 fprintf(stderr, "%.*s!!\n", len, name);
12894 if (table == NULL) rb_raise(rb_eArgError, "builtin function table is not provided");
12895 if (strncmp(table[i].name, name, len) != 0) {
12896 rb_raise(rb_eArgError, "builtin function index (%d) mismatch (expect %s but %s)", i, name, table[i].name);
12907 const int iseq_size = body->iseq_size;
12909 const VALUE *orig_code = rb_iseq_original_iseq(iseq);
12911 ibf_offset_t offset = ibf_dump_pos(dump);
12913 for (code_index=0; code_index<iseq_size;) {
12914 const VALUE insn = orig_code[code_index++];
12915 const char *types = insn_op_types(insn);
12920 ibf_dump_write_small_value(dump, insn);
12923 for (op_index=0; types[op_index]; op_index++, code_index++) {
12924 VALUE op = orig_code[code_index];
12927 switch (types[op_index]) {
12930 wv = ibf_dump_object(dump, op);
12939 wv = ibf_dump_object(dump, arr);
12947 wv = is - ISEQ_IS_ENTRY_START(body, types[op_index]);
12955 wv = ibf_dump_id(dump, (ID)op);
12967 ibf_dump_write_small_value(dump, wv);
12977ibf_load_code(const struct ibf_load *load, rb_iseq_t *iseq, ibf_offset_t bytecode_offset, ibf_offset_t bytecode_size, unsigned int iseq_size)
12980 unsigned int code_index;
12981 ibf_offset_t reading_pos = bytecode_offset;
12985 struct rb_call_data *cd_entries = load_body->call_data;
12988 load_body->iseq_encoded = code;
12989 load_body->iseq_size = 0;
12991 iseq_bits_t * mark_offset_bits;
12993 iseq_bits_t tmp[1] = {0};
12995 if (ISEQ_MBITS_BUFLEN(iseq_size) == 1) {
12996 mark_offset_bits = tmp;
12999 mark_offset_bits = ZALLOC_N(iseq_bits_t, ISEQ_MBITS_BUFLEN(iseq_size));
13001 bool needs_bitmap = false;
13003 for (code_index=0; code_index<iseq_size;) {
13005 const VALUE insn = code[code_index] = ibf_load_small_value(load, &reading_pos);
13006 const char *types = insn_op_types(insn);
13012 for (op_index=0; types[op_index]; op_index++, code_index++) {
13013 const char operand_type = types[op_index];
13014 switch (operand_type) {
13017 VALUE op = ibf_load_small_value(load, &reading_pos);
13018 VALUE v = ibf_load_object(load, op);
13019 code[code_index] = v;
13022 ISEQ_MBITS_SET(mark_offset_bits, code_index);
13023 needs_bitmap = true;
13029 VALUE op = ibf_load_small_value(load, &reading_pos);
13030 VALUE v = ibf_load_object(load, op);
13031 v = rb_hash_dup(v);
13032 RHASH_TBL_RAW(v)->type = &cdhash_type;
13034 RB_OBJ_SET_SHAREABLE(freeze_hide_obj(v));
13039 pinned_list_store(load->current_buffer->obj_list, (long)op, v);
13041 code[code_index] = v;
13042 ISEQ_MBITS_SET(mark_offset_bits, code_index);
13044 needs_bitmap = true;
13049 VALUE op = (VALUE)ibf_load_small_value(load, &reading_pos);
13051 code[code_index] = v;
13054 ISEQ_MBITS_SET(mark_offset_bits, code_index);
13055 needs_bitmap = true;
13061 VALUE op = ibf_load_small_value(load, &reading_pos);
13062 VALUE arr = ibf_load_object(load, op);
13064 IC ic = &ISEQ_IS_IC_ENTRY(load_body, ic_index++);
13065 ic->segments = array_to_idlist(arr);
13067 code[code_index] = (VALUE)ic;
13074 unsigned int op = (unsigned int)ibf_load_small_value(load, &reading_pos);
13076 ISE ic = ISEQ_IS_ENTRY_START(load_body, operand_type) + op;
13077 code[code_index] = (VALUE)ic;
13079 if (operand_type == TS_IVC) {
13082 if (insn == BIN(setinstancevariable)) {
13083 ID iv_name = (ID)code[code_index - 1];
13084 cache->iv_set_name = iv_name;
13087 cache->iv_set_name = 0;
13090 vm_ic_attr_index_initialize(cache, INVALID_SHAPE_ID);
13097 code[code_index] = (VALUE)cd_entries++;
13102 VALUE op = ibf_load_small_value(load, &reading_pos);
13103 code[code_index] = ibf_load_id(load, (ID)(VALUE)op);
13110 code[code_index] = (VALUE)ibf_load_builtin(load, &reading_pos);
13113 code[code_index] = ibf_load_small_value(load, &reading_pos);
13117 if (insn_len(insn) != op_index+1) {
13122 load_body->iseq_size = code_index;
13124 if (ISEQ_MBITS_BUFLEN(load_body->iseq_size) == 1) {
13125 load_body->mark_bits.single = mark_offset_bits[0];
13128 if (needs_bitmap) {
13129 load_body->mark_bits.list = mark_offset_bits;
13132 load_body->mark_bits.list = 0;
13133 ruby_xfree(mark_offset_bits);
13138 RUBY_ASSERT(reading_pos == bytecode_offset + bytecode_size);
13145 int opt_num = ISEQ_BODY(iseq)->param.opt_num;
13148 IBF_W_ALIGN(VALUE);
13149 return ibf_dump_write(dump, ISEQ_BODY(iseq)->param.opt_table, sizeof(VALUE) * (opt_num + 1));
13152 return ibf_dump_pos(dump);
13157ibf_load_param_opt_table(const struct ibf_load *load, ibf_offset_t opt_table_offset, int opt_num)
13161 MEMCPY(table, load->current_buffer->buff + opt_table_offset, VALUE, opt_num+1);
13172 const struct rb_iseq_param_keyword *kw = ISEQ_BODY(iseq)->param.keyword;
13175 struct rb_iseq_param_keyword dump_kw = *kw;
13176 int dv_num = kw->num - kw->required_num;
13181 for (i=0; i<kw->num; i++) ids[i] = (ID)ibf_dump_id(dump, kw->table[i]);
13182 for (i=0; i<dv_num; i++) dvs[i] = (VALUE)ibf_dump_object(dump, kw->default_values[i]);
13184 dump_kw.table = IBF_W(ids, ID, kw->num);
13185 dump_kw.default_values = IBF_W(dvs, VALUE, dv_num);
13186 IBF_W_ALIGN(struct rb_iseq_param_keyword);
13187 return ibf_dump_write(dump, &dump_kw, sizeof(struct rb_iseq_param_keyword) * 1);
13194static const struct rb_iseq_param_keyword *
13195ibf_load_param_keyword(const struct ibf_load *load, ibf_offset_t param_keyword_offset)
13197 if (param_keyword_offset) {
13198 struct rb_iseq_param_keyword *kw = IBF_R(param_keyword_offset, struct rb_iseq_param_keyword, 1);
13199 int dv_num = kw->num - kw->required_num;
13200 VALUE *dvs = dv_num ? IBF_R(kw->default_values, VALUE, dv_num) : NULL;
13203 for (i=0; i<dv_num; i++) {
13204 dvs[i] = ibf_load_object(load, dvs[i]);
13210 kw->default_values = dvs;
13221 ibf_offset_t offset = ibf_dump_pos(dump);
13225 for (i = 0; i < ISEQ_BODY(iseq)->insns_info.size; i++) {
13226 ibf_dump_write_small_value(dump, entries[i].line_no);
13227#ifdef USE_ISEQ_NODE_ID
13228 ibf_dump_write_small_value(dump, entries[i].node_id);
13230 ibf_dump_write_small_value(dump, entries[i].events);
13237ibf_load_insns_info_body(const struct ibf_load *load, ibf_offset_t body_offset, unsigned int size)
13239 ibf_offset_t reading_pos = body_offset;
13243 for (i = 0; i < size; i++) {
13244 entries[i].line_no = (int)ibf_load_small_value(load, &reading_pos);
13245#ifdef USE_ISEQ_NODE_ID
13246 entries[i].node_id = (int)ibf_load_small_value(load, &reading_pos);
13248 entries[i].events = (rb_event_flag_t)ibf_load_small_value(load, &reading_pos);
13255ibf_dump_insns_info_positions(struct ibf_dump *dump, const unsigned int *positions, unsigned int size)
13257 ibf_offset_t offset = ibf_dump_pos(dump);
13259 unsigned int last = 0;
13261 for (i = 0; i < size; i++) {
13262 ibf_dump_write_small_value(dump, positions[i] - last);
13263 last = positions[i];
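/* Explanatory note (added comment, not in the original source): instruction-info
 * positions are dumped as deltas from the previous position (each delta written as a
 * small value above), so the loader below reconstructs the absolute positions by
 * keeping a running `last` and adding each decoded delta to it. */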
13269static unsigned int *
13270ibf_load_insns_info_positions(const struct ibf_load *load, ibf_offset_t positions_offset, unsigned int size)
13272 ibf_offset_t reading_pos = positions_offset;
13273 unsigned int *positions = ALLOC_N(unsigned int, size);
13275 unsigned int last = 0;
13277 for (i = 0; i < size; i++) {
13278 positions[i] = last + (unsigned int)ibf_load_small_value(load, &reading_pos);
13279 last = positions[i];
13289 const int size = body->local_table_size;
13293 for (i=0; i<size; i++) {
13294 VALUE v = ibf_dump_id(dump, body->local_table[i]);
13297 v = ibf_dump_object(dump, ULONG2NUM(body->local_table[i]));
13303 return ibf_dump_write(dump, table, sizeof(ID) * size);
13307ibf_load_local_table(const struct ibf_load *load, ibf_offset_t local_table_offset, int size)
13310 ID *table = IBF_R(local_table_offset, ID, size);
13313 for (i=0; i<size; i++) {
13314 table[i] = ibf_load_id(load, table[i]);
13317 if (size == 1 && table[0] == idERROR_INFO) {
13319 return rb_iseq_shared_exc_local_tbl;
13334 const int size = body->local_table_size;
13335 IBF_W_ALIGN(enum lvar_state);
13336 return ibf_dump_write(dump, body->lvar_states, sizeof(enum lvar_state) * (body->lvar_states ? size : 0));
13339static enum lvar_state *
13340ibf_load_lvar_states(const struct ibf_load *load, ibf_offset_t lvar_states_offset, int size, const ID *local_table)
13342 if (local_table == rb_iseq_shared_exc_local_tbl ||
13347 enum lvar_state *states = IBF_R(lvar_states_offset, enum lvar_state, size);
13358 int *iseq_indices = ALLOCA_N(int, table->size);
13361 for (i=0; i<table->size; i++) {
13362 iseq_indices[i] = ibf_dump_iseq(dump, table->entries[i].iseq);
13365 const ibf_offset_t offset = ibf_dump_pos(dump);
13367 for (i=0; i<table->size; i++) {
13368 ibf_dump_write_small_value(dump, iseq_indices[i]);
13369 ibf_dump_write_small_value(dump, table->entries[i].type);
13370 ibf_dump_write_small_value(dump, table->entries[i].start);
13371 ibf_dump_write_small_value(dump, table->entries[i].end);
13372 ibf_dump_write_small_value(dump, table->entries[i].cont);
13373 ibf_dump_write_small_value(dump, table->entries[i].sp);
13378 return ibf_dump_pos(dump);
13383ibf_load_catch_table(const struct ibf_load *load, ibf_offset_t catch_table_offset, unsigned int size, const rb_iseq_t *parent_iseq)
13386 struct iseq_catch_table *table = ruby_xcalloc(1, iseq_catch_table_bytes(size));
13387 table->size = size;
13388 ISEQ_BODY(parent_iseq)->catch_table = table;
13390 ibf_offset_t reading_pos = catch_table_offset;
13393 for (i=0; i<table->size; i++) {
13394 int iseq_index = (int)ibf_load_small_value(load, &reading_pos);
13395 table->entries[i].type = (enum rb_catch_type)ibf_load_small_value(load, &reading_pos);
13396 table->entries[i].start = (unsigned int)ibf_load_small_value(load, &reading_pos);
13397 table->entries[i].end = (unsigned int)ibf_load_small_value(load, &reading_pos);
13398 table->entries[i].cont = (unsigned int)ibf_load_small_value(load, &reading_pos);
13399 table->entries[i].sp = (unsigned int)ibf_load_small_value(load, &reading_pos);
13402 RB_OBJ_WRITE(parent_iseq, UNALIGNED_MEMBER_PTR(&table->entries[i], iseq), catch_iseq);
13406 ISEQ_BODY(parent_iseq)->catch_table = NULL;
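/* Explanatory note (added comment, not in the original source): each catch-table entry
 * is serialized as six small values (handler-iseq index, type, start, end, cont, sp);
 * on load the table is attached to the parent iseq before the handler iseqs are
 * resolved by index, and it appears to be reset to NULL when the dumped size was zero. */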
13414 const unsigned int ci_size = body->ci_size;
13417 ibf_offset_t offset = ibf_dump_pos(dump);
13421 for (i = 0; i < ci_size; i++) {
13424 ibf_dump_write_small_value(dump, ibf_dump_id(dump, vm_ci_mid(ci)));
13425 ibf_dump_write_small_value(dump, vm_ci_flag(ci));
13426 ibf_dump_write_small_value(dump, vm_ci_argc(ci));
13430 int len = kwarg->keyword_len;
13431 ibf_dump_write_small_value(dump, len);
13432 for (int j=0; j<len; j++) {
13433 VALUE keyword = ibf_dump_object(dump, kwarg->keywords[j]);
13434 ibf_dump_write_small_value(dump, keyword);
13438 ibf_dump_write_small_value(dump, 0);
13443 ibf_dump_write_small_value(dump, (VALUE)-1);
13461static enum rb_id_table_iterator_result
13462store_outer_variable(ID id, VALUE val, void *dump)
13467 pair->name = rb_id2str(id);
13469 return ID_TABLE_CONTINUE;
13473outer_variable_cmp(const void *a, const void *b, void *arg)
13481 else if (!bp->name) {
13491 struct rb_id_table * ovs = ISEQ_BODY(iseq)->outer_variables;
13493 ibf_offset_t offset = ibf_dump_pos(dump);
13495 size_t size = ovs ? rb_id_table_size(ovs) : 0;
13496 ibf_dump_write_small_value(dump, (VALUE)size);
13505 rb_id_table_foreach(ovs, store_outer_variable, ovlist);
13507 for (size_t i = 0; i < size; ++i) {
13508 ID id = ovlist->pairs[i].id;
13509 ID val = ovlist->pairs[i].val;
13510 ibf_dump_write_small_value(dump, ibf_dump_id(dump, id));
13511 ibf_dump_write_small_value(dump, val);
13520ibf_load_ci_entries(const struct ibf_load *load,
13521 ibf_offset_t ci_entries_offset,
13522 unsigned int ci_size,
13530 ibf_offset_t reading_pos = ci_entries_offset;
13537 for (i = 0; i < ci_size; i++) {
13538 VALUE mid_index = ibf_load_small_value(load, &reading_pos);
13539 if (mid_index != (VALUE)-1) {
13540 ID mid = ibf_load_id(load, mid_index);
13541 unsigned int flag = (unsigned int)ibf_load_small_value(load, &reading_pos);
13542 unsigned int argc = (unsigned int)ibf_load_small_value(load, &reading_pos);
13545 int kwlen = (int)ibf_load_small_value(load, &reading_pos);
13548 kwarg->references = 0;
13549 kwarg->keyword_len = kwlen;
13550 for (int j=0; j<kwlen; j++) {
13551 VALUE keyword = ibf_load_small_value(load, &reading_pos);
13552 kwarg->keywords[j] = ibf_load_object(load, keyword);
13556 cds[i].ci = vm_ci_new(mid, flag, argc, kwarg);
13558 cds[i].cc = vm_cc_empty();
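/* Explanatory note (added comment, not in the original source): each call-info record
 * is loaded as a method-id index, flag and argc (all small values), followed by a
 * keyword count and that many object indexes for the keyword symbols; a method-id
 * index of (VALUE)-1 marks an empty slot, and every slot starts out with an empty
 * call cache (vm_cc_empty()). */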
13569ibf_load_outer_variables(const struct ibf_load * load, ibf_offset_t outer_variables_offset)
13571 ibf_offset_t reading_pos = outer_variables_offset;
13575 size_t table_size = (size_t)ibf_load_small_value(load, &reading_pos);
13577 if (table_size > 0) {
13578 tbl = rb_id_table_create(table_size);
13581 for (size_t i = 0; i < table_size; i++) {
13582 ID key = ibf_load_id(load, (ID)ibf_load_small_value(load, &reading_pos));
13583 VALUE value = ibf_load_small_value(load, &reading_pos);
13584 if (!key) key = rb_make_temporary_id(i);
13585 rb_id_table_insert(tbl, key, value);
13594 RUBY_ASSERT(dump->current_buffer == &dump->global_buffer);
13596 unsigned int *positions;
13600 const VALUE location_pathobj_index = ibf_dump_object(dump, body->location.pathobj);
13601 const VALUE location_base_label_index = ibf_dump_object(dump, body->location.base_label);
13602 const VALUE location_label_index = ibf_dump_object(dump, body->location.label);
13604#if IBF_ISEQ_ENABLE_LOCAL_BUFFER
13605 ibf_offset_t iseq_start = ibf_dump_pos(dump);
13610 buffer.obj_table = ibf_dump_object_table_new();
13611 dump->current_buffer = &buffer;
13614 const ibf_offset_t bytecode_offset = ibf_dump_code(dump, iseq);
13615 const ibf_offset_t bytecode_size = ibf_dump_pos(dump) - bytecode_offset;
13616 const ibf_offset_t param_opt_table_offset = ibf_dump_param_opt_table(dump, iseq);
13617 const ibf_offset_t param_keyword_offset = ibf_dump_param_keyword(dump, iseq);
13618 const ibf_offset_t insns_info_body_offset = ibf_dump_insns_info_body(dump, iseq);
13620 positions = rb_iseq_insns_info_decode_positions(ISEQ_BODY(iseq));
13621 const ibf_offset_t insns_info_positions_offset = ibf_dump_insns_info_positions(dump, positions, body->insns_info.size);
13622 ruby_xfree(positions);
13624 const ibf_offset_t local_table_offset = ibf_dump_local_table(dump, iseq);
13625 const ibf_offset_t lvar_states_offset = ibf_dump_lvar_states(dump, iseq);
13626 const unsigned int catch_table_size = body->catch_table ? body->catch_table->size : 0;
13627 const ibf_offset_t catch_table_offset = ibf_dump_catch_table(dump, iseq);
13628 const int parent_iseq_index = ibf_dump_iseq(dump, ISEQ_BODY(iseq)->parent_iseq);
13629 const int local_iseq_index = ibf_dump_iseq(dump, ISEQ_BODY(iseq)->local_iseq);
13630 const int mandatory_only_iseq_index = ibf_dump_iseq(dump, ISEQ_BODY(iseq)->mandatory_only_iseq);
13631 const ibf_offset_t ci_entries_offset = ibf_dump_ci_entries(dump, iseq);
13632 const ibf_offset_t outer_variables_offset = ibf_dump_outer_variables(dump, iseq);
13634#if IBF_ISEQ_ENABLE_LOCAL_BUFFER
13635 ibf_offset_t local_obj_list_offset;
13636 unsigned int local_obj_list_size;
13638 ibf_dump_object_list(dump, &local_obj_list_offset, &local_obj_list_size);
13641 ibf_offset_t body_offset = ibf_dump_pos(dump);
13644 unsigned int param_flags =
13645 (body->param.flags.has_lead << 0) |
13646 (body->param.flags.has_opt << 1) |
13647 (body->param.flags.has_rest << 2) |
13648 (body->param.flags.has_post << 3) |
13649 (body->param.flags.has_kw << 4) |
13650 (body->param.flags.has_kwrest << 5) |
13651 (body->param.flags.has_block << 6) |
13652 (body->param.flags.ambiguous_param0 << 7) |
13653 (body->param.flags.accepts_no_kwarg << 8) |
13654 (body->param.flags.ruby2_keywords << 9) |
13655 (body->param.flags.anon_rest << 10) |
13656 (body->param.flags.anon_kwrest << 11) |
13657 (body->param.flags.use_block << 12) |
13658 (body->param.flags.forwardable << 13) ;
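/* Explanatory note (added comment, not in the original source): the parameter flags
 * are packed into a single small value with one bit per flag, in the order listed
 * above (has_lead = bit 0 ... forwardable = bit 13); ibf_load_iseq_each unpacks them
 * with the matching shifts. For example, an iseq with only leading parameters and a
 * block parameter would dump param_flags == (1<<0)|(1<<6). */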
13660#if IBF_ISEQ_ENABLE_LOCAL_BUFFER
13661# define IBF_BODY_OFFSET(x) (x)
13663# define IBF_BODY_OFFSET(x) (body_offset - (x))
13666 ibf_dump_write_small_value(dump, body->type);
13667 ibf_dump_write_small_value(dump, body->iseq_size);
13668 ibf_dump_write_small_value(dump, IBF_BODY_OFFSET(bytecode_offset));
13669 ibf_dump_write_small_value(dump, bytecode_size);
13670 ibf_dump_write_small_value(dump, param_flags);
13671 ibf_dump_write_small_value(dump, body->param.size);
13672 ibf_dump_write_small_value(dump, body->param.lead_num);
13673 ibf_dump_write_small_value(dump, body->param.opt_num);
13674 ibf_dump_write_small_value(dump, body->param.rest_start);
13675 ibf_dump_write_small_value(dump, body->param.post_start);
13676 ibf_dump_write_small_value(dump, body->param.post_num);
13677 ibf_dump_write_small_value(dump, body->param.block_start);
13678 ibf_dump_write_small_value(dump, IBF_BODY_OFFSET(param_opt_table_offset));
13679 ibf_dump_write_small_value(dump, param_keyword_offset);
13680 ibf_dump_write_small_value(dump, location_pathobj_index);
13681 ibf_dump_write_small_value(dump, location_base_label_index);
13682 ibf_dump_write_small_value(dump, location_label_index);
13683 ibf_dump_write_small_value(dump, body->location.first_lineno);
13684 ibf_dump_write_small_value(dump, body->location.node_id);
13685 ibf_dump_write_small_value(dump, body->location.code_location.beg_pos.lineno);
13686 ibf_dump_write_small_value(dump, body->location.code_location.beg_pos.column);
13687 ibf_dump_write_small_value(dump, body->location.code_location.end_pos.lineno);
13688 ibf_dump_write_small_value(dump, body->location.code_location.end_pos.column);
13689 ibf_dump_write_small_value(dump, IBF_BODY_OFFSET(insns_info_body_offset));
13690 ibf_dump_write_small_value(dump, IBF_BODY_OFFSET(insns_info_positions_offset));
13691 ibf_dump_write_small_value(dump, body->insns_info.size);
13692 ibf_dump_write_small_value(dump, IBF_BODY_OFFSET(local_table_offset));
13693 ibf_dump_write_small_value(dump, IBF_BODY_OFFSET(lvar_states_offset));
13694 ibf_dump_write_small_value(dump, catch_table_size);
13695 ibf_dump_write_small_value(dump, IBF_BODY_OFFSET(catch_table_offset));
13696 ibf_dump_write_small_value(dump, parent_iseq_index);
13697 ibf_dump_write_small_value(dump, local_iseq_index);
13698 ibf_dump_write_small_value(dump, mandatory_only_iseq_index);
13699 ibf_dump_write_small_value(dump, IBF_BODY_OFFSET(ci_entries_offset));
13700 ibf_dump_write_small_value(dump, IBF_BODY_OFFSET(outer_variables_offset));
13701 ibf_dump_write_small_value(dump, body->variable.flip_count);
13702 ibf_dump_write_small_value(dump, body->local_table_size);
13703 ibf_dump_write_small_value(dump, body->ivc_size);
13704 ibf_dump_write_small_value(dump, body->icvarc_size);
13705 ibf_dump_write_small_value(dump, body->ise_size);
13706 ibf_dump_write_small_value(dump, body->ic_size);
13707 ibf_dump_write_small_value(dump, body->ci_size);
13708 ibf_dump_write_small_value(dump, body->stack_max);
13709 ibf_dump_write_small_value(dump, body->builtin_attrs);
13710 ibf_dump_write_small_value(dump, body->prism ? 1 : 0);
13712#undef IBF_BODY_OFFSET
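/* Explanatory note (added comment, not in the original source): the body header is a
 * flat sequence of small values with no field tags, so ibf_load_iseq_each() must read
 * them back in exactly the order written above; offsets are stored relative to
 * body_offset through IBF_BODY_OFFSET unless the per-iseq local buffer is enabled. */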
13714#if IBF_ISEQ_ENABLE_LOCAL_BUFFER
13715 ibf_offset_t iseq_length_bytes = ibf_dump_pos(dump);
13717 dump->current_buffer = saved_buffer;
13718 ibf_dump_write(dump, RSTRING_PTR(buffer.str), iseq_length_bytes);
13720 ibf_offset_t offset = ibf_dump_pos(dump);
13721 ibf_dump_write_small_value(dump, iseq_start);
13722 ibf_dump_write_small_value(dump, iseq_length_bytes);
13723 ibf_dump_write_small_value(dump, body_offset);
13725 ibf_dump_write_small_value(dump, local_obj_list_offset);
13726 ibf_dump_write_small_value(dump, local_obj_list_size);
13728 st_free_table(buffer.obj_table);
13732 return body_offset;
13737ibf_load_location_str(const struct ibf_load *load, VALUE str_index)
13739 VALUE str = ibf_load_object(load, str_index);
13741 str = rb_fstring(str);
13751 ibf_offset_t reading_pos = offset;
13753#if IBF_ISEQ_ENABLE_LOCAL_BUFFER
13755 load->current_buffer = &load->global_buffer;
13757 const ibf_offset_t iseq_start = (ibf_offset_t)ibf_load_small_value(load, &reading_pos);
13758 const ibf_offset_t iseq_length_bytes = (ibf_offset_t)ibf_load_small_value(load, &reading_pos);
13759 const ibf_offset_t body_offset = (ibf_offset_t)ibf_load_small_value(load, &reading_pos);
13762 buffer.buff = load->global_buffer.buff + iseq_start;
13763 buffer.size = iseq_length_bytes;
13764 buffer.obj_list_offset = (ibf_offset_t)ibf_load_small_value(load, &reading_pos);
13765 buffer.obj_list_size = (ibf_offset_t)ibf_load_small_value(load, &reading_pos);
13766 buffer.obj_list = pinned_list_new(buffer.obj_list_size);
13768 load->current_buffer = &buffer;
13769 reading_pos = body_offset;
13772#if IBF_ISEQ_ENABLE_LOCAL_BUFFER
13773# define IBF_BODY_OFFSET(x) (x)
13775# define IBF_BODY_OFFSET(x) (offset - (x))
13778 const unsigned int type = (unsigned int)ibf_load_small_value(load, &reading_pos);
13779 const unsigned int iseq_size = (unsigned int)ibf_load_small_value(load, &reading_pos);
13780 const ibf_offset_t bytecode_offset = (ibf_offset_t)IBF_BODY_OFFSET(ibf_load_small_value(load, &reading_pos));
13781 const ibf_offset_t bytecode_size = (ibf_offset_t)ibf_load_small_value(load, &reading_pos);
13782 const unsigned int param_flags = (unsigned int)ibf_load_small_value(load, &reading_pos);
13783 const unsigned int param_size = (unsigned int)ibf_load_small_value(load, &reading_pos);
13784 const int param_lead_num = (int)ibf_load_small_value(load, &reading_pos);
13785 const int param_opt_num = (int)ibf_load_small_value(load, &reading_pos);
13786 const int param_rest_start = (int)ibf_load_small_value(load, &reading_pos);
13787 const int param_post_start = (int)ibf_load_small_value(load, &reading_pos);
13788 const int param_post_num = (int)ibf_load_small_value(load, &reading_pos);
13789 const int param_block_start = (int)ibf_load_small_value(load, &reading_pos);
13790 const ibf_offset_t param_opt_table_offset = (ibf_offset_t)IBF_BODY_OFFSET(ibf_load_small_value(load, &reading_pos));
13791 const ibf_offset_t param_keyword_offset = (ibf_offset_t)ibf_load_small_value(load, &reading_pos);
13792 const VALUE location_pathobj_index = ibf_load_small_value(load, &reading_pos);
13793 const VALUE location_base_label_index = ibf_load_small_value(load, &reading_pos);
13794 const VALUE location_label_index = ibf_load_small_value(load, &reading_pos);
13795 const int location_first_lineno = (int)ibf_load_small_value(load, &reading_pos);
13796 const int location_node_id = (int)ibf_load_small_value(load, &reading_pos);
13797 const int location_code_location_beg_pos_lineno = (int)ibf_load_small_value(load, &reading_pos);
13798 const int location_code_location_beg_pos_column = (int)ibf_load_small_value(load, &reading_pos);
13799 const int location_code_location_end_pos_lineno = (int)ibf_load_small_value(load, &reading_pos);
13800 const int location_code_location_end_pos_column = (int)ibf_load_small_value(load, &reading_pos);
13801 const ibf_offset_t insns_info_body_offset = (ibf_offset_t)IBF_BODY_OFFSET(ibf_load_small_value(load, &reading_pos));
13802 const ibf_offset_t insns_info_positions_offset = (ibf_offset_t)IBF_BODY_OFFSET(ibf_load_small_value(load, &reading_pos));
13803 const unsigned int insns_info_size = (unsigned int)ibf_load_small_value(load, &reading_pos);
13804 const ibf_offset_t local_table_offset = (ibf_offset_t)IBF_BODY_OFFSET(ibf_load_small_value(load, &reading_pos));
13805 const ibf_offset_t lvar_states_offset = (ibf_offset_t)IBF_BODY_OFFSET(ibf_load_small_value(load, &reading_pos));
13806 const unsigned int catch_table_size = (unsigned int)ibf_load_small_value(load, &reading_pos);
13807 const ibf_offset_t catch_table_offset = (ibf_offset_t)IBF_BODY_OFFSET(ibf_load_small_value(load, &reading_pos));
13808 const int parent_iseq_index = (int)ibf_load_small_value(load, &reading_pos);
13809 const int local_iseq_index = (int)ibf_load_small_value(load, &reading_pos);
13810 const int mandatory_only_iseq_index = (int)ibf_load_small_value(load, &reading_pos);
13811 const ibf_offset_t ci_entries_offset = (ibf_offset_t)IBF_BODY_OFFSET(ibf_load_small_value(load, &reading_pos));
13812 const ibf_offset_t outer_variables_offset = (ibf_offset_t)IBF_BODY_OFFSET(ibf_load_small_value(load, &reading_pos));
13813 const rb_snum_t variable_flip_count = (rb_snum_t)ibf_load_small_value(load, &reading_pos);
13814 const unsigned int local_table_size = (unsigned int)ibf_load_small_value(load, &reading_pos);
13816 const unsigned int ivc_size = (unsigned int)ibf_load_small_value(load, &reading_pos);
13817 const unsigned int icvarc_size = (unsigned int)ibf_load_small_value(load, &reading_pos);
13818 const unsigned int ise_size = (unsigned int)ibf_load_small_value(load, &reading_pos);
13819 const unsigned int ic_size = (unsigned int)ibf_load_small_value(load, &reading_pos);
13821 const unsigned int ci_size = (unsigned int)ibf_load_small_value(load, &reading_pos);
13822 const unsigned int stack_max = (unsigned int)ibf_load_small_value(load, &reading_pos);
13823 const unsigned int builtin_attrs = (unsigned int)ibf_load_small_value(load, &reading_pos);
13824 const bool prism = (bool)ibf_load_small_value(load, &reading_pos);
13827 VALUE path = ibf_load_object(load, location_pathobj_index);
13832 realpath = path = rb_fstring(path);
13835 VALUE pathobj = path;
13841 if (!NIL_P(realpath)) {
13843 rb_raise(rb_eArgError, "unexpected realpath %"PRIxVALUE
13844 "(%x), path=%+"PRIsVALUE,
13845 realpath, TYPE(realpath), path);
13847 realpath = rb_fstring(realpath);
13853 rb_iseq_pathobj_set(iseq, path, realpath);
13858 VALUE dummy_frame = rb_vm_push_frame_fname(ec, path);
13860#undef IBF_BODY_OFFSET
13862 load_body->type = type;
13863 load_body->stack_max = stack_max;
13864 load_body->param.flags.has_lead = (param_flags >> 0) & 1;
13865 load_body->param.flags.has_opt = (param_flags >> 1) & 1;
13866 load_body->param.flags.has_rest = (param_flags >> 2) & 1;
13867 load_body->param.flags.has_post = (param_flags >> 3) & 1;
13868 load_body->param.flags.has_kw = FALSE;
13869 load_body->param.flags.has_kwrest = (param_flags >> 5) & 1;
13870 load_body->param.flags.has_block = (param_flags >> 6) & 1;
13871 load_body->param.flags.ambiguous_param0 = (param_flags >> 7) & 1;
13872 load_body->param.flags.accepts_no_kwarg = (param_flags >> 8) & 1;
13873 load_body->param.flags.ruby2_keywords = (param_flags >> 9) & 1;
13874 load_body->param.flags.anon_rest = (param_flags >> 10) & 1;
13875 load_body->param.flags.anon_kwrest = (param_flags >> 11) & 1;
13876 load_body->param.flags.use_block = (param_flags >> 12) & 1;
13877 load_body->param.flags.forwardable = (param_flags >> 13) & 1;
13878 load_body->param.size = param_size;
13879 load_body->param.lead_num = param_lead_num;
13880 load_body->param.opt_num = param_opt_num;
13881 load_body->param.rest_start = param_rest_start;
13882 load_body->param.post_start = param_post_start;
13883 load_body->param.post_num = param_post_num;
13884 load_body->param.block_start = param_block_start;
13885 load_body->local_table_size = local_table_size;
13886 load_body->ci_size = ci_size;
13887 load_body->insns_info.size = insns_info_size;
13889 ISEQ_COVERAGE_SET(iseq, Qnil);
13890 ISEQ_ORIGINAL_ISEQ_CLEAR(iseq);
13891 load_body->variable.flip_count = variable_flip_count;
13892 load_body->variable.script_lines = Qnil;
13894 load_body->location.first_lineno = location_first_lineno;
13895 load_body->location.node_id = location_node_id;
13896 load_body->location.code_location.beg_pos.lineno = location_code_location_beg_pos_lineno;
13897 load_body->location.code_location.beg_pos.column = location_code_location_beg_pos_column;
13898 load_body->location.code_location.end_pos.lineno = location_code_location_end_pos_lineno;
13899 load_body->location.code_location.end_pos.column = location_code_location_end_pos_column;
13900 load_body->builtin_attrs = builtin_attrs;
13901 load_body->prism = prism;
13903 load_body->ivc_size = ivc_size;
13904 load_body->icvarc_size = icvarc_size;
13905 load_body->ise_size = ise_size;
13906 load_body->ic_size = ic_size;
13908 if (ISEQ_IS_SIZE(load_body)) {
13912 load_body->is_entries = NULL;
13914 ibf_load_ci_entries(load, ci_entries_offset, ci_size, &load_body->call_data);
13915 load_body->outer_variables = ibf_load_outer_variables(load, outer_variables_offset);
13916 load_body->param.opt_table = ibf_load_param_opt_table(load, param_opt_table_offset, param_opt_num);
13917 load_body->param.keyword = ibf_load_param_keyword(load, param_keyword_offset);
13918 load_body->param.flags.has_kw = (param_flags >> 4) & 1;
13919 load_body->insns_info.body = ibf_load_insns_info_body(load, insns_info_body_offset, insns_info_size);
13920 load_body->insns_info.positions = ibf_load_insns_info_positions(load, insns_info_positions_offset, insns_info_size);
13921 load_body->local_table = ibf_load_local_table(load, local_table_offset, local_table_size);
13922 load_body->lvar_states = ibf_load_lvar_states(load, lvar_states_offset, local_table_size, load_body->local_table);
13923 ibf_load_catch_table(load, catch_table_offset, catch_table_size, iseq);
13927 const rb_iseq_t *mandatory_only_iseq = ibf_load_iseq(load, (const rb_iseq_t *)(VALUE)mandatory_only_iseq_index);
13929 RB_OBJ_WRITE(iseq, &load_body->parent_iseq, parent_iseq);
13930 RB_OBJ_WRITE(iseq, &load_body->local_iseq, local_iseq);
13931 RB_OBJ_WRITE(iseq, &load_body->mandatory_only_iseq, mandatory_only_iseq);
13934 if (load_body->param.keyword != NULL) {
13936 struct rb_iseq_param_keyword *keyword = (struct rb_iseq_param_keyword *) load_body->param.keyword;
13937 keyword->table = &load_body->local_table[keyword->bits_start - keyword->num];
13940 ibf_load_code(load, iseq, bytecode_offset, bytecode_size, iseq_size);
13941#if VM_INSN_INFO_TABLE_IMPL == 2
13942 rb_iseq_insns_info_encode_positions(iseq);
13945 rb_iseq_translate_threaded_code(iseq);
13947#if IBF_ISEQ_ENABLE_LOCAL_BUFFER
13948 load->current_buffer = &load->global_buffer;
13951 RB_OBJ_WRITE(iseq, &load_body->location.base_label, ibf_load_location_str(load, location_base_label_index));
13952 RB_OBJ_WRITE(iseq, &load_body->location.label, ibf_load_location_str(load, location_label_index));
13954#if IBF_ISEQ_ENABLE_LOCAL_BUFFER
13955 load->current_buffer = saved_buffer;
13957 verify_call_cache(iseq);
13960 rb_vm_pop_frame_no_int(ec);
13970ibf_dump_iseq_list_i(st_data_t key, st_data_t val, st_data_t ptr)
13975 ibf_offset_t offset = ibf_dump_iseq_each(args->dump, iseq);
13978 return ST_CONTINUE;
13988 args.offset_list = offset_list;
13990 st_foreach(dump->iseq_table, ibf_dump_iseq_list_i, (st_data_t)&args);
13993 st_index_t size = dump->iseq_table->num_entries;
13994 ibf_offset_t *offsets = ALLOCA_N(ibf_offset_t, size);
13996 for (i = 0; i < size; i++) {
14000 ibf_dump_align(dump, sizeof(ibf_offset_t));
14001 header->iseq_list_offset = ibf_dump_write(dump, offsets, sizeof(ibf_offset_t) * size);
14002 header->iseq_list_size = (unsigned int)size;
14012 unsigned int type: 5;
14013 unsigned int special_const: 1;
14014 unsigned int frozen: 1;
14015 unsigned int internal: 1;
14018enum ibf_object_class_index {
14019 IBF_OBJECT_CLASS_OBJECT,
14020 IBF_OBJECT_CLASS_ARRAY,
14021 IBF_OBJECT_CLASS_STANDARD_ERROR,
14022 IBF_OBJECT_CLASS_NO_MATCHING_PATTERN_ERROR,
14023 IBF_OBJECT_CLASS_TYPE_ERROR,
14024 IBF_OBJECT_CLASS_NO_MATCHING_PATTERN_KEY_ERROR,
14034 long keyval[FLEX_ARY_LEN];
14047 BDIGIT digits[FLEX_ARY_LEN];
14050enum ibf_object_data_type {
14051 IBF_OBJECT_DATA_ENCODING,
14062#define IBF_ALIGNED_OFFSET(align, offset) \
14063 ((((offset) - 1) / (align) + 1) * (align))
14068#define IBF_OBJBODY(type, offset) \
14069 ibf_load_check_offset(load, IBF_ALIGNED_OFFSET(RUBY_ALIGNOF(type), offset))
14072ibf_load_check_offset(const struct ibf_load *load, size_t offset)
14074 if (offset >= load->current_buffer->size) {
14075 rb_raise(rb_eIndexError, "object offset out of range: %"PRIdSIZE, offset);
14077 return load->current_buffer->buff + offset;
14080NORETURN(static void ibf_dump_object_unsupported(struct ibf_dump *dump, VALUE obj));
14083ibf_dump_object_unsupported(struct ibf_dump *dump, VALUE obj)
14086 rb_raw_obj_info(buff, sizeof(buff), obj);
14095 rb_raise(rb_eArgError, "unsupported");
14102 enum ibf_object_class_index cindex;
14103 if (obj == rb_cObject) {
14104 cindex = IBF_OBJECT_CLASS_OBJECT;
14107 cindex = IBF_OBJECT_CLASS_ARRAY;
14110 cindex = IBF_OBJECT_CLASS_STANDARD_ERROR;
14113 cindex = IBF_OBJECT_CLASS_NO_MATCHING_PATTERN_ERROR;
14116 cindex = IBF_OBJECT_CLASS_TYPE_ERROR;
14119 cindex = IBF_OBJECT_CLASS_NO_MATCHING_PATTERN_KEY_ERROR;
14122 rb_obj_info_dump(obj);
14124 rb_bug("unsupported class");
14126 ibf_dump_write_small_value(dump, (VALUE)cindex);
14132 enum ibf_object_class_index cindex = (enum ibf_object_class_index)ibf_load_small_value(load, &offset);
14135 case IBF_OBJECT_CLASS_OBJECT:
14137 case IBF_OBJECT_CLASS_ARRAY:
14139 case IBF_OBJECT_CLASS_STANDARD_ERROR:
14141 case IBF_OBJECT_CLASS_NO_MATCHING_PATTERN_ERROR:
14143 case IBF_OBJECT_CLASS_TYPE_ERROR:
14145 case IBF_OBJECT_CLASS_NO_MATCHING_PATTERN_KEY_ERROR:
14149 rb_raise(rb_eArgError, "ibf_load_object_class: unknown class (%d)", (int)cindex);
14157 (void)IBF_W(&dbl, double, 1);
14165 memcpy(&d, IBF_OBJBODY(double, offset), sizeof(d));
14167 if (!FLONUM_P(f)) RB_OBJ_SET_SHAREABLE(f);
14174 long encindex = (long)rb_enc_get_index(obj);
14175 long len = RSTRING_LEN(obj);
14176 const char *ptr = RSTRING_PTR(obj);
14178 if (encindex > RUBY_ENCINDEX_BUILTIN_MAX) {
14179 rb_encoding *enc = rb_enc_from_index((int)encindex);
14180 const char *enc_name = rb_enc_name(enc);
14181 encindex = RUBY_ENCINDEX_BUILTIN_MAX + ibf_dump_object(dump, rb_str_new2(enc_name));
14184 ibf_dump_write_small_value(dump, encindex);
14185 ibf_dump_write_small_value(dump, len);
14186 IBF_WP(ptr, char, len);
14192 ibf_offset_t reading_pos = offset;
14194 int encindex = (int)ibf_load_small_value(load, &reading_pos);
14195 const long len = (long)ibf_load_small_value(load, &reading_pos);
14196 const char *ptr = load->current_buffer->buff + reading_pos;
14198 if (encindex > RUBY_ENCINDEX_BUILTIN_MAX) {
14199 VALUE enc_name_str = ibf_load_object(load, encindex - RUBY_ENCINDEX_BUILTIN_MAX);
14200 encindex = rb_enc_find_index(RSTRING_PTR(enc_name_str));
14204 if (header->frozen && !header->internal) {
14205 str = rb_enc_literal_str(ptr, len, rb_enc_from_index(encindex));
14208 str = rb_enc_str_new(ptr, len, rb_enc_from_index(encindex));
14211 if (header->frozen) str = rb_fstring(str);
14222 regexp.srcstr = (long)ibf_dump_object(dump, srcstr);
14224 ibf_dump_write_byte(dump, (unsigned char)regexp.option);
14225 ibf_dump_write_small_value(dump, regexp.srcstr);
14232 regexp.option = ibf_load_byte(load, &offset);
14233 regexp.srcstr = ibf_load_small_value(load, &offset);
14235 VALUE srcstr = ibf_load_object(load, regexp.srcstr);
14236 VALUE reg = rb_reg_compile(srcstr, (int)regexp.option, NULL, 0);
14239 if (header->frozen) RB_OBJ_SET_SHAREABLE(rb_obj_freeze(reg));
14248 ibf_dump_write_small_value(dump, len);
14249 for (i=0; i<len; i++) {
14250 long index = (long)ibf_dump_object(dump, RARRAY_AREF(obj, i));
14251 ibf_dump_write_small_value(dump, index);
14258 ibf_offset_t reading_pos = offset;
14260 const long len = (long)ibf_load_small_value(load, &reading_pos);
14265 for (i=0; i<len; i++) {
14266 const VALUE index = ibf_load_small_value(load, &reading_pos);
14270 if (header->frozen) {
14279ibf_dump_object_hash_i(st_data_t key, st_data_t val, st_data_t ptr)
14283 VALUE key_index = ibf_dump_object(dump, (VALUE)key);
14284 VALUE val_index = ibf_dump_object(dump, (VALUE)val);
14286 ibf_dump_write_small_value(dump, key_index);
14287 ibf_dump_write_small_value(dump, val_index);
14288 return ST_CONTINUE;
14295 ibf_dump_write_small_value(dump, (VALUE)len);
14303 long len = (long)ibf_load_small_value(load, &offset);
14304 VALUE obj = rb_hash_new_with_size(len);
14307 for (i = 0; i < len; i++) {
14308 VALUE key_index = ibf_load_small_value(load, &offset);
14309 VALUE val_index = ibf_load_small_value(load, &offset);
14311 VALUE key = ibf_load_object(load, key_index);
14312 VALUE val = ibf_load_object(load, val_index);
14313 rb_hash_aset(obj, key, val);
14315 rb_hash_rehash(obj);
14318 if (header->frozen) {
14319 RB_OBJ_SET_FROZEN_SHAREABLE(obj);
14333 range.class_index = 0;
14336 range.beg = (long)ibf_dump_object(dump, beg);
14337 range.end = (long)ibf_dump_object(dump, end);
14343 rb_raise(rb_eNotImpError, "ibf_dump_object_struct: unsupported class %"PRIsVALUE,
14352 VALUE beg = ibf_load_object(load, range->beg);
14353 VALUE end = ibf_load_object(load, range->end);
14356 if (header->frozen) RB_OBJ_SET_FROZEN_SHAREABLE(obj);
14363 ssize_t len = BIGNUM_LEN(obj);
14364 ssize_t slen = BIGNUM_SIGN(obj) > 0 ? len : len * -1;
14365 BDIGIT *d = BIGNUM_DIGITS(obj);
14367 (void)IBF_W(&slen, ssize_t, 1);
14368 IBF_WP(d, BDIGIT, len);
14375 int sign = bignum->slen > 0;
14376 ssize_t len = sign > 0 ? bignum->slen : -1 * bignum->slen;
14377 const int big_unpack_flags =
14380 VALUE obj = rb_integer_unpack(bignum->digits, len, sizeof(BDIGIT), 0,
14384 if (header->frozen) RB_OBJ_SET_FROZEN_SHAREABLE(obj);
14391 if (rb_data_is_encoding(obj)) {
14393 const char *name = rb_enc_name(enc);
14394 long len = strlen(name) + 1;
14396 data[0] = IBF_OBJECT_DATA_ENCODING;
14398 (void)IBF_W(data, long, 2);
14399 IBF_WP(name, char, len);
14402 ibf_dump_object_unsupported(dump, obj);
14409 const long *body = IBF_OBJBODY(long, offset);
14410 const enum ibf_object_data_type type = (enum ibf_object_data_type)body[0];
14412 const char *data = (const char *)&body[2];
14415 case IBF_OBJECT_DATA_ENCODING:
14417 VALUE encobj = rb_enc_from_encoding(rb_enc_find(data));
14422 return ibf_load_object_unsupported(load, header, offset);
14426ibf_dump_object_complex_rational(struct ibf_dump *dump, VALUE obj)
14429 data[0] = (long)ibf_dump_object(dump, RCOMPLEX(obj)->real);
14430 data[1] = (long)ibf_dump_object(dump, RCOMPLEX(obj)->imag);
14432 (void)IBF_W(data, long, 2);
14436ibf_load_object_complex_rational(const struct ibf_load *load, const struct ibf_object_header *header, ibf_offset_t offset)
14439 VALUE a = ibf_load_object(load, nums->a);
14440 VALUE b = ibf_load_object(load, nums->b);
14452 ibf_dump_object_string(dump, rb_sym2str(obj));
14458 ibf_offset_t reading_pos = offset;
14460 int encindex = (int)ibf_load_small_value(load, &reading_pos);
14461 const long len = (long)ibf_load_small_value(load, &reading_pos);
14462 const char *ptr = load->current_buffer->buff + reading_pos;
14464 if (encindex > RUBY_ENCINDEX_BUILTIN_MAX) {
14465 VALUE enc_name_str = ibf_load_object(load, encindex - RUBY_ENCINDEX_BUILTIN_MAX);
14466 encindex = rb_enc_find_index(RSTRING_PTR(enc_name_str));
14469 ID id = rb_intern3(ptr, len, rb_enc_from_index(encindex));
14473typedef void (*ibf_dump_object_function)(struct ibf_dump *dump, VALUE obj);
14474static const ibf_dump_object_function dump_object_functions[RUBY_T_MASK+1] = {
14475 ibf_dump_object_unsupported,
14476 ibf_dump_object_unsupported,
14477 ibf_dump_object_class,
14478 ibf_dump_object_unsupported,
14479 ibf_dump_object_float,
14480 ibf_dump_object_string,
14481 ibf_dump_object_regexp,
14482 ibf_dump_object_array,
14483 ibf_dump_object_hash,
14484 ibf_dump_object_struct,
14485 ibf_dump_object_bignum,
14486 ibf_dump_object_unsupported,
14487 ibf_dump_object_data,
14488 ibf_dump_object_unsupported,
14489 ibf_dump_object_complex_rational,
14490 ibf_dump_object_complex_rational,
14491 ibf_dump_object_unsupported,
14492 ibf_dump_object_unsupported,
14493 ibf_dump_object_unsupported,
14494 ibf_dump_object_unsupported,
14495 ibf_dump_object_symbol,
14496 ibf_dump_object_unsupported,
14497 ibf_dump_object_unsupported,
14498 ibf_dump_object_unsupported,
14499 ibf_dump_object_unsupported,
14500 ibf_dump_object_unsupported,
14501 ibf_dump_object_unsupported,
14502 ibf_dump_object_unsupported,
14503 ibf_dump_object_unsupported,
14504 ibf_dump_object_unsupported,
14505 ibf_dump_object_unsupported,
14506 ibf_dump_object_unsupported,
14512 unsigned char byte =
14513 (header.type << 0) |
14514 (header.special_const << 5) |
14515 (header.frozen << 6) |
14516 (header.internal << 7);
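/* Explanatory note (added comment, not in the original source): the object header is a
 * single byte: bits 0-4 hold the T_xxx type, bit 5 marks special constants, bit 6
 * frozen objects and bit 7 internal objects. A frozen, non-internal String
 * (RUBY_T_STRING == 0x05) would therefore be tagged 0x45. */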
14522ibf_load_object_object_header(const struct ibf_load *load, ibf_offset_t *offset)
14524 unsigned char byte = ibf_load_byte(load, offset);
14527 header.type = (byte >> 0) & 0x1f;
14528 header.special_const = (byte >> 5) & 0x01;
14529 header.frozen = (byte >> 6) & 0x01;
14530 header.internal = (byte >> 7) & 0x01;
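/* Sketch (editorial, not part of the original source): the object header is
 * packed into one byte as [internal:1][frozen:1][special_const:1][type:5],
 * matching the shifts used above. A stand-alone round-trip of just that
 * packing, with illustrative demo_* names, kept out of the build: */
#if 0
struct demo_obj_header { unsigned int type:5, special_const:1, frozen:1, internal:1; };
static unsigned char
demo_pack(struct demo_obj_header h)
{
    return (unsigned char)((h.type << 0) | (h.special_const << 5) |
                           (h.frozen << 6) | (h.internal << 7));
}
static struct demo_obj_header
demo_unpack(unsigned char byte)
{
    struct demo_obj_header h;
    h.type          = (byte >> 0) & 0x1f;
    h.special_const = (byte >> 5) & 0x01;
    h.frozen        = (byte >> 6) & 0x01;
    h.internal      = (byte >> 7) & 0x01;
    return h;
}
#endif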
14539 ibf_offset_t current_offset;
14540 IBF_ZERO(obj_header);
14541 obj_header.type = TYPE(obj);
14543 IBF_W_ALIGN(ibf_offset_t);
14544 current_offset = ibf_dump_pos(dump);
14549 obj_header.special_const = TRUE;
14550 obj_header.frozen = TRUE;
14551 obj_header.internal = TRUE;
14552 ibf_dump_object_object_header(dump, obj_header);
14553 ibf_dump_write_small_value(dump, obj);
14557 obj_header.special_const = FALSE;
14558 obj_header.frozen = OBJ_FROZEN(obj) ? TRUE : FALSE;
14559 ibf_dump_object_object_header(dump, obj_header);
14560 (*dump_object_functions[obj_header.type])(dump, obj);
14563 return current_offset;
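/* Note (editorial): immediate values such as Fixnums, nil, true and false take
 * the special_const branch above and are written inline as a "small value"
 * right after the header; other objects are dispatched by type through
 * dump_object_functions. */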
14567static const ibf_load_object_function load_object_functions[RUBY_T_MASK+1] = {
14568 ibf_load_object_unsupported,
14569 ibf_load_object_unsupported,
14570 ibf_load_object_class,
14571 ibf_load_object_unsupported,
14572 ibf_load_object_float,
14573 ibf_load_object_string,
14574 ibf_load_object_regexp,
14575 ibf_load_object_array,
14576 ibf_load_object_hash,
14577 ibf_load_object_struct,
14578 ibf_load_object_bignum,
14579 ibf_load_object_unsupported,
14580 ibf_load_object_data,
14581 ibf_load_object_unsupported,
14582 ibf_load_object_complex_rational,
14583 ibf_load_object_complex_rational,
14584 ibf_load_object_unsupported,
14585 ibf_load_object_unsupported,
14586 ibf_load_object_unsupported,
14587 ibf_load_object_unsupported,
14588 ibf_load_object_symbol,
14589 ibf_load_object_unsupported,
14590 ibf_load_object_unsupported,
14591 ibf_load_object_unsupported,
14592 ibf_load_object_unsupported,
14593 ibf_load_object_unsupported,
14594 ibf_load_object_unsupported,
14595 ibf_load_object_unsupported,
14596 ibf_load_object_unsupported,
14597 ibf_load_object_unsupported,
14598 ibf_load_object_unsupported,
14599 ibf_load_object_unsupported,
14603ibf_load_object(const struct ibf_load *load, VALUE object_index)
14605 if (object_index == 0) {
14609 VALUE obj = pinned_list_fetch(load->current_buffer->obj_list, (long)object_index);
14611 ibf_offset_t *offsets = (ibf_offset_t *)(load->current_buffer->obj_list_offset + load->current_buffer->buff);
14612 ibf_offset_t offset = offsets[object_index];
14613 const struct ibf_object_header header = ibf_load_object_object_header(load, &offset);
14616 fprintf(stderr, "ibf_load_object: list=%#x offsets=%p offset=%#x\n",
14617 load->current_buffer->obj_list_offset, (void *)offsets, offset);
14618 fprintf(stderr, "ibf_load_object: type=%#x special=%d frozen=%d internal=%d\n",
14619 header.type, header.special_const, header.frozen, header.internal);
14621 if (offset >= load->current_buffer->size) {
14622 rb_raise(rb_eIndexError, "object offset out of range: %u", offset);
14625 if (header.special_const) {
14626 ibf_offset_t reading_pos = offset;
14628 obj = ibf_load_small_value(load, &reading_pos);
14631 obj = (*load_object_functions[header.type])(load, &header, offset);
14634 pinned_list_store(load->current_buffer->obj_list, (long)object_index, obj);
14637 fprintf(stderr, "ibf_load_object: index=%#"PRIxVALUE" obj=%#"PRIxVALUE"\n",
14638 object_index, obj);
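/* Note (editorial): loaded objects are cached in a pinned list keyed by their
 * object index, so repeated references are deserialized only once; index 0 is
 * handled by the early-return branch above, and an offset past the buffer size
 * raises IndexError before any per-type loader runs. */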
14651ibf_dump_object_list_i(st_data_t key, st_data_t val, st_data_t ptr)
14656 ibf_offset_t offset = ibf_dump_object_object(args->dump, obj);
14659 return ST_CONTINUE;
14663ibf_dump_object_list(struct ibf_dump *dump, ibf_offset_t *obj_list_offset, unsigned int *obj_list_size)
14665 st_table *obj_table = dump->current_buffer->obj_table;
14670 args.offset_list = offset_list;
14672 st_foreach(obj_table, ibf_dump_object_list_i, (st_data_t)&args);
14674 IBF_W_ALIGN(ibf_offset_t);
14675 *obj_list_offset = ibf_dump_pos(dump);
14677 st_index_t size = obj_table->num_entries;
14680 for (i=0; i<size; i++) {
14685 *obj_list_size = (unsigned int)size;
14689ibf_dump_mark(void *ptr)
14692 rb_gc_mark(dump->global_buffer.str);
14694 rb_mark_set(dump->global_buffer.obj_table);
14695 rb_mark_set(dump->iseq_table);
14699ibf_dump_free(void *ptr)
14702 if (dump->global_buffer.obj_table) {
14703 st_free_table(dump->global_buffer.obj_table);
14704 dump->global_buffer.obj_table = 0;
14706 if (dump->iseq_table) {
14707 st_free_table(dump->iseq_table);
14708 dump->iseq_table = 0;
14713ibf_dump_memsize(const void *ptr)
14717 if (dump->iseq_table) size += st_memsize(dump->iseq_table);
14718 if (dump->global_buffer.obj_table) size += st_memsize(dump->global_buffer.obj_table);
14724 {ibf_dump_mark, ibf_dump_free, ibf_dump_memsize,},
14725 0, 0, RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_EMBEDDABLE
14731 dump->global_buffer.obj_table = NULL;
14732 dump->iseq_table = NULL;
14735 dump->global_buffer.obj_table = ibf_dump_object_table_new();
14736 dump->iseq_table = st_init_numtable();
14738 dump->current_buffer = &dump->global_buffer;
14749 if (ISEQ_BODY(iseq)->parent_iseq != NULL ||
14750 ISEQ_BODY(iseq)->local_iseq != iseq) {
14753 if (RTEST(ISEQ_COVERAGE(iseq))) {
14758 ibf_dump_setup(dump, dump_obj);
14760 ibf_dump_write(dump, &header, sizeof(header));
14761 ibf_dump_iseq(dump, iseq);
14763 header.magic[0] = 'Y';
14764 header.magic[1] = 'A';
14765 header.magic[2] = 'R';
14766 header.magic[3] = 'B';
14767 header.major_version = IBF_MAJOR_VERSION;
14768 header.minor_version = IBF_MINOR_VERSION;
14769 header.endian = IBF_ENDIAN_MARK;
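/* Note (editorial): the binary header begins with the 4-byte magic "YARB",
 * followed by the IBF major/minor version and an endianness mark;
 * ibf_load_setup_bytes() below rejects input whose magic, version, or endian
 * mark does not match the loading interpreter. */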
14771 ibf_dump_iseq_list(dump, &header);
14772 ibf_dump_object_list(dump, &header.global_object_list_offset, &header.global_object_list_size);
14773 header.size = ibf_dump_pos(dump);
14776 VALUE opt_str = opt;
14779 ibf_dump_write(dump, ptr, header.extra_size);
14782 header.extra_size = 0;
14785 ibf_dump_overwrite(dump, &header, sizeof(header), 0);
14787 str = dump->global_buffer.str;
14792static const ibf_offset_t *
14793ibf_iseq_list(const struct ibf_load *load)
14795 return (const ibf_offset_t *)(load->global_buffer.buff + load->header->iseq_list_offset);
14799rb_ibf_load_iseq_complete(rb_iseq_t *iseq)
14803 ibf_offset_t offset = ibf_iseq_list(load)[iseq->aux.loader.index];
14806 fprintf(stderr, "rb_ibf_load_iseq_complete: index=%#x offset=%#x size=%#x\n",
14807 iseq->aux.loader.index, offset,
14808 load->header->size);
14810 ibf_load_iseq_each(load, iseq, offset);
14811 ISEQ_COMPILE_DATA_CLEAR(iseq);
14813 rb_iseq_init_trace(iseq);
14814 load->iseq = prev_src_iseq;
14821 rb_ibf_load_iseq_complete((rb_iseq_t *)iseq);
14829 int iseq_index = (int)(VALUE)index_iseq;
14832 fprintf(stderr, "ibf_load_iseq: index_iseq=%p iseq_list=%p\n",
14833 (void *)index_iseq, (void *)load->iseq_list);
14835 if (iseq_index == -1) {
14839 VALUE iseqv = pinned_list_fetch(load->iseq_list, iseq_index);
14842 fprintf(stderr, "ibf_load_iseq: iseqv=%p\n", (void *)iseqv);
14850 fprintf(stderr, "ibf_load_iseq: new iseq=%p\n", (void *)iseq);
14853 iseq->aux.loader.obj = load->loader_obj;
14854 iseq->aux.loader.index = iseq_index;
14856 fprintf(stderr, "ibf_load_iseq: iseq=%p loader_obj=%p index=%d\n",
14857 (void *)iseq, (void *)load->loader_obj, iseq_index);
14859 pinned_list_store(load->iseq_list, iseq_index, (VALUE)iseq);
14861 if (!USE_LAZY_LOAD || GET_VM()->builtin_function_table) {
14863 fprintf(stderr, "ibf_load_iseq: loading iseq=%p\n", (void *)iseq);
14865 rb_ibf_load_iseq_complete(iseq);
14869 fprintf(stderr, "ibf_load_iseq: iseq=%p loaded %p\n",
14870 (void *)iseq, (void *)load->iseq);
14878ibf_load_setup_bytes(struct ibf_load *load, VALUE loader_obj, const char *bytes, size_t size)
14881 load->loader_obj = loader_obj;
14882 load->global_buffer.buff = bytes;
14883 load->header = header;
14884 load->global_buffer.size = header->size;
14885 load->global_buffer.obj_list_offset = header->global_object_list_offset;
14886 load->global_buffer.obj_list_size = header->global_object_list_size;
14887 RB_OBJ_WRITE(loader_obj, &load->iseq_list, pinned_list_new(header->iseq_list_size));
14888 RB_OBJ_WRITE(loader_obj, &load->global_buffer.obj_list, pinned_list_new(load->global_buffer.obj_list_size));
14891 load->current_buffer = &load->global_buffer;
14893 if (size < header->size) {
14896 if (strncmp(header->magic, "YARB", 4) != 0) {
14899 if (header->major_version != IBF_MAJOR_VERSION ||
14900 header->minor_version != IBF_MINOR_VERSION) {
14902 header->major_version, header->minor_version, IBF_MAJOR_VERSION, IBF_MINOR_VERSION);
14904 if (header->endian != IBF_ENDIAN_MARK) {
14910 if (header->iseq_list_offset % RUBY_ALIGNOF(ibf_offset_t)) {
14911 rb_raise(rb_eArgError, "unaligned iseq list offset: %u",
14912 header->iseq_list_offset);
14914 if (load->global_buffer.obj_list_offset % RUBY_ALIGNOF(ibf_offset_t)) {
14915 rb_raise(rb_eArgError, "unaligned object list offset: %u",
14916 load->global_buffer.obj_list_offset);
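/* Note (editorial): the setup checks visible above validate, in order, the
 * declared size against the actual buffer size, the "YARB" magic, the
 * major/minor version, the endian mark, and finally that both the iseq list
 * and the object list begin at ibf_offset_t-aligned offsets; the alignment
 * failures raise ArgumentError, while the other error paths are elided here. */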
14929 if (USE_LAZY_LOAD) {
14930 str = rb_str_new(RSTRING_PTR(str), RSTRING_LEN(str));
14933 ibf_load_setup_bytes(load, loader_obj, RSTRING_PTR(str), RSTRING_LEN(str));
14938ibf_loader_mark(void *ptr)
14941 rb_gc_mark(load->str);
14942 rb_gc_mark(load->iseq_list);
14943 rb_gc_mark(load->global_buffer.obj_list);
14947ibf_loader_free(void *ptr)
14954ibf_loader_memsize(const void *ptr)
14961 {ibf_loader_mark, ibf_loader_free, ibf_loader_memsize,},
14962 0, 0, RUBY_TYPED_WB_PROTECTED | RUBY_TYPED_FREE_IMMEDIATELY
14966rb_iseq_ibf_load(VALUE str)
14972 ibf_load_setup(load, loader_obj, str);
14973 iseq = ibf_load_iseq(load, 0);
14980rb_iseq_ibf_load_bytes(const char *bytes, size_t size)
14986 ibf_load_setup_bytes(load, loader_obj, bytes, size);
14987 iseq = ibf_load_iseq(load, 0);
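/* Note (editorial): rb_iseq_ibf_load() and rb_iseq_ibf_load_bytes() differ
 * only in how the input buffer is set up (a Ruby String vs. a raw byte
 * pointer); both then load the root iseq via ibf_load_iseq(load, 0).
 * rb_iseq_ibf_load_extra_data() below instead returns the extra-data region
 * appended after header->size. */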
14994rb_iseq_ibf_load_extra_data(VALUE str)
15000 ibf_load_setup(load, loader_obj, str);
15001 extra_str = rb_str_new(load->global_buffer.buff + load->header->size, load->header->extra_size);
15006#include "prism_compile.c"