12 #include "ruby/internal/config.h"
22 #include "internal/array.h"
23 #include "internal/compile.h"
24 #include "internal/complex.h"
25 #include "internal/encoding.h"
26 #include "internal/error.h"
27 #include "internal/gc.h"
28 #include "internal/hash.h"
29 #include "internal/io.h"
30 #include "internal/numeric.h"
31 #include "internal/object.h"
32 #include "internal/rational.h"
33 #include "internal/re.h"
34 #include "internal/ruby_parser.h"
35 #include "internal/symbol.h"
36 #include "internal/thread.h"
37 #include "internal/variable.h"
43 #include "vm_callinfo.h"
49 #include "insns_info.inc"
51 #define FIXNUM_INC(n, i) ((n)+(INT2FIX(i)&~FIXNUM_FLAG))
52 #define FIXNUM_OR(n, i) ((n)|INT2FIX(i))
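/* Note: FIXNUM_INC bumps a Fixnum-encoded VALUE by `i` without untagging it.
 * INT2FIX(i) is (i << 1) | FIXNUM_FLAG, so masking off FIXNUM_FLAG leaves the
 * exact delta between the tagged forms of n and n+i. FIXNUM_OR simply ORs the
 * tagged bits in, which also keeps the flag bit set. */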
86 unsigned int rescued: 2;
87 unsigned int unremovable: 1;
92 enum ruby_vminsn_type insn_id;
122 const void *ensure_node;
127 const ID rb_iseq_shared_exc_local_tbl[] = {idERROR_INFO};
147 #define compile_debug CPDEBUG
149 #define compile_debug ISEQ_COMPILE_DATA(iseq)->option->debug_level
154 #define compile_debug_print_indent(level) \
155 ruby_debug_print_indent((level), compile_debug, gl_node_level * 2)
157 #define debugp(header, value) (void) \
158 (compile_debug_print_indent(1) && \
159 ruby_debug_print_value(1, compile_debug, (header), (value)))
161 #define debugi(header, id) (void) \
162 (compile_debug_print_indent(1) && \
163 ruby_debug_print_id(1, compile_debug, (header), (id)))
165 #define debugp_param(header, value) (void) \
166 (compile_debug_print_indent(1) && \
167 ruby_debug_print_value(1, compile_debug, (header), (value)))
169 #define debugp_verbose(header, value) (void) \
170 (compile_debug_print_indent(2) && \
171 ruby_debug_print_value(2, compile_debug, (header), (value)))
173 #define debugp_verbose_node(header, value) (void) \
174 (compile_debug_print_indent(10) && \
175 ruby_debug_print_value(10, compile_debug, (header), (value)))
177 #define debug_node_start(node) ((void) \
178 (compile_debug_print_indent(1) && \
179 (ruby_debug_print_node(1, CPDEBUG, "", (const NODE *)(node)), gl_node_level)), \
182 #define debug_node_end() gl_node_level --
186 #define debugi(header, id) ((void)0)
187 #define debugp(header, value) ((void)0)
188 #define debugp_verbose(header, value) ((void)0)
189 #define debugp_verbose_node(header, value) ((void)0)
190 #define debugp_param(header, value) ((void)0)
191 #define debug_node_start(node) ((void)0)
192 #define debug_node_end() ((void)0)
195 #if CPDEBUG > 1 || CPDEBUG < 0
197 #define printf ruby_debug_printf
198 #define debugs if (compile_debug_print_indent(1)) ruby_debug_printf
199 #define debug_compile(msg, v) ((void)(compile_debug_print_indent(1) && fputs((msg), stderr)), (v))
201 #define debugs if(0)printf
202 #define debug_compile(msg, v) (v)
205 #define LVAR_ERRINFO (1)
208 #define NEW_LABEL(l) new_label_body(iseq, (l))
209 #define LABEL_FORMAT "<L%03d>"
211 #define NEW_ISEQ(node, name, type, line_no) \
212 new_child_iseq(iseq, (node), rb_fstring(name), 0, (type), (line_no))
214 #define NEW_CHILD_ISEQ(node, name, type, line_no) \
215 new_child_iseq(iseq, (node), rb_fstring(name), iseq, (type), (line_no))
218 #define ADD_SEQ(seq1, seq2) \
219 APPEND_LIST((seq1), (seq2))
222 #define ADD_INSN(seq, line_node, insn) \
223 ADD_ELEM((seq), (LINK_ELEMENT *) new_insn_body(iseq, nd_line(line_node), nd_node_id(line_node), BIN(insn), 0))
226 #define ADD_SYNTHETIC_INSN(seq, line_no, node_id, insn) \
227 ADD_ELEM((seq), (LINK_ELEMENT *) new_insn_body(iseq, (line_no), (node_id), BIN(insn), 0))
230 #define INSERT_BEFORE_INSN(next, line_no, node_id, insn) \
231 ELEM_INSERT_PREV(&(next)->link, (LINK_ELEMENT *) new_insn_body(iseq, line_no, node_id, BIN(insn), 0))
234 #define INSERT_AFTER_INSN(prev, line_no, node_id, insn) \
235 ELEM_INSERT_NEXT(&(prev)->link, (LINK_ELEMENT *) new_insn_body(iseq, line_no, node_id, BIN(insn), 0))
238 #define ADD_INSN1(seq, line_node, insn, op1) \
239 ADD_ELEM((seq), (LINK_ELEMENT *) \
240 new_insn_body(iseq, nd_line(line_node), nd_node_id(line_node), BIN(insn), 1, (VALUE)(op1)))
243 #define INSERT_BEFORE_INSN1(next, line_no, node_id, insn, op1) \
244 ELEM_INSERT_PREV(&(next)->link, (LINK_ELEMENT *) \
245 new_insn_body(iseq, line_no, node_id, BIN(insn), 1, (VALUE)(op1)))
248 #define INSERT_AFTER_INSN1(prev, line_no, node_id, insn, op1) \
249 ELEM_INSERT_NEXT(&(prev)->link, (LINK_ELEMENT *) \
250 new_insn_body(iseq, line_no, node_id, BIN(insn), 1, (VALUE)(op1)))
252 #define LABEL_REF(label) ((label)->refcnt++)
255 #define ADD_INSNL(seq, line_node, insn, label) (ADD_INSN1(seq, line_node, insn, label), LABEL_REF(label))
257 #define ADD_INSN2(seq, line_node, insn, op1, op2) \
258 ADD_ELEM((seq), (LINK_ELEMENT *) \
259 new_insn_body(iseq, nd_line(line_node), nd_node_id(line_node), BIN(insn), 2, (VALUE)(op1), (VALUE)(op2)))
261 #define ADD_INSN3(seq, line_node, insn, op1, op2, op3) \
262 ADD_ELEM((seq), (LINK_ELEMENT *) \
263 new_insn_body(iseq, nd_line(line_node), nd_node_id(line_node), BIN(insn), 3, (VALUE)(op1), (VALUE)(op2), (VALUE)(op3)))
266 #define ADD_SEND(seq, line_node, id, argc) \
267 ADD_SEND_R((seq), (line_node), (id), (argc), NULL, (VALUE)INT2FIX(0), NULL)
269 #define ADD_SEND_WITH_FLAG(seq, line_node, id, argc, flag) \
270 ADD_SEND_R((seq), (line_node), (id), (argc), NULL, (VALUE)(flag), NULL)
272 #define ADD_SEND_WITH_BLOCK(seq, line_node, id, argc, block) \
273 ADD_SEND_R((seq), (line_node), (id), (argc), (block), (VALUE)INT2FIX(0), NULL)
275 #define ADD_CALL_RECEIVER(seq, line_node) \
276 ADD_INSN((seq), (line_node), putself)
278 #define ADD_CALL(seq, line_node, id, argc) \
279 ADD_SEND_R((seq), (line_node), (id), (argc), NULL, (VALUE)INT2FIX(VM_CALL_FCALL), NULL)
281 #define ADD_CALL_WITH_BLOCK(seq, line_node, id, argc, block) \
282 ADD_SEND_R((seq), (line_node), (id), (argc), (block), (VALUE)INT2FIX(VM_CALL_FCALL), NULL)
284 #define ADD_SEND_R(seq, line_node, id, argc, block, flag, keywords) \
285 ADD_ELEM((seq), (LINK_ELEMENT *) new_insn_send(iseq, nd_line(line_node), nd_node_id(line_node), (id), (VALUE)(argc), (block), (VALUE)(flag), (keywords)))
287 #define ADD_TRACE(seq, event) \
288 ADD_ELEM((seq), (LINK_ELEMENT *)new_trace_body(iseq, (event), 0))
289 #define ADD_TRACE_WITH_DATA(seq, event, data) \
290 ADD_ELEM((seq), (LINK_ELEMENT *)new_trace_body(iseq, (event), (data)))
292 static void iseq_add_getlocal(rb_iseq_t *iseq, LINK_ANCHOR *const seq, const NODE *const line_node, int idx, int level);
293 static void iseq_add_setlocal(rb_iseq_t *iseq, LINK_ANCHOR *const seq, const NODE *const line_node, int idx, int level);
295 #define ADD_GETLOCAL(seq, line_node, idx, level) iseq_add_getlocal(iseq, (seq), (line_node), (idx), (level))
296 #define ADD_SETLOCAL(seq, line_node, idx, level) iseq_add_setlocal(iseq, (seq), (line_node), (idx), (level))
299 #define ADD_LABEL(seq, label) \
300 ADD_ELEM((seq), (LINK_ELEMENT *) (label))
302 #define APPEND_LABEL(seq, before, label) \
303 APPEND_ELEM((seq), (before), (LINK_ELEMENT *) (label))
305 #define ADD_ADJUST(seq, line_node, label) \
306 ADD_ELEM((seq), (LINK_ELEMENT *) new_adjust_body(iseq, (label), nd_line(line_node)))
308 #define ADD_ADJUST_RESTORE(seq, label) \
309 ADD_ELEM((seq), (LINK_ELEMENT *) new_adjust_body(iseq, (label), -1))
311 #define LABEL_UNREMOVABLE(label) \
312 ((label) ? (LABEL_REF(label), (label)->unremovable=1) : 0)
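/* ADD_CATCH_ENTRY records one exception-table row as a hidden 5-element array:
 * (type, start label, end label, handler iseq, cont label). The label VALUEs are
 * stored with their low bit set -- presumably so the hidden array's slots are not
 * mistaken for live Ruby objects -- and iseq_set_exception_table() later strips
 * that bit with `& ~1` and resolves the labels to code positions. */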
313 #define ADD_CATCH_ENTRY(type, ls, le, iseqv, lc) do { \
314 VALUE _e = rb_ary_new3(5, (type), \
315 (VALUE)(ls) | 1, (VALUE)(le) | 1, \
316 (VALUE)(iseqv), (VALUE)(lc) | 1); \
317 LABEL_UNREMOVABLE(ls); \
320 if (NIL_P(ISEQ_COMPILE_DATA(iseq)->catch_table_ary)) \
321 RB_OBJ_WRITE(iseq, &ISEQ_COMPILE_DATA(iseq)->catch_table_ary, rb_ary_hidden_new(3)); \
322 rb_ary_push(ISEQ_COMPILE_DATA(iseq)->catch_table_ary, freeze_hide_obj(_e)); \
326 #define COMPILE(anchor, desc, node) \
327 (debug_compile("== " desc "\n", \
328 iseq_compile_each(iseq, (anchor), (node), 0)))
331 #define COMPILE_POPPED(anchor, desc, node) \
332 (debug_compile("== " desc "\n", \
333 iseq_compile_each(iseq, (anchor), (node), 1)))
336 #define COMPILE_(anchor, desc, node, popped) \
337 (debug_compile("== " desc "\n", \
338 iseq_compile_each(iseq, (anchor), (node), (popped))))
340 #define COMPILE_RECV(anchor, desc, node, recv) \
341 (private_recv_p(node) ? \
342 (ADD_INSN(anchor, node, putself), VM_CALL_FCALL) : \
343 COMPILE(anchor, desc, recv) ? 0 : -1)
345 #define OPERAND_AT(insn, idx) \
346 (((INSN*)(insn))->operands[(idx)])
348 #define INSN_OF(insn) \
349 (((INSN*)(insn))->insn_id)
351 #define IS_INSN(link) ((link)->type == ISEQ_ELEMENT_INSN)
352 #define IS_LABEL(link) ((link)->type == ISEQ_ELEMENT_LABEL)
353 #define IS_ADJUST(link) ((link)->type == ISEQ_ELEMENT_ADJUST)
354 #define IS_TRACE(link) ((link)->type == ISEQ_ELEMENT_TRACE)
355 #define IS_INSN_ID(iobj, insn) (INSN_OF(iobj) == BIN(insn))
356 #define IS_NEXT_INSN_ID(link, insn) \
357 ((link)->next && IS_INSN((link)->next) && IS_INSN_ID((link)->next, insn))
365 append_compile_error(const rb_iseq_t *iseq, int line, const char *fmt, ...)
367 VALUE err_info = ISEQ_COMPILE_DATA(iseq)->err_info;
368 VALUE file = rb_iseq_path(iseq);
373 err = rb_syntax_error_append(err, file, line, -1, NULL, fmt, args);
375 if (NIL_P(err_info)) {
376 RB_OBJ_WRITE(iseq, &ISEQ_COMPILE_DATA(iseq)->err_info, err);
379 else if (!err_info) {
390 compile_bug(rb_iseq_t *iseq, int line, const char *fmt, ...)
394 rb_report_bug_valist(rb_iseq_path(iseq), line, fmt, args);
400 #define COMPILE_ERROR append_compile_error
402 #define ERROR_ARGS_AT(n) iseq, nd_line(n),
403 #define ERROR_ARGS ERROR_ARGS_AT(node)
405 #define EXPECT_NODE(prefix, node, ndtype, errval) \
407 const NODE *error_node = (node); \
408 enum node_type error_type = nd_type(error_node); \
409 if (error_type != (ndtype)) { \
410 COMPILE_ERROR(ERROR_ARGS_AT(error_node) \
411 prefix ": " #ndtype " is expected, but %s", \
412 ruby_node_name(error_type)); \
417 #define EXPECT_NODE_NONULL(prefix, parent, ndtype, errval) \
419 COMPILE_ERROR(ERROR_ARGS_AT(parent) \
420 prefix ": must be " #ndtype ", but 0"); \
424 #define UNKNOWN_NODE(prefix, node, errval) \
426 const NODE *error_node = (node); \
427 COMPILE_ERROR(ERROR_ARGS_AT(error_node) prefix ": unknown node (%s)", \
428 ruby_node_name(nd_type(error_node))); \
435 #define CHECK(sub) if (!(sub)) {BEFORE_RETURN;return COMPILE_NG;}
436 #define NO_CHECK(sub) (void)(sub)
437 #define BEFORE_RETURN
441 #define DECL_ANCHOR(name) \
442 LINK_ANCHOR name[1] = {{{ISEQ_ELEMENT_ANCHOR,},}}
443 #define INIT_ANCHOR(name) \
444 (name->last = &name->anchor)
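/* A LINK_ANCHOR is the list head used while building instruction sequences:
 * `anchor` is a dummy LINK_ELEMENT and `last` points at the current tail, so
 * ADD_ELEM/APPEND_ELEM can append to the doubly linked list in O(1). */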
447 freeze_hide_obj(VALUE obj)
450 RBASIC_CLEAR_CLASS(obj);
454 #include "optinsn.inc"
455 #if OPT_INSTRUCTIONS_UNIFICATION
456 #include "optunifs.inc"
461 #define ISEQ_ARG iseq,
462 #define ISEQ_ARG_DECLARE rb_iseq_t *iseq,
465 #define ISEQ_ARG_DECLARE
469 #define gl_node_level ISEQ_COMPILE_DATA(iseq)->node_level
475 static int insn_data_length(INSN *iobj);
476 static int calc_sp_depth(int depth, INSN *iobj);
478 static INSN *new_insn_body(rb_iseq_t *iseq, int line_no, int node_id, enum ruby_vminsn_type insn_id, int argc, ...);
491 static int iseq_set_exception_local_table(rb_iseq_t *iseq);
495 static int iseq_set_exception_table(rb_iseq_t *iseq);
496 static int iseq_set_optargs_table(rb_iseq_t *iseq);
499 static int compile_hash(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *node, int method_call_keywords, int popped);
506 verify_list(ISEQ_ARG_DECLARE const char *info, LINK_ANCHOR *const anchor)
512 if (!compile_debug) return;
514 list = anchor->anchor.next;
515 plist = &anchor->anchor;
517 if (plist != list->prev) {
524 if (anchor->last != plist && anchor->last != 0) {
529 rb_bug("list verify error: %08x (%s)", flag, info);
534 #define verify_list(info, anchor) verify_list(iseq, (info), (anchor))
541 VALUE *original = rb_iseq_original_iseq(iseq);
543 while (i < ISEQ_BODY(iseq)->iseq_size) {
544 VALUE insn = original[i];
545 const char *types = insn_op_types(insn);
547 for (int j=0; types[j]; j++) {
548 if (types[j] == TS_CALLDATA) {
552 if (cc != vm_cc_empty()) {
554 rb_bug("call cache is not initialized by vm_cc_empty()");
561 for (unsigned int i=0; i<ISEQ_BODY(iseq)->ci_size; i++) {
562 struct rb_call_data *cd = &ISEQ_BODY(iseq)->call_data[i];
565 if (cc != NULL && cc != vm_cc_empty()) {
567 rb_bug("call cache is not initialized by vm_cc_empty()");
579 elem->prev = anchor->last;
580 anchor->last->next = elem;
582 verify_list("add", anchor);
592 elem->next = before->next;
593 elem->next->prev = elem;
595 if (before == anchor->last) anchor->last = elem;
596 verify_list("add", anchor);
599 #define ADD_ELEM(anchor, elem) ADD_ELEM(iseq, (anchor), (elem))
600 #define APPEND_ELEM(anchor, before, elem) APPEND_ELEM(iseq, (anchor), (before), (elem))
604 branch_coverage_valid_p(rb_iseq_t *iseq, int first_line)
606 if (!ISEQ_COVERAGE(iseq)) return 0;
607 if (!ISEQ_BRANCH_COVERAGE(iseq)) return 0;
608 if (first_line <= 0) return 0;
612 #define PTR2NUM(x) (rb_int2inum((intptr_t)(void *)(x)))
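/* PTR2NUM boxes a C pointer into a Ruby Integer; apparently used below to build
 * unique hash keys for the branch-coverage bookkeeping structures. */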
617 const int first_lineno = loc->beg_pos.lineno, first_column = loc->beg_pos.column;
618 const int last_lineno = loc->end_pos.lineno, last_column = loc->end_pos.column;
633 if (!branch_coverage_valid_p(iseq, loc->beg_pos.lineno)) return Qundef;
647 if (NIL_P(branch_base)) {
648 branch_base = setup_branch(loc, type, structure, key);
661 generate_dummy_line_node(int lineno, int node_id)
664 nd_set_line(&dummy, lineno);
665 nd_set_node_id(&dummy, node_id);
672 if (!branch_coverage_valid_p(iseq, loc->beg_pos.lineno)) return;
687 branch = setup_branch(loc, type, branches, key);
697 ADD_TRACE_WITH_DATA(seq, RUBY_EVENT_COVERAGE_BRANCH, counter_idx);
698 ADD_SYNTHETIC_INSN(seq, loc->end_pos.lineno, node_id, nop);
701 #define ISEQ_LAST_LINE(iseq) (ISEQ_COMPILE_DATA(iseq)->last_line)
704 validate_label(st_data_t name, st_data_t label, st_data_t arg)
708 if (!lobj->link.next) {
710 COMPILE_ERROR(iseq, lobj->position,
711 "%"PRIsVALUE": undefined label",
721 st_foreach(labels_table, validate_label, (st_data_t)iseq);
722 st_free_table(labels_table);
726 get_nd_recv(const NODE *node)
728 switch (nd_type(node)) {
730 return RNODE_CALL(node)->nd_recv;
732 return RNODE_OPCALL(node)->nd_recv;
736 return RNODE_QCALL(node)->nd_recv;
740 return RNODE_ATTRASGN(node)->nd_recv;
742 return RNODE_OP_ASGN1(node)->nd_recv;
744 return RNODE_OP_ASGN2(node)->nd_recv;
746 rb_bug("unexpected node: %s", ruby_node_name(nd_type(node)));
751 get_node_call_nd_mid(const NODE *node)
753 switch (nd_type(node)) {
755 return RNODE_CALL(node)->nd_mid;
757 return RNODE_OPCALL(node)->nd_mid;
759 return RNODE_FCALL(node)->nd_mid;
761 return RNODE_QCALL(node)->nd_mid;
763 return RNODE_VCALL(node)->nd_mid;
765 return RNODE_ATTRASGN(node)->nd_mid;
767 rb_bug("unexpected node: %s", ruby_node_name(nd_type(node)));
772 get_nd_args(const NODE *node)
774 switch (nd_type(node)) {
776 return RNODE_CALL(node)->nd_args;
778 return RNODE_OPCALL(node)->nd_args;
780 return RNODE_FCALL(node)->nd_args;
782 return RNODE_QCALL(node)->nd_args;
786 return RNODE_ATTRASGN(node)->nd_args;
788 rb_bug("unexpected node: %s", ruby_node_name(nd_type(node)));
793 get_node_colon_nd_mid(const NODE *node)
795 switch (nd_type(node)) {
797 return RNODE_COLON2(node)->nd_mid;
799 return RNODE_COLON3(node)->nd_mid;
801 rb_bug("unexpected node: %s", ruby_node_name(nd_type(node)));
806 get_nd_vid(const NODE *node)
808 switch (nd_type(node)) {
810 return RNODE_LASGN(node)->nd_vid;
812 return RNODE_DASGN(node)->nd_vid;
814 return RNODE_IASGN(node)->nd_vid;
816 return RNODE_CVASGN(node)->nd_vid;
818 rb_bug("unexpected node: %s", ruby_node_name(nd_type(node)));
823 get_nd_value(const NODE *node)
825 switch (nd_type(node)) {
827 return RNODE_LASGN(node)->nd_value;
829 return RNODE_DASGN(node)->nd_value;
831 rb_bug("unexpected node: %s", ruby_node_name(nd_type(node)));
836 get_string_value(const NODE *node)
838 switch (nd_type(node)) {
840 return rb_node_str_string_val(node);
842 return rb_node_file_path_val(node);
844 rb_bug("unexpected node: %s", ruby_node_name(nd_type(node)));
854 (*ifunc->func)(iseq, ret, ifunc->data);
856 ADD_SYNTHETIC_INSN(ret, ISEQ_COMPILE_DATA(iseq)->last_line, -1, leave);
858 CHECK(iseq_setup_insn(iseq, ret));
859 return iseq_setup(iseq, ret);
862 static bool drop_unreachable_return(LINK_ANCHOR *ret);
871 NO_CHECK(COMPILE(ret, "nil", node));
872 iseq_set_local_table(iseq, 0, 0);
875 else if (nd_type_p(node, NODE_SCOPE)) {
877 iseq_set_local_table(iseq, RNODE_SCOPE(node)->nd_tbl, (NODE *)RNODE_SCOPE(node)->nd_args);
878 iseq_set_arguments(iseq, ret, (NODE *)RNODE_SCOPE(node)->nd_args);
880 switch (ISEQ_BODY(iseq)->type) {
881 case ISEQ_TYPE_BLOCK:
883 LABEL *start = ISEQ_COMPILE_DATA(iseq)->start_label = NEW_LABEL(0);
884 LABEL *end = ISEQ_COMPILE_DATA(iseq)->end_label = NEW_LABEL(0);
886 start->rescued = LABEL_RESCUE_BEG;
887 end->rescued = LABEL_RESCUE_END;
890 ADD_SYNTHETIC_INSN(ret, ISEQ_BODY(iseq)->location.first_lineno, -1, nop);
891 ADD_LABEL(ret, start);
892 CHECK(COMPILE(ret, "block body", RNODE_SCOPE(node)->nd_body));
895 ISEQ_COMPILE_DATA(iseq)->last_line = ISEQ_BODY(iseq)->location.code_location.end_pos.lineno;
898 ADD_CATCH_ENTRY(CATCH_TYPE_REDO, start, end, NULL, start);
899 ADD_CATCH_ENTRY(CATCH_TYPE_NEXT, start, end, NULL, end);
902 case ISEQ_TYPE_CLASS:
905 CHECK(COMPILE(ret, "scoped node", RNODE_SCOPE(node)->nd_body));
907 ISEQ_COMPILE_DATA(iseq)->last_line = nd_line(node);
910 case ISEQ_TYPE_METHOD:
912 ISEQ_COMPILE_DATA(iseq)->root_node = RNODE_SCOPE(node)->nd_body;
914 CHECK(COMPILE(ret, "scoped node", RNODE_SCOPE(node)->nd_body));
915 ISEQ_COMPILE_DATA(iseq)->root_node = RNODE_SCOPE(node)->nd_body;
917 ISEQ_COMPILE_DATA(iseq)->last_line = nd_line(node);
921 CHECK(COMPILE(ret, "scoped node", RNODE_SCOPE(node)->nd_body));
928 #define INVALID_ISEQ_TYPE(type) \
929 ISEQ_TYPE_##type: m = #type; goto invalid_iseq_type
930 switch (ISEQ_BODY(iseq)->type) {
931 case INVALID_ISEQ_TYPE(METHOD);
932 case INVALID_ISEQ_TYPE(CLASS);
933 case INVALID_ISEQ_TYPE(BLOCK);
934 case INVALID_ISEQ_TYPE(EVAL);
935 case INVALID_ISEQ_TYPE(MAIN);
936 case INVALID_ISEQ_TYPE(TOP);
937 #undef INVALID_ISEQ_TYPE
938 case ISEQ_TYPE_RESCUE:
939 iseq_set_exception_local_table(iseq);
940 CHECK(COMPILE(ret, "rescue", node));
942 case ISEQ_TYPE_ENSURE:
943 iseq_set_exception_local_table(iseq);
944 CHECK(COMPILE_POPPED(ret, "ensure", node));
946 case ISEQ_TYPE_PLAIN:
947 CHECK(COMPILE(ret, "ensure", node));
950 COMPILE_ERROR(ERROR_ARGS "unknown scope: %d", ISEQ_BODY(iseq)->type);
953 COMPILE_ERROR(ERROR_ARGS "compile/ISEQ_TYPE_%s should not be reached", m);
958 if (ISEQ_BODY(iseq)->type == ISEQ_TYPE_RESCUE || ISEQ_BODY(iseq)->type == ISEQ_TYPE_ENSURE) {
959 NODE dummy_line_node = generate_dummy_line_node(0, -1);
960 ADD_GETLOCAL(ret, &dummy_line_node, LVAR_ERRINFO, 0);
961 ADD_INSN1(ret, &dummy_line_node, throw, INT2FIX(0));
963 else if (!drop_unreachable_return(ret)) {
964 ADD_SYNTHETIC_INSN(ret, ISEQ_COMPILE_DATA(iseq)->last_line, -1, leave);
968 if (ISEQ_COMPILE_DATA(iseq)->labels_table) {
969 st_table *labels_table = ISEQ_COMPILE_DATA(iseq)->labels_table;
970 ISEQ_COMPILE_DATA(iseq)->labels_table = 0;
971 validate_labels(iseq, labels_table);
974 CHECK(iseq_setup_insn(iseq, ret));
975 return iseq_setup(iseq, ret);
979 rb_iseq_translate_threaded_code(rb_iseq_t *iseq)
981 #if OPT_DIRECT_THREADED_CODE || OPT_CALL_THREADED_CODE
982 const void *const *table = rb_vm_get_insns_address_table();
984 VALUE *encoded = (VALUE *)ISEQ_BODY(iseq)->iseq_encoded;
986 for (i = 0; i < ISEQ_BODY(iseq)->iseq_size; ) {
987 int insn = (int)ISEQ_BODY(iseq)->iseq_encoded[i];
988 int len = insn_len(insn);
989 encoded[i] = (VALUE)table[insn];
996 rb_yjit_live_iseq_count++;
997 rb_yjit_iseq_alloc_count++;
1004 rb_iseq_original_iseq(const rb_iseq_t *iseq)
1006 VALUE *original_code;
1008 if (ISEQ_ORIGINAL_ISEQ(iseq)) return ISEQ_ORIGINAL_ISEQ(iseq);
1009 original_code = ISEQ_ORIGINAL_ISEQ_ALLOC(iseq, ISEQ_BODY(iseq)->iseq_size);
1010 MEMCPY(original_code, ISEQ_BODY(iseq)->iseq_encoded, VALUE, ISEQ_BODY(iseq)->iseq_size);
1012 #if OPT_DIRECT_THREADED_CODE || OPT_CALL_THREADED_CODE
1016 for (i = 0; i < ISEQ_BODY(iseq)->iseq_size; ) {
1017 const void *addr = (const void *)original_code[i];
1018 const int insn = rb_vm_insn_addr2insn(addr);
1020 original_code[i] = insn;
1021 i += insn_len(insn);
1025 return original_code;
1038 #if defined(__sparc) && SIZEOF_VOIDP == 4 && defined(__GNUC__)
1039 #define STRICT_ALIGNMENT
1045 #if defined(__OpenBSD__)
1046 #include <sys/endian.h>
1047 #ifdef __STRICT_ALIGNMENT
1048 #define STRICT_ALIGNMENT
1052 #ifdef STRICT_ALIGNMENT
1053 #if defined(HAVE_TRUE_LONG_LONG) && SIZEOF_LONG_LONG > SIZEOF_VALUE
1054 #define ALIGNMENT_SIZE SIZEOF_LONG_LONG
1056 #define ALIGNMENT_SIZE SIZEOF_VALUE
1058 #define PADDING_SIZE_MAX ((size_t)((ALIGNMENT_SIZE) - 1))
1059 #define ALIGNMENT_SIZE_MASK PADDING_SIZE_MAX
1062 #define PADDING_SIZE_MAX 0
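/* On STRICT_ALIGNMENT targets, arena allocations below are padded so that
 * operands wider than a pointer stay naturally aligned; elsewhere no padding
 * is needed and PADDING_SIZE_MAX collapses to 0. */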
1065 #ifdef STRICT_ALIGNMENT
1068 calc_padding(void *ptr, size_t size)
1073 mis = (size_t)ptr & ALIGNMENT_SIZE_MASK;
1075 padding = ALIGNMENT_SIZE - mis;
1081 #if ALIGNMENT_SIZE > SIZEOF_VALUE
1082 if (size == sizeof(VALUE) && padding == sizeof(VALUE)) {
1096 #ifdef STRICT_ALIGNMENT
1097 size_t padding = calc_padding((void *)&storage->buff[storage->pos], size);
1099 const size_t padding = 0;
1103 if (storage->pos + size + padding > storage->size) {
1104 unsigned int alloc_size = storage->size;
1106 while (alloc_size < size + PADDING_SIZE_MAX) {
1110 storage->next = (void *)ALLOC_N(char, alloc_size +
1112 storage = *arena = storage->next;
1115 storage->size = alloc_size;
1116 #ifdef STRICT_ALIGNMENT
1117 padding = calc_padding((void *)&storage->buff[storage->pos], size);
1121 #ifdef STRICT_ALIGNMENT
1122 storage->pos += (int)padding;
1125 ptr = (void *)&storage->buff[storage->pos];
1126 storage->pos += (int)size;
1131 compile_data_alloc(rb_iseq_t *iseq, size_t size)
1134 return compile_data_alloc_with_arena(arena, size);
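/* The compile_data_alloc* helpers bump-allocate INSNs, LABELs and operand
 * arrays from the per-compilation arena; everything is released in one shot
 * when compilation of the iseq finishes, so there is no per-object free. */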
1137 static inline void *
1138 compile_data_alloc2(rb_iseq_t *iseq, size_t x, size_t y)
1141 return compile_data_alloc(iseq, size);
1144 static inline void *
1145 compile_data_calloc2(rb_iseq_t *iseq, size_t x, size_t y)
1148 void *p = compile_data_alloc(iseq, size);
1154 compile_data_alloc_insn(rb_iseq_t *iseq)
1157 return (INSN *)compile_data_alloc_with_arena(arena, sizeof(INSN));
1161 compile_data_alloc_label(rb_iseq_t *iseq)
1163 return (LABEL *)compile_data_alloc(iseq, sizeof(LABEL));
1167 compile_data_alloc_adjust(rb_iseq_t *iseq)
1169 return (ADJUST *)compile_data_alloc(iseq, sizeof(ADJUST));
1173 compile_data_alloc_trace(rb_iseq_t *iseq)
1175 return (TRACE *)compile_data_alloc(iseq, sizeof(TRACE));
1184 elem2->next = elem1->next;
1185 elem2->prev = elem1;
1186 elem1->next = elem2;
1188 elem2->next->prev = elem2;
1198 elem2->prev = elem1->prev;
1199 elem2->next = elem1;
1200 elem1->prev = elem2;
1202 elem2->prev->next = elem2;
1212 elem2->prev = elem1->prev;
1213 elem2->next = elem1->next;
1215 elem1->prev->next = elem2;
1218 elem1->next->prev = elem2;
1225 elem->prev->next = elem->next;
1227 elem->next->prev = elem->prev;
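/* The element helpers above (insert-next, insert-prev, replace, remove) are
 * plain doubly-linked-list splices on LINK_ELEMENTs; they never free anything,
 * since every element lives in the compile-data arena. */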
1234 return anchor->anchor.next;
1240 return anchor->last;
1247 switch (elem->type) {
1248 case ISEQ_ELEMENT_INSN:
1249 case ISEQ_ELEMENT_ADJUST:
1259 LIST_INSN_SIZE_ONE(const LINK_ANCHOR *const anchor)
1261 LINK_ELEMENT *first_insn = ELEM_FIRST_INSN(FIRST_ELEMENT(anchor));
1262 if (first_insn != NULL &&
1263 ELEM_FIRST_INSN(first_insn->next) == NULL) {
1272 LIST_INSN_SIZE_ZERO(const LINK_ANCHOR *const anchor)
1274 if (ELEM_FIRST_INSN(FIRST_ELEMENT(anchor)) == NULL) {
1292 if (anc2->anchor.next) {
1293 anc1->last->next = anc2->anchor.next;
1294 anc2->anchor.next->prev = anc1->last;
1295 anc1->last = anc2->last;
1297 verify_list("append", anc1);
1300 #define APPEND_LIST(anc1, anc2) APPEND_LIST(iseq, (anc1), (anc2))
1309 printf("anch: %p, frst: %p, last: %p\n", (void *)&anchor->anchor,
1310 (void *)anchor->anchor.next, (void *)anchor->last);
1312 printf("curr: %p, next: %p, prev: %p, type: %d\n", (void *)list, (void *)list->next,
1313 (void *)list->prev, (int)list->type);
1318 dump_disasm_list_with_cursor(anchor->anchor.next, cur, 0);
1319 verify_list("debug list", anchor);
1322 #define debug_list(anc, cur) debug_list(iseq, (anc), (cur))
1325 #define debug_list(anc, cur) ((void)0)
1331 TRACE *trace = compile_data_alloc_trace(iseq);
1333 trace->link.type = ISEQ_ELEMENT_TRACE;
1334 trace->link.next = NULL;
1335 trace->event = event;
1342 new_label_body(rb_iseq_t *iseq, long line)
1344 LABEL *labelobj = compile_data_alloc_label(iseq);
1346 labelobj->link.type = ISEQ_ELEMENT_LABEL;
1347 labelobj->link.next = 0;
1349 labelobj->label_no = ISEQ_COMPILE_DATA(iseq)->label_no++;
1350 labelobj->sc_state = 0;
1352 labelobj->refcnt = 0;
1354 labelobj->rescued = LABEL_RESCUE_NONE;
1355 labelobj->unremovable = 0;
1356 labelobj->position = -1;
1363 ADJUST *adjust = compile_data_alloc_adjust(iseq);
1364 adjust->link.type = ISEQ_ELEMENT_ADJUST;
1365 adjust->link.next = 0;
1366 adjust->label = label;
1367 adjust->line_no = line;
1368 LABEL_UNREMOVABLE(label);
1375 const char *types = insn_op_types(insn->insn_id);
1376 for (int j = 0; types[j]; j++) {
1377 char type = types[j];
1384 func(OPERAND_AT(insn, j), data);
1393 iseq_insn_each_object_write_barrier(VALUE obj, VALUE iseq)
1399 new_insn_core(rb_iseq_t *iseq, int line_no, int node_id, int insn_id, int argc, VALUE *argv)
1401 INSN *iobj = compile_data_alloc_insn(iseq);
1405 iobj->link.type = ISEQ_ELEMENT_INSN;
1406 iobj->link.next = 0;
1407 iobj->insn_id = insn_id;
1408 iobj->insn_info.line_no = line_no;
1409 iobj->insn_info.node_id = node_id;
1410 iobj->insn_info.events = 0;
1411 iobj->operands = argv;
1412 iobj->operand_size = argc;
1415 iseq_insn_each_markable_object(iobj, iseq_insn_each_object_write_barrier, (VALUE)iseq);
1421 new_insn_body(rb_iseq_t *iseq, int line_no, int node_id, enum ruby_vminsn_type insn_id, int argc, ...)
1423 VALUE *operands = 0;
1427 va_start(argv, argc);
1428 operands = compile_data_alloc2(iseq, sizeof(VALUE), argc);
1429 for (i = 0; i < argc; i++) {
1435 return new_insn_core(iseq, line_no, node_id, insn_id, argc, operands);
1441 VM_ASSERT(argc >= 0);
1444 flag |= VM_CALL_KWARG;
1445 argc += kw_arg->keyword_len;
1448 if (!(flag & (VM_CALL_ARGS_SPLAT | VM_CALL_ARGS_BLOCKARG | VM_CALL_KWARG | VM_CALL_KW_SPLAT | VM_CALL_FORWARDING))
1449 && !has_blockiseq) {
1450 flag |= VM_CALL_ARGS_SIMPLE;
1453 ISEQ_BODY(iseq)->ci_size++;
1454 const struct rb_callinfo *ci = vm_ci_new(mid, flag, argc, kw_arg);
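/* new_callinfo: argc at this point already includes keyword arguments (added
 * above), and VM_CALL_ARGS_SIMPLE is set only when there is no splat, block
 * argument, keywords or block iseq, which lets call sites take the fast path. */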
1462 VALUE *operands = compile_data_calloc2(iseq, sizeof(VALUE), 2);
1465 operands[1] = (VALUE)blockiseq;
1472 if (vm_ci_flag((struct rb_callinfo *)ci) & VM_CALL_FORWARDING) {
1473 insn = new_insn_core(iseq, line_no, node_id, BIN(sendforward), 2, operands);
1476 insn = new_insn_core(iseq, line_no, node_id, BIN(send), 2, operands);
1489 VALUE ast_value = rb_ruby_ast_new(node);
1491 debugs("[new_child_iseq]> ---------------------------------------\n");
1492 int isolated_depth = ISEQ_COMPILE_DATA(iseq)->isolated_depth;
1493 ret_iseq = rb_iseq_new_with_opt(ast_value, name,
1494 rb_iseq_path(iseq), rb_iseq_realpath(iseq),
1496 isolated_depth ? isolated_depth + 1 : 0,
1497 type, ISEQ_COMPILE_DATA(iseq)->option,
1498 ISEQ_BODY(iseq)->variable.script_lines);
1499 debugs("[new_child_iseq]< ---------------------------------------\n");
1509 debugs("[new_child_iseq_with_callback]> ---------------------------------------\n");
1510 ret_iseq = rb_iseq_new_with_callback(ifunc, name,
1511 rb_iseq_path(iseq), rb_iseq_realpath(iseq),
1512 line_no, parent, type, ISEQ_COMPILE_DATA(iseq)->option);
1513 debugs("[new_child_iseq_with_callback]< ---------------------------------------\n");
1521 ISEQ_COMPILE_DATA(iseq)->catch_except_p = true;
1522 if (ISEQ_BODY(iseq)->parent_iseq != NULL) {
1523 if (ISEQ_COMPILE_DATA(ISEQ_BODY(iseq)->parent_iseq)) {
1524 set_catch_except_p((rb_iseq_t *) ISEQ_BODY(iseq)->parent_iseq);
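/* set_catch_except_p marks this iseq and walks up the parent chain: if a child
 * can throw or catch, its ancestors apparently must also be treated as able to
 * observe non-local exits. */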
1547 while (pos < body->iseq_size) {
1548 insn = rb_vm_insn_decode(body->iseq_encoded[pos]);
1549 if (insn == BIN(throw)) {
1550 set_catch_except_p(iseq);
1553 pos += insn_len(insn);
1559 for (i = 0; i < ct->size; i++) {
1561 UNALIGNED_MEMBER_PTR(ct, entries[i]);
1562 if (entry->type != CATCH_TYPE_BREAK
1563 && entry->type != CATCH_TYPE_NEXT
1564 && entry->type != CATCH_TYPE_REDO) {
1566 ISEQ_COMPILE_DATA(iseq)->catch_except_p = true;
1573 iseq_insert_nop_between_end_and_cont(rb_iseq_t *iseq)
1575 VALUE catch_table_ary = ISEQ_COMPILE_DATA(iseq)->catch_table_ary;
1576 if (NIL_P(catch_table_ary)) return;
1577 unsigned int i, tlen = (unsigned int)RARRAY_LEN(catch_table_ary);
1579 for (i = 0; i < tlen; i++) {
1585 enum rb_catch_type ct = (enum rb_catch_type)(ptr[0] & 0xffff);
1587 if (ct != CATCH_TYPE_BREAK
1588 && ct != CATCH_TYPE_NEXT
1589 && ct != CATCH_TYPE_REDO) {
1591 for (e = end; e && (IS_LABEL(e) || IS_TRACE(e)); e = e->next) {
1593 INSN *nop = new_insn_core(iseq, 0, -1, BIN(nop), 0, 0);
1594 ELEM_INSERT_NEXT(end, &nop->link);
1607 if (RTEST(ISEQ_COMPILE_DATA(iseq)->err_info))
1612 if (compile_debug > 5)
1613 dump_disasm_list(FIRST_ELEMENT(anchor));
1615 debugs("[compile step 3.1 (iseq_optimize)]\n");
1616 iseq_optimize(iseq, anchor);
1618 if (compile_debug > 5)
1619 dump_disasm_list(FIRST_ELEMENT(anchor));
1621 if (ISEQ_COMPILE_DATA(iseq)->option->instructions_unification) {
1622 debugs("[compile step 3.2 (iseq_insns_unification)]\n");
1623 iseq_insns_unification(iseq, anchor);
1624 if (compile_debug > 5)
1625 dump_disasm_list(FIRST_ELEMENT(anchor));
1628 debugs("[compile step 3.4 (iseq_insert_nop_between_end_and_cont)]\n");
1629 iseq_insert_nop_between_end_and_cont(iseq);
1630 if (compile_debug > 5)
1631 dump_disasm_list(FIRST_ELEMENT(anchor));
1639 if (RTEST(ISEQ_COMPILE_DATA(iseq)->err_info))
1642 debugs("[compile step 4.1 (iseq_set_sequence)]\n");
1643 if (!iseq_set_sequence(iseq, anchor)) return COMPILE_NG;
1644 if (compile_debug > 5)
1645 dump_disasm_list(FIRST_ELEMENT(anchor));
1647 debugs("[compile step 4.2 (iseq_set_exception_table)]\n");
1648 if (!iseq_set_exception_table(iseq)) return COMPILE_NG;
1650 debugs("[compile step 4.3 (set_optargs_table)] \n");
1651 if (!iseq_set_optargs_table(iseq)) return COMPILE_NG;
1653 debugs("[compile step 5 (iseq_translate_threaded_code)] \n");
1654 if (!rb_iseq_translate_threaded_code(iseq)) return COMPILE_NG;
1656 debugs("[compile step 6 (update_catch_except_flags)] \n");
1658 update_catch_except_flags(iseq, ISEQ_BODY(iseq));
1660 debugs("[compile step 6.1 (remove unused catch tables)] \n");
1662 if (!ISEQ_COMPILE_DATA(iseq)->catch_except_p && ISEQ_BODY(iseq)->catch_table) {
1663 xfree(ISEQ_BODY(iseq)->catch_table);
1664 ISEQ_BODY(iseq)->catch_table = NULL;
1667 #if VM_INSN_INFO_TABLE_IMPL == 2
1668 if (ISEQ_BODY(iseq)->insns_info.succ_index_table == NULL) {
1669 debugs("[compile step 7 (rb_iseq_insns_info_encode_positions)] \n");
1670 rb_iseq_insns_info_encode_positions(iseq);
1674 if (compile_debug > 1) {
1675 VALUE str = rb_iseq_disasm(iseq);
1678 verify_call_cache(iseq);
1679 debugs("[compile step: finish]\n");
1685 iseq_set_exception_local_table(rb_iseq_t *iseq)
1687 ISEQ_BODY(iseq)->local_table_size = numberof(rb_iseq_shared_exc_local_tbl);
1688 ISEQ_BODY(iseq)->local_table = rb_iseq_shared_exc_local_tbl;
1696 while (iseq != ISEQ_BODY(iseq)->local_iseq) {
1698 iseq = ISEQ_BODY(iseq)->parent_iseq;
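/* Walks from the current block up to the iseq that owns the local table
 * (local_iseq), counting how many scopes away it is; that count becomes the
 * `level` operand of getlocal/setlocal. */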
1704 get_dyna_var_idx_at_raw(const rb_iseq_t *iseq, ID id)
1708 for (i = 0; i < ISEQ_BODY(iseq)->local_table_size; i++) {
1709 if (ISEQ_BODY(iseq)->local_table[i] == id) {
1719 int idx = get_dyna_var_idx_at_raw(ISEQ_BODY(iseq)->local_iseq, id);
1722 COMPILE_ERROR(iseq, ISEQ_LAST_LINE(iseq),
1723 "get_local_var_idx: %d", idx);
1730 get_dyna_var_idx(const rb_iseq_t *iseq, ID id, int *level, int *ls)
1732 int lv = 0, idx = -1;
1733 const rb_iseq_t *const topmost_iseq = iseq;
1736 idx = get_dyna_var_idx_at_raw(iseq, id);
1740 iseq = ISEQ_BODY(iseq)->parent_iseq;
1745 COMPILE_ERROR(topmost_iseq, ISEQ_LAST_LINE(topmost_iseq),
1746 "get_dyna_var_idx: -1");
1750 *ls = ISEQ_BODY(iseq)->local_table_size;
1755 iseq_local_block_param_p(const rb_iseq_t *iseq, unsigned int idx, unsigned int level)
1759 iseq = ISEQ_BODY(iseq)->parent_iseq;
1762 body = ISEQ_BODY(iseq);
1763 if (body->local_iseq == iseq &&
1764 body->param.flags.has_block &&
1765 body->local_table_size - body->param.block_start == idx) {
1774 iseq_block_param_id_p(const rb_iseq_t *iseq, ID id, int *pidx, int *plevel)
1777 int idx = get_dyna_var_idx(iseq, id, &level, &ls);
1778 if (iseq_local_block_param_p(iseq, ls - idx, level)) {
1789 access_outer_variables(const rb_iseq_t *iseq, int level, ID id, bool write)
1791 int isolated_depth = ISEQ_COMPILE_DATA(iseq)->isolated_depth;
1793 if (isolated_depth && level >= isolated_depth) {
1795 COMPILE_ERROR(iseq, ISEQ_LAST_LINE(iseq), "can not yield from isolated Proc");
1798 COMPILE_ERROR(iseq, ISEQ_LAST_LINE(iseq), "can not access variable '%s' from isolated Proc", rb_id2name(id));
1802 for (int i=0; i<level; i++) {
1804 struct rb_id_table *ovs = ISEQ_BODY(iseq)->outer_variables;
1807 ovs = ISEQ_BODY(iseq)->outer_variables = rb_id_table_create(8);
1810 if (rb_id_table_lookup(ISEQ_BODY(iseq)->outer_variables, id, &val)) {
1811 if (write && !val) {
1812 rb_id_table_insert(ISEQ_BODY(iseq)->outer_variables, id, Qtrue);
1816 rb_id_table_insert(ISEQ_BODY(iseq)->outer_variables, id, RBOOL(write));
1819 iseq = ISEQ_BODY(iseq)->parent_iseq;
1824 iseq_lvar_id(const rb_iseq_t *iseq, int idx, int level)
1826 for (int i=0; i<level; i++) {
1827 iseq = ISEQ_BODY(iseq)->parent_iseq;
1830 ID id = ISEQ_BODY(iseq)->local_table[ISEQ_BODY(iseq)->local_table_size - idx];
1838 if (iseq_local_block_param_p(iseq, idx, level)) {
1839 ADD_INSN2(seq, line_node, getblockparam, INT2FIX((idx) + VM_ENV_DATA_SIZE - 1), INT2FIX(level));
1842 ADD_INSN2(seq, line_node, getlocal, INT2FIX((idx) + VM_ENV_DATA_SIZE - 1), INT2FIX(level));
1844 if (level > 0) access_outer_variables(iseq, level, iseq_lvar_id(iseq, idx, level), Qfalse);
1850 if (iseq_local_block_param_p(iseq, idx, level)) {
1851 ADD_INSN2(seq, line_node, setblockparam, INT2FIX((idx) + VM_ENV_DATA_SIZE - 1), INT2FIX(level));
1854 ADD_INSN2(seq, line_node, setlocal, INT2FIX((idx) + VM_ENV_DATA_SIZE - 1), INT2FIX(level));
1856 if (level > 0) access_outer_variables(iseq, level, iseq_lvar_id(iseq, idx, level), Qtrue);
1865 if (body->param.flags.has_opt ||
1866 body->param.flags.has_post ||
1867 body->param.flags.has_rest ||
1868 body->param.flags.has_block ||
1869 body->param.flags.has_kw ||
1870 body->param.flags.has_kwrest) {
1872 if (body->param.flags.has_block) {
1873 body->param.size = body->param.block_start + 1;
1875 else if (body->param.flags.has_kwrest) {
1876 body->param.size = body->param.keyword->rest_start + 1;
1878 else if (body->param.flags.has_kw) {
1879 body->param.size = body->param.keyword->bits_start + 1;
1881 else if (body->param.flags.has_post) {
1884 else if (body->param.flags.has_rest) {
1885 body->param.size = body->param.rest_start + 1;
1887 else if (body->param.flags.has_opt) {
1905 struct rb_iseq_param_keyword *keyword;
1908 int kw = 0, rkw = 0, di = 0, i;
1910 body->param.flags.has_kw = TRUE;
1911 body->param.keyword = keyword = ZALLOC_N(struct rb_iseq_param_keyword, 1);
1915 node = node->nd_next;
1918 keyword->bits_start = arg_size++;
1920 node = args->kw_args;
1922 const NODE *val_node = get_nd_value(node->nd_body);
1925 if (val_node == NODE_SPECIAL_REQUIRED_KEYWORD) {
1929 switch (nd_type(val_node)) {
1931 dv = rb_node_sym_string_val(val_node);
1934 dv = rb_node_regx_string_val(val_node);
1937 dv = rb_node_line_lineno_val(val_node);
1940 dv = rb_node_integer_literal_val(val_node);
1943 dv = rb_node_float_literal_val(val_node);
1946 dv = rb_node_rational_literal_val(val_node);
1948 case NODE_IMAGINARY:
1949 dv = rb_node_imaginary_literal_val(val_node);
1952 dv = rb_node_encoding_val(val_node);
1964 NO_CHECK(COMPILE_POPPED(optargs, "kwarg", RNODE(node)));
1968 keyword->num = ++di;
1972 node = node->nd_next;
1977 if (RNODE_DVAR(args->kw_rest_arg)->nd_vid != 0) {
1978 ID kw_id = iseq->body->local_table[arg_size];
1979 keyword->rest_start = arg_size++;
1980 body->param.flags.has_kwrest = TRUE;
1982 if (kw_id == idPow) body->param.flags.anon_kwrest = TRUE;
1984 keyword->required_num = rkw;
1985 keyword->table = &body->local_table[keyword->bits_start - keyword->num];
1990 for (i = 0; i < RARRAY_LEN(default_values); i++) {
1992 if (dv == complex_mark) dv = Qundef;
1996 keyword->default_values = dvs;
2005 if (!body->param.flags.use_block) {
2006 body->param.flags.use_block = 1;
2010 if (!vm->unused_block_warning_strict) {
2011 st_data_t key = (st_data_t)rb_intern_str(body->location.label);
2012 st_insert(vm->unused_block_warning_table, key, 1);
2020 debugs("iseq_set_arguments: %s\n", node_args ? "" : "0");
2024 struct rb_args_info *args = &RNODE_ARGS(node_args)->nd_ainfo;
2030 EXPECT_NODE("iseq_set_arguments", node_args, NODE_ARGS, COMPILE_NG);
2032 body->param.flags.ruby2_keywords = args->ruby2_keywords;
2033 body->param.lead_num = arg_size = (int)args->pre_args_num;
2034 if (body->param.lead_num > 0) body->param.flags.has_lead = TRUE;
2035 debugs(" - argc: %d\n", body->param.lead_num);
2037 rest_id = args->rest_arg;
2038 if (rest_id == NODE_SPECIAL_EXCESSIVE_COMMA) {
2042 block_id = args->block_arg;
2044 bool optimized_forward = (args->forwarding && args->pre_args_num == 0 && !args->opt_args);
2046 if (optimized_forward) {
2051 if (args->opt_args) {
2059 label = NEW_LABEL(nd_line(RNODE(node)));
2061 ADD_LABEL(optargs, label);
2062 NO_CHECK(COMPILE_POPPED(optargs, "optarg", node->nd_body));
2063 node = node->nd_next;
2068 label = NEW_LABEL(nd_line(node_args));
2070 ADD_LABEL(optargs, label);
2075 for (j = 0; j < i+1; j++) {
2080 body->param.flags.has_opt = TRUE;
2081 body->param.opt_num = i;
2082 body->param.opt_table = opt_table;
2087 body->param.rest_start = arg_size++;
2088 body->param.flags.has_rest = TRUE;
2089 if (rest_id == '*') body->param.flags.anon_rest = TRUE;
2093 if (args->first_post_arg) {
2094 body->param.post_start = arg_size;
2095 body->param.post_num = args->post_args_num;
2096 body->param.flags.has_post = TRUE;
2097 arg_size += args->post_args_num;
2099 if (body->param.flags.has_rest) {
2100 body->param.post_start = body->param.rest_start + 1;
2104 if (args->kw_args) {
2105 arg_size = iseq_set_arguments_keywords(iseq, optargs, args, arg_size);
2107 else if (args->kw_rest_arg && !optimized_forward) {
2108 ID kw_id = iseq->body->local_table[arg_size];
2109 struct rb_iseq_param_keyword *keyword = ZALLOC_N(struct rb_iseq_param_keyword, 1);
2110 keyword->rest_start = arg_size++;
2111 body->param.keyword = keyword;
2112 body->param.flags.has_kwrest = TRUE;
2114 static ID anon_kwrest = 0;
2115 if (!anon_kwrest) anon_kwrest = rb_intern("**");
2116 if (kw_id == anon_kwrest) body->param.flags.anon_kwrest = TRUE;
2118 else if (args->no_kwarg) {
2119 body->param.flags.accepts_no_kwarg = TRUE;
2123 body->param.block_start = arg_size++;
2124 body->param.flags.has_block = TRUE;
2125 iseq_set_use_block(iseq);
2129 if (optimized_forward) {
2130 body->param.flags.use_block = 1;
2131 body->param.flags.forwardable = TRUE;
2135 iseq_calc_param_size(iseq);
2136 body->param.size = arg_size;
2138 if (args->pre_init) {
2139 NO_CHECK(COMPILE_POPPED(optargs, "init arguments (m)", args->pre_init));
2141 if (args->post_init) {
2142 NO_CHECK(COMPILE_POPPED(optargs, "init arguments (p)", args->post_init));
2145 if (body->type == ISEQ_TYPE_BLOCK) {
2146 if (body->param.flags.has_opt == FALSE &&
2147 body->param.flags.has_post == FALSE &&
2148 body->param.flags.has_rest == FALSE &&
2149 body->param.flags.has_kw == FALSE &&
2150 body->param.flags.has_kwrest == FALSE) {
2152 if (body->param.lead_num == 1 && last_comma == 0) {
2154 body->param.flags.ambiguous_param0 = TRUE;
2166 unsigned int size = tbl ? tbl->size : 0;
2167 unsigned int offset = 0;
2170 struct rb_args_info *args = &RNODE_ARGS(node_args)->nd_ainfo;
2175 if (args->forwarding && args->pre_args_num == 0 && !args->opt_args) {
2183 MEMCPY(ids, tbl->ids + offset, ID, size);
2184 ISEQ_BODY(iseq)->local_table = ids;
2186 ISEQ_BODY(iseq)->local_table_size = size;
2188 debugs("iseq_set_local_table: %u\n", ISEQ_BODY(iseq)->local_table_size);
2200 else if ((tlit = OBJ_BUILTIN_TYPE(lit)) == -1) {
2203 else if ((tval = OBJ_BUILTIN_TYPE(val)) == -1) {
2206 else if (tlit != tval) {
2224 return rb_float_cmp(lit, val);
2227 const struct RRational *rat1 = RRATIONAL(val);
2228 const struct RRational *rat2 = RRATIONAL(lit);
2229 return rb_iseq_cdhash_cmp(rat1->num, rat2->num) || rb_iseq_cdhash_cmp(rat1->den, rat2->den);
2232 const struct RComplex *comp1 = RCOMPLEX(val);
2233 const struct RComplex *comp2 = RCOMPLEX(lit);
2234 return rb_iseq_cdhash_cmp(comp1->real, comp2->real) || rb_iseq_cdhash_cmp(comp1->imag, comp2->imag);
2237 return rb_reg_equal(val, lit) ? 0 : -1;
2245 rb_iseq_cdhash_hash(VALUE a)
2247 switch (OBJ_BUILTIN_TYPE(a)) {
2250 return (st_index_t)a;
2258 return rb_rational_hash(a);
2260 return rb_complex_hash(a);
2270 rb_iseq_cdhash_hash,
2292 return INT2FIX(ISEQ_BODY(iseq)->ivc_size++);
2299 struct rb_id_table *tbl = ISEQ_COMPILE_DATA(iseq)->ivar_cache_table;
2301 if (rb_id_table_lookup(tbl, id, &val)) {
2306 tbl = rb_id_table_create(1);
2307 ISEQ_COMPILE_DATA(iseq)->ivar_cache_table = tbl;
2309 val = INT2FIX(ISEQ_BODY(iseq)->icvarc_size++);
2310 rb_id_table_insert(tbl, id, val);
2314 #define BADINSN_DUMP(anchor, list, dest) \
2315 dump_disasm_list_with_cursor(FIRST_ELEMENT(anchor), list, dest)
2317 #define BADINSN_ERROR \
2318 (xfree(generated_iseq), \
2319 xfree(insns_info), \
2320 BADINSN_DUMP(anchor, list, NULL), \
2326 int stack_max = 0, sp = 0, line = 0;
2329 for (list = FIRST_ELEMENT(anchor); list; list = list->next) {
2330 if (IS_LABEL(list)) {
2336 for (list = FIRST_ELEMENT(anchor); list; list = list->next) {
2337 switch (list->type) {
2338 case ISEQ_ELEMENT_INSN:
2346 sp = calc_sp_depth(sp, iobj);
2348 BADINSN_DUMP(anchor, list, NULL);
2349 COMPILE_ERROR(iseq, iobj->insn_info.line_no,
2350 "argument stack underflow (%d)", sp);
2353 if (sp > stack_max) {
2357 line = iobj->insn_info.line_no;
2359 operands = iobj->operands;
2360 insn = iobj->insn_id;
2361 types = insn_op_types(insn);
2362 len = insn_len(insn);
2365 if (iobj->operand_size != len - 1) {
2367 BADINSN_DUMP(anchor, list, NULL);
2368 COMPILE_ERROR(iseq, iobj->insn_info.line_no,
2369 "operand size miss! (%d for %d)",
2370 iobj->operand_size, len - 1);
2374 for (j = 0; types[j]; j++) {
2375 if (types[j] == TS_OFFSET) {
2379 BADINSN_DUMP(anchor, list, NULL);
2380 COMPILE_ERROR(iseq, iobj->insn_info.line_no,
2381 "unknown label: "LABEL_FORMAT, lobj->label_no);
2384 if (lobj->sp == -1) {
2387 else if (lobj->sp != sp) {
2388 debugs("%s:%d: sp inconsistency found but ignored (" LABEL_FORMAT " sp: %d, calculated sp: %d)\n",
2390 lobj->label_no, lobj->sp, sp);
2396 case ISEQ_ELEMENT_LABEL:
2399 if (lobj->sp == -1) {
2403 if (lobj->sp != sp) {
2404 debugs("%s:%d: sp inconsistency found but ignored (" LABEL_FORMAT " sp: %d, calculated sp: %d)\n",
2406 lobj->label_no, lobj->sp, sp);
2412 case ISEQ_ELEMENT_TRACE:
2417 case ISEQ_ELEMENT_ADJUST:
2422 sp = adjust->label ? adjust->label->sp : 0;
2423 if (adjust->line_no != -1 && orig_sp - sp < 0) {
2424 BADINSN_DUMP(anchor, list, NULL);
2425 COMPILE_ERROR(iseq, adjust->line_no,
2426 "iseq_set_sequence: adjust bug %d < %d",
2433 BADINSN_DUMP(anchor, list, NULL);
2434 COMPILE_ERROR(iseq, line,
"unknown list type: %d", list->type);
2443 int insns_info_index, int code_index, const INSN *iobj)
2445 if (insns_info_index == 0 ||
2446 insns_info[insns_info_index-1].line_no != iobj->insn_info.line_no ||
2447 #ifdef USE_ISEQ_NODE_ID
2448 insns_info[insns_info_index-1].node_id != iobj->insn_info.node_id ||
2450 insns_info[insns_info_index-1].events != iobj->insn_info.events) {
2451 insns_info[insns_info_index].line_no = iobj->insn_info.line_no;
2452 #ifdef USE_ISEQ_NODE_ID
2453 insns_info[insns_info_index].node_id = iobj->insn_info.node_id;
2455 insns_info[insns_info_index].events = iobj->insn_info.events;
2456 positions[insns_info_index] = code_index;
2464 int insns_info_index, int code_index, const ADJUST *adjust)
2466 insns_info[insns_info_index].line_no = adjust->line_no;
2467 insns_info[insns_info_index].node_id = -1;
2468 insns_info[insns_info_index].events = 0;
2469 positions[insns_info_index] = code_index;
2474 array_to_idlist(VALUE arr)
2479 for (int i = 0; i < size; i++) {
2488 idlist_to_array(const ID *ids)
2505 unsigned int *positions;
2507 VALUE *generated_iseq;
2511 int insn_num, code_index, insns_info_index, sp = 0;
2512 int stack_max = fix_sp_depth(iseq, anchor);
2514 if (stack_max < 0) return COMPILE_NG;
2517 insn_num = code_index = 0;
2518 for (list = FIRST_ELEMENT(anchor); list; list = list->next) {
2519 switch (list->type) {
2520 case ISEQ_ELEMENT_INSN:
2524 sp = calc_sp_depth(sp, iobj);
2526 events = iobj->insn_info.events |= events;
2527 if (ISEQ_COVERAGE(iseq)) {
2528 if (ISEQ_LINE_COVERAGE(iseq) && (events & RUBY_EVENT_COVERAGE_LINE) &&
2529 !(rb_get_coverage_mode() & COVERAGE_TARGET_ONESHOT_LINES)) {
2530 int line = iobj->insn_info.line_no - 1;
2531 if (line >= 0 && line < RARRAY_LEN(ISEQ_LINE_COVERAGE(iseq))) {
2535 if (ISEQ_BRANCH_COVERAGE(iseq) && (events & RUBY_EVENT_COVERAGE_BRANCH)) {
2536 while (RARRAY_LEN(ISEQ_PC2BRANCHINDEX(iseq)) <= code_index) {
2542 code_index += insn_data_length(iobj);
2547 case ISEQ_ELEMENT_LABEL:
2550 lobj->position = code_index;
2551 if (lobj->sp != sp) {
2552 debugs("%s: sp inconsistency found but ignored (" LABEL_FORMAT " sp: %d, calculated sp: %d)\n",
2554 lobj->label_no, lobj->sp, sp);
2559 case ISEQ_ELEMENT_TRACE:
2562 events |= trace->event;
2563 if (trace->event & RUBY_EVENT_COVERAGE_BRANCH) data = trace->data;
2566 case ISEQ_ELEMENT_ADJUST:
2569 if (adjust->line_no != -1) {
2571 sp = adjust->label ? adjust->label->sp : 0;
2572 if (orig_sp - sp > 0) {
2573 if (orig_sp - sp > 1) code_index++;
2587 positions = ALLOC_N(unsigned int, insn_num);
2588 if (ISEQ_IS_SIZE(body)) {
2592 body->is_entries = NULL;
2595 ISEQ_COMPILE_DATA(iseq)->ci_index = 0;
2602 iseq_bits_t * mark_offset_bits;
2603 int code_size = code_index;
2605 iseq_bits_t tmp[1] = {0};
2606 bool needs_bitmap = false;
2608 if (ISEQ_MBITS_BUFLEN(code_index) == 1) {
2609 mark_offset_bits = tmp;
2612 mark_offset_bits = ZALLOC_N(iseq_bits_t, ISEQ_MBITS_BUFLEN(code_index));
2615 list = FIRST_ELEMENT(anchor);
2616 insns_info_index = code_index = sp = 0;
2619 switch (list->type) {
2620 case ISEQ_ELEMENT_INSN:
2628 sp = calc_sp_depth(sp, iobj);
2630 operands = iobj->operands;
2631 insn = iobj->insn_id;
2632 generated_iseq[code_index] = insn;
2633 types = insn_op_types(insn);
2634 len = insn_len(insn);
2636 for (j = 0; types[j]; j++) {
2637 char type = types[j];
2645 generated_iseq[code_index + 1 + j] = lobj->position - (code_index + len);
2650 VALUE map = operands[j];
2653 data.pos = code_index;
2657 rb_hash_rehash(map);
2658 freeze_hide_obj(map);
2659 generated_iseq[code_index + 1 + j] = map;
2660 ISEQ_MBITS_SET(mark_offset_bits, code_index + 1 + j);
2662 needs_bitmap = true;
2667 generated_iseq[code_index + 1 + j] = FIX2INT(operands[j]);
2672 VALUE v = operands[j];
2673 generated_iseq[code_index + 1 + j] = v;
2677 ISEQ_MBITS_SET(mark_offset_bits, code_index + 1 + j);
2678 needs_bitmap = true;
2685 unsigned int ic_index = ISEQ_COMPILE_DATA(iseq)->ic_index++;
2686 IC ic = &ISEQ_IS_ENTRY_START(body, type)[ic_index].ic_cache;
2687 if (UNLIKELY(ic_index >= body->ic_size)) {
2688 BADINSN_DUMP(anchor, &iobj->link, 0);
2689 COMPILE_ERROR(iseq, iobj->insn_info.line_no,
2690 "iseq_set_sequence: ic_index overflow: index: %d, size: %d",
2691 ic_index, ISEQ_IS_SIZE(body));
2694 ic->segments = array_to_idlist(operands[j]);
2696 generated_iseq[code_index + 1 + j] = (VALUE)ic;
2701 unsigned int ic_index = FIX2UINT(operands[j]);
2703 IVC cache = ((IVC)&body->is_entries[ic_index]);
2705 if (insn == BIN(setinstancevariable)) {
2706 cache->iv_set_name = SYM2ID(operands[j - 1]);
2709 cache->iv_set_name = 0;
2712 vm_ic_attr_index_initialize(cache, INVALID_SHAPE_ID);
2717 unsigned int ic_index = FIX2UINT(operands[j]);
2718 IC ic = &ISEQ_IS_ENTRY_START(body, type)[ic_index].ic_cache;
2719 if (UNLIKELY(ic_index >= ISEQ_IS_SIZE(body))) {
2720 BADINSN_DUMP(anchor, &iobj->link, 0);
2721 COMPILE_ERROR(iseq, iobj->insn_info.line_no,
2722 "iseq_set_sequence: ic_index overflow: index: %d, size: %d",
2723 ic_index, ISEQ_IS_SIZE(body));
2725 generated_iseq[code_index + 1 + j] = (VALUE)ic;
2732 RUBY_ASSERT(ISEQ_COMPILE_DATA(iseq)->ci_index <= body->ci_size);
2733 struct rb_call_data *cd = &body->call_data[ISEQ_COMPILE_DATA(iseq)->ci_index++];
2735 cd->cc = vm_cc_empty();
2736 generated_iseq[code_index + 1 + j] = (VALUE)cd;
2740 generated_iseq[code_index + 1 + j] = SYM2ID(operands[j]);
2743 generated_iseq[code_index + 1 + j] = operands[j];
2746 generated_iseq[code_index + 1 + j] = operands[j];
2749 BADINSN_ERROR(iseq, iobj->insn_info.line_no,
2750 "unknown operand type: %c", type);
2754 if (add_insn_info(insns_info, positions, insns_info_index, code_index, iobj)) insns_info_index++;
2758 case ISEQ_ELEMENT_LABEL:
2761 if (lobj->sp != sp) {
2762 debugs("%s: sp inconsistency found but ignored (" LABEL_FORMAT " sp: %d, calculated sp: %d)\n",
2764 lobj->label_no, lobj->sp, sp);
2769 case ISEQ_ELEMENT_ADJUST:
2774 if (adjust->label) {
2775 sp = adjust->label->sp;
2781 if (adjust->line_no != -1) {
2782 const int diff = orig_sp - sp;
2784 if (insns_info_index == 0) {
2785 COMPILE_ERROR(iseq, adjust->line_no,
2786 "iseq_set_sequence: adjust bug (ISEQ_ELEMENT_ADJUST must not be the first in iseq)");
2788 if (add_adjust_info(insns_info, positions, insns_info_index, code_index, adjust)) insns_info_index++;
2791 generated_iseq[code_index++] = BIN(adjuststack);
2792 generated_iseq[code_index++] = orig_sp - sp;
2794 else if (diff == 1) {
2795 generated_iseq[code_index++] = BIN(pop);
2797 else if (diff < 0) {
2798 int label_no = adjust->label ? adjust->label->label_no : -1;
2799 xfree(generated_iseq);
2802 if (ISEQ_MBITS_BUFLEN(code_size) > 1) {
2803 xfree(mark_offset_bits);
2805 debug_list(anchor, list);
2806 COMPILE_ERROR(iseq, adjust->line_no,
2807 "iseq_set_sequence: adjust bug to %d %d < %d",
2808 label_no, orig_sp, sp);
2821 body->iseq_encoded = (void *)generated_iseq;
2822 body->iseq_size = code_index;
2823 body->stack_max = stack_max;
2825 if (ISEQ_MBITS_BUFLEN(body->iseq_size) == 1) {
2826 body->mark_bits.single = mark_offset_bits[0];
2830 body->mark_bits.list = mark_offset_bits;
2833 body->mark_bits.list = 0;
2839 body->insns_info.body = insns_info;
2840 body->insns_info.positions = positions;
2843 body->insns_info.body = insns_info;
2844 REALLOC_N(positions, unsigned int, insns_info_index);
2845 body->insns_info.positions = positions;
2846 body->insns_info.size = insns_info_index;
2852 label_get_position(LABEL *lobj)
2854 return lobj->position;
2858 label_get_sp(LABEL *lobj)
2864 iseq_set_exception_table(rb_iseq_t *iseq)
2867 unsigned int tlen, i;
2870 ISEQ_BODY(iseq)->catch_table = NULL;
2872 VALUE catch_table_ary = ISEQ_COMPILE_DATA(iseq)->catch_table_ary;
2873 if (NIL_P(catch_table_ary)) return COMPILE_OK;
2881 for (i = 0; i < table->size; i++) {
2884 entry = UNALIGNED_MEMBER_PTR(table, entries[i]);
2885 entry->type = (enum rb_catch_type)(ptr[0] & 0xffff);
2886 pos = label_get_position((LABEL *)(ptr[1] & ~1));
2888 entry->start = (unsigned int)pos;
2889 pos = label_get_position((LABEL *)(ptr[2] & ~1));
2891 entry->end = (unsigned int)pos;
2898 entry->cont = label_get_position(lobj);
2899 entry->sp = label_get_sp(lobj);
2902 if (entry->type == CATCH_TYPE_RESCUE ||
2903 entry->type == CATCH_TYPE_BREAK ||
2904 entry->type == CATCH_TYPE_NEXT) {
2913 ISEQ_BODY(iseq)->catch_table = table;
2914 RB_OBJ_WRITE(iseq, &ISEQ_COMPILE_DATA(iseq)->catch_table_ary, 0);
2935 VALUE *opt_table = (VALUE *)ISEQ_BODY(iseq)->param.opt_table;
2937 if (ISEQ_BODY(iseq)->param.flags.has_opt) {
2938 for (i = 0; i < ISEQ_BODY(iseq)->param.opt_num + 1; i++) {
2939 opt_table[i] = label_get_position((LABEL *)opt_table[i]);
2946 get_destination_insn(INSN *iobj)
2952 list = lobj->link.next;
2954 switch (list->type) {
2955 case ISEQ_ELEMENT_INSN:
2956 case ISEQ_ELEMENT_ADJUST:
2958 case ISEQ_ELEMENT_LABEL:
2961 case ISEQ_ELEMENT_TRACE:
2964 events |= trace->event;
2972 if (list && IS_INSN(list)) {
2974 iobj->insn_info.events |= events;
2980 get_next_insn(INSN *iobj)
2985 if (IS_INSN(list) || IS_ADJUST(list)) {
2994 get_prev_insn(INSN *iobj)
2999 if (IS_INSN(list) || IS_ADJUST(list)) {
3008 unref_destination(INSN *iobj, int pos)
3010 LABEL *lobj = (LABEL *)OPERAND_AT(iobj, pos);
3012 if (!lobj->refcnt) ELEM_REMOVE(&lobj->link);
3016 replace_destination(INSN *dobj, INSN *nobj)
3018 VALUE n = OPERAND_AT(nobj, 0);
3021 if (dl == nl) return false;
3024 OPERAND_AT(dobj, 0) = n;
3025 if (!dl->refcnt) ELEM_REMOVE(&dl->link);
3030 find_destination(INSN *i)
3032 int pos, len = insn_len(i->insn_id);
3033 for (pos = 0; pos < len; ++pos) {
3034 if (insn_op_types(i->insn_id)[pos] == TS_OFFSET) {
3035 return (LABEL *)OPERAND_AT(i, pos);
3045 int *unref_counts = 0, nlabels = ISEQ_COMPILE_DATA(iseq)->label_no;
3048 unref_counts = ALLOCA_N(int, nlabels);
3049 MEMZERO(unref_counts, int, nlabels);
3054 if (IS_INSN_ID(i, leave)) {
3058 else if ((lab = find_destination((INSN *)i)) != 0) {
3059 unref_counts[lab->label_no]++;
3062 else if (IS_LABEL(i)) {
3064 if (lab->unremovable) return 0;
3065 if (lab->refcnt > unref_counts[lab->label_no]) {
3066 if (i == first) return 0;
3071 else if (IS_TRACE(i)) {
3074 else if (IS_ADJUST(i)) {
3078 } while ((i = i->next) != 0);
3083 VALUE insn = INSN_OF(i);
3084 int pos, len = insn_len(insn);
3085 for (pos = 0; pos < len; ++pos) {
3086 switch (insn_op_types(insn)[pos]) {
3088 unref_destination((INSN *)i, pos);
3097 } while ((i != end) && (i = i->next) != 0);
3104 switch (OPERAND_AT(iobj, 0)) {
3106 ELEM_REMOVE(&iobj->link);
3109 ELEM_REMOVE(&iobj->link);
3112 iobj->insn_id = BIN(adjuststack);
3118 is_frozen_putstring(INSN *insn, VALUE *op)
3120 if (IS_INSN_ID(insn, putstring) || IS_INSN_ID(insn, putchilledstring)) {
3121 *op = OPERAND_AT(insn, 0);
3124 else if (IS_INSN_ID(insn, putobject)) {
3125 *op = OPERAND_AT(insn, 0);
3156 INSN *niobj, *ciobj, *dup = 0;
3160 switch (INSN_OF(iobj)) {
3161 case BIN(putstring):
3162 case BIN(putchilledstring):
3168 case BIN(putobject):
3171 default: return FALSE;
3174 ciobj = (INSN *)get_next_insn(iobj);
3175 if (IS_INSN_ID(ciobj, jump)) {
3176 ciobj = (INSN *)get_next_insn((INSN*)OPERAND_AT(ciobj, 0));
3178 if (IS_INSN_ID(ciobj, dup)) {
3179 ciobj = (INSN *)get_next_insn(dup = ciobj);
3181 if (!ciobj || !IS_INSN_ID(ciobj, checktype)) return FALSE;
3182 niobj = (INSN *)get_next_insn(ciobj);
3187 switch (INSN_OF(niobj)) {
3189 if (OPERAND_AT(ciobj, 0) == type) {
3190 dest = (LABEL *)OPERAND_AT(niobj, 0);
3193 case BIN(branchunless):
3194 if (OPERAND_AT(ciobj, 0) != type) {
3195 dest = (LABEL *)OPERAND_AT(niobj, 0);
3201 line = ciobj->insn_info.line_no;
3202 node_id = ciobj->insn_info.node_id;
3204 if (niobj->link.next && IS_LABEL(niobj->link.next)) {
3205 dest = (LABEL *)niobj->link.next;
3208 dest = NEW_LABEL(line);
3209 ELEM_INSERT_NEXT(&niobj->link, &dest->link);
3212 INSERT_AFTER_INSN1(iobj, line, node_id, jump, dest);
3214 if (!dup) INSERT_AFTER_INSN(iobj, line, node_id, pop);
3221 const struct rb_callinfo *nci = vm_ci_new(vm_ci_mid(ci),
3222 vm_ci_flag(ci) | add,
3232 const struct rb_callinfo *nci = vm_ci_new(vm_ci_mid(ci),
3240 #define vm_ci_simple(ci) (vm_ci_flag(ci) & VM_CALL_ARGS_SIMPLE)
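/* A "simple" call info has none of the splat/keyword/block-argument flags set;
 * the peephole passes below rely on this to recognize bare zero-argument sends
 * such as `.freeze` on an array or hash literal. */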
3248 optimize_checktype(iseq, iobj);
3250 if (IS_INSN_ID(iobj, jump)) {
3251 INSN *niobj, *diobj, *piobj;
3252 diobj = (INSN *)get_destination_insn(iobj);
3253 niobj = (INSN *)get_next_insn(iobj);
3255 if (diobj == niobj) {
3262 unref_destination(iobj, 0);
3263 ELEM_REMOVE(&iobj->link);
3266 else if (iobj != diobj && IS_INSN(&diobj->link) &&
3267 IS_INSN_ID(diobj, jump) &&
3268 OPERAND_AT(iobj, 0) != OPERAND_AT(diobj, 0) &&
3269 diobj->insn_info.events == 0) {
3280 if (replace_destination(iobj, diobj)) {
3281 remove_unreachable_chunk(iseq, iobj->link.next);
3285 else if (IS_INSN_ID(diobj, leave)) {
3298 unref_destination(iobj, 0);
3299 iobj->insn_id = BIN(leave);
3300 iobj->operand_size = 0;
3301 iobj->insn_info = diobj->insn_info;
3304 else if (IS_INSN(iobj->link.prev) &&
3305 (piobj = (INSN *)iobj->link.prev) &&
3306 (IS_INSN_ID(piobj, branchif) ||
3307 IS_INSN_ID(piobj, branchunless))) {
3308 INSN *pdiobj = (INSN *)get_destination_insn(piobj);
3309 if (niobj == pdiobj) {
3310 int refcnt = IS_LABEL(piobj->link.next) ?
3311 ((LABEL *)piobj->link.next)->refcnt : 0;
3326 piobj->insn_id = (IS_INSN_ID(piobj, branchif))
3327 ? BIN(branchunless) : BIN(branchif);
3328 if (replace_destination(piobj, iobj) && refcnt <= 1) {
3329 ELEM_REMOVE(&iobj->link);
3336 else if (diobj == pdiobj) {
3350 INSN *popiobj = new_insn_core(iseq, iobj->insn_info.line_no, iobj->insn_info.node_id, BIN(pop), 0, 0);
3351 ELEM_REPLACE(&piobj->link, &popiobj->link);
3354 if (remove_unreachable_chunk(iseq, iobj->link.next)) {
3368 if (IS_INSN_ID(iobj, newrange)) {
3369 INSN *const range = iobj;
3371 VALUE str_beg, str_end;
3373 if ((end = (INSN *)get_prev_insn(range)) != 0 &&
3374 is_frozen_putstring(end, &str_end) &&
3375 (beg = (INSN *)get_prev_insn(end)) != 0 &&
3376 is_frozen_putstring(beg, &str_beg)) {
3377 int excl = FIX2INT(OPERAND_AT(range, 0));
3380 ELEM_REMOVE(&beg->link);
3381 ELEM_REMOVE(&end->link);
3382 range->insn_id = BIN(putobject);
3383 OPERAND_AT(range, 0) = lit_range;
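/* Roughly: `putstring "a"; putstring "z"; newrange` with both endpoints
 * frozen string literals is folded into a single putobject of the Range
 * built at compile time (lit_range), so e.g. ("a".."z") costs no per-call
 * allocation under frozen string literals. */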
3388 if (IS_INSN_ID(iobj, leave)) {
3389 remove_unreachable_chunk(iseq, iobj->link.next);
3401 if (IS_INSN_ID(iobj, duparray)) {
3403 if (IS_INSN(next) && (IS_INSN_ID(next, concatarray) || IS_INSN_ID(next, concattoarray))) {
3404 iobj->insn_id = BIN(putobject);
3414 if (IS_INSN_ID(iobj, duparray)) {
3416 if (IS_INSN(next) && (IS_INSN_ID(next, send))) {
3420 if (vm_ci_simple(ci) && vm_ci_argc(ci) == 0 && blockiseq == NULL && vm_ci_mid(ci) == idFreeze) {
3421 VALUE ary = iobj->operands[0];
3424 iobj->insn_id = BIN(opt_ary_freeze);
3425 iobj->operand_size = 2;
3426 iobj->operands = compile_data_calloc2(iseq, iobj->operand_size, sizeof(VALUE));
3427 iobj->operands[0] = ary;
3428 iobj->operands[1] = (VALUE)ci;
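/* Roughly: `duparray` followed by `send :freeze` (no args, no block)
 * collapses into one opt_ary_freeze whose operands are the frozen array
 * literal and the original callinfo, so `[1, 2, 3].freeze` pushes a shared
 * frozen array directly; the kept callinfo lets the VM fall back when
 * Array#freeze has been redefined. The duphash case below is analogous. */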
3440 if (IS_INSN_ID(iobj, duphash)) {
3442 if (IS_INSN(next) && (IS_INSN_ID(next, send))) {
3446 if (vm_ci_simple(ci) && vm_ci_argc(ci) == 0 && blockiseq == NULL && vm_ci_mid(ci) == idFreeze) {
3447 VALUE hash = iobj->operands[0];
3450 iobj->insn_id = BIN(opt_hash_freeze);
3451 iobj->operand_size = 2;
3452 iobj->operands = compile_data_calloc2(iseq, iobj->operand_size, sizeof(VALUE));
3453 iobj->operands[0] = hash;
3454 iobj->operands[1] = (VALUE)ci;
3466 if (IS_INSN_ID(iobj, newarray) && iobj->operands[0] == INT2FIX(0)) {
3468 if (IS_INSN(next) && (IS_INSN_ID(next, send))) {
3472 if (vm_ci_simple(ci) && vm_ci_argc(ci) == 0 && blockiseq == NULL && vm_ci_mid(ci) == idFreeze) {
3473 iobj->insn_id = BIN(opt_ary_freeze);
3474 iobj->operand_size = 2;
3475 iobj->operands = compile_data_calloc2(iseq, iobj->operand_size, sizeof(VALUE));
3476 iobj->operands[0] = rb_cArray_empty_frozen;
3477 iobj->operands[1] = (VALUE)ci;
3489 if (IS_INSN_ID(iobj, newhash) && iobj->operands[0] == INT2FIX(0)) {
3491 if (IS_INSN(next) && (IS_INSN_ID(next, send))) {
3495 if (vm_ci_simple(ci) && vm_ci_argc(ci) == 0 && blockiseq == NULL && vm_ci_mid(ci) == idFreeze) {
3496 iobj->insn_id = BIN(opt_hash_freeze);
3497 iobj->operand_size = 2;
3498 iobj->operands = compile_data_calloc2(iseq, iobj->operand_size, sizeof(VALUE));
3499 iobj->operands[0] = rb_cHash_empty_frozen;
3500 iobj->operands[1] = (VALUE)ci;
3506 if (IS_INSN_ID(iobj, branchif) ||
3507 IS_INSN_ID(iobj, branchnil) ||
3508 IS_INSN_ID(iobj, branchunless)) {
3517 INSN *nobj = (INSN *)get_destination_insn(iobj);
3539 int stop_optimization =
3540 ISEQ_COVERAGE(iseq) && ISEQ_LINE_COVERAGE(iseq) &&
3541 nobj->link.type == ISEQ_ELEMENT_INSN &&
3542 nobj->insn_info.events;
3543 if (!stop_optimization) {
3544 INSN *pobj = (INSN *)iobj->link.prev;
3547 if (!IS_INSN(&pobj->link))
3549 else if (IS_INSN_ID(pobj, dup))
3554 if (IS_INSN(&nobj->link) && IS_INSN_ID(nobj, jump)) {
3555 if (!replace_destination(iobj, nobj)) break;
3557 else if (prev_dup && IS_INSN_ID(nobj, dup) &&
3558 !!(nobj = (INSN *)nobj->link.next) &&
3560 nobj->insn_id == iobj->insn_id) {
3576 if (!replace_destination(iobj, nobj)) break;
3604 if (prev_dup && IS_INSN(pobj->link.prev)) {
3605 pobj = (INSN *)pobj->link.prev;
3607 if (IS_INSN_ID(pobj, putobject)) {
3608 cond = (IS_INSN_ID(iobj, branchif) ?
3609 OPERAND_AT(pobj, 0) != Qfalse :
3610 IS_INSN_ID(iobj, branchunless) ?
3611 OPERAND_AT(pobj, 0) == Qfalse :
3614 else if (IS_INSN_ID(pobj, putstring) ||
3615 IS_INSN_ID(pobj, duparray) ||
3616 IS_INSN_ID(pobj, newarray)) {
3617 cond = IS_INSN_ID(iobj, branchif);
3619 else if (IS_INSN_ID(pobj, putnil)) {
3620 cond = !IS_INSN_ID(iobj, branchif);
3623 if (prev_dup || !IS_INSN_ID(pobj, newarray)) {
3624 ELEM_REMOVE(iobj->link.prev);
3626 else if (!iseq_pop_newarray(iseq, pobj)) {
3627 pobj = new_insn_core(iseq, pobj->insn_info.line_no, pobj->insn_info.node_id, BIN(pop), 0, NULL);
3628 ELEM_INSERT_PREV(&iobj->link, &pobj->link);
3632 pobj = new_insn_core(iseq, pobj->insn_info.line_no, pobj->insn_info.node_id, BIN(putnil), 0, NULL);
3633 ELEM_INSERT_NEXT(&iobj->link, &pobj->link);
3635 iobj->insn_id = BIN(jump);
3639 unref_destination(iobj, 0);
3640 ELEM_REMOVE(&iobj->link);
3645 nobj = (INSN *)get_destination_insn(nobj);
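/* Roughly: when the value feeding branchif/branchunless/branchnil is a
 * compile-time constant (putobject/putnil/putstring/newarray..., possibly
 * dup'ed), the branch is resolved here: it becomes an unconditional jump if
 * taken, or is removed together with the pushed value if not; branch
 * destinations that point at further jumps are threaded as well. */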
3650 if (IS_INSN_ID(iobj, pop)) {
3658 if (IS_INSN(prev)) {
3659 enum ruby_vminsn_type previ = ((INSN *)prev)->insn_id;
3660 if (previ == BIN(putobject) || previ == BIN(putnil) ||
3661 previ == BIN(putself) || previ == BIN(putstring) ||
3662 previ == BIN(putchilledstring) ||
3663 previ == BIN(dup) ||
3664 previ == BIN(getlocal) ||
3665 previ == BIN(getblockparam) ||
3666 previ == BIN(getblockparamproxy) ||
3667 previ == BIN(getinstancevariable) ||
3668 previ == BIN(duparray)) {
3672 ELEM_REMOVE(&iobj->link);
3674 else if (previ == BIN(newarray) && iseq_pop_newarray(iseq, (INSN*)prev)) {
3675 ELEM_REMOVE(&iobj->link);
3677 else if (previ == BIN(concatarray)) {
3679 INSERT_BEFORE_INSN1(piobj, piobj->insn_info.line_no, piobj->insn_info.node_id, splatarray, Qfalse);
3680 INSN_OF(piobj) = BIN(pop);
3682 else if (previ == BIN(concatstrings)) {
3683 if (OPERAND_AT(prev, 0) == INT2FIX(1)) {
3687 ELEM_REMOVE(&iobj->link);
3688 INSN_OF(prev) = BIN(adjuststack);
3694 if (IS_INSN_ID(iobj, newarray) ||
3695 IS_INSN_ID(iobj, duparray) ||
3696 IS_INSN_ID(iobj, concatarray) ||
3697 IS_INSN_ID(iobj, splatarray) ||
3707 if (IS_INSN(next) && IS_INSN_ID(next, splatarray)) {
3713 if (IS_INSN_ID(iobj, newarray)) {
3715 if (IS_INSN(next) && IS_INSN_ID(next, expandarray) &&
3716 OPERAND_AT(next, 1) == INT2FIX(0)) {
3718 op1 = OPERAND_AT(iobj, 0);
3719 op2 = OPERAND_AT(next, 0);
3730 INSN_OF(iobj) = BIN(swap);
3731 iobj->operand_size = 0;
3740 INSN_OF(iobj) = BIN(opt_reverse);
3745 INSN_OF(iobj) = BIN(opt_reverse);
3746 OPERAND_AT(iobj, 0) = OPERAND_AT(next, 0);
3756 for (; diff > 0; diff--) {
3757 INSERT_BEFORE_INSN(iobj, iobj->insn_info.line_no, iobj->insn_info.node_id, pop);
3768 for (; diff < 0; diff++) {
3769 INSERT_BEFORE_INSN(iobj, iobj->insn_info.line_no, iobj->insn_info.node_id, putnil);
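/* Roughly: `newarray N` immediately followed by `expandarray M, 0` never
 * needs the intermediate array; it is rewritten into swap or opt_reverse,
 * padding with putnil or trimming with pop as the arities differ, so
 * parallel assignment like `x, y = y, x` allocates nothing. */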
3776 if (IS_INSN_ID(iobj, duparray)) {
3785 if (IS_INSN(next) && IS_INSN_ID(next, expandarray)) {
3786 INSN_OF(iobj) = BIN(putobject);
3790 if (IS_INSN_ID(iobj, anytostring)) {
3798 if (IS_INSN(next) && IS_INSN_ID(next, concatstrings) &&
3799 OPERAND_AT(next, 0) == INT2FIX(1)) {
3804 if (IS_INSN_ID(iobj, putstring) || IS_INSN_ID(iobj, putchilledstring) ||
3812 if (IS_NEXT_INSN_ID(&iobj->link, concatstrings) &&
3814 INSN *next = (INSN *)iobj->link.next;
3815 if ((OPERAND_AT(next, 0) = FIXNUM_INC(OPERAND_AT(next, 0), -1)) == INT2FIX(1)) {
3816 ELEM_REMOVE(&next->link);
3818 ELEM_REMOVE(&iobj->link);
3822 if (IS_INSN_ID(iobj, concatstrings)) {
3831 if (IS_INSN(next) && IS_INSN_ID(next, jump))
3832 next = get_destination_insn(jump = (INSN *)next);
3833 if (IS_INSN(next) && IS_INSN_ID(next, concatstrings)) {
3834 int n = FIX2INT(OPERAND_AT(iobj, 0)) + FIX2INT(OPERAND_AT(next, 0)) - 1;
3835 OPERAND_AT(iobj, 0) = INT2FIX(n);
3837 LABEL *label = ((LABEL *)OPERAND_AT(jump, 0));
3838 if (!--label->refcnt) {
3839 ELEM_REMOVE(&label->link);
3842 label = NEW_LABEL(0);
3843 OPERAND_AT(jump, 0) = (VALUE)label;
3846 ELEM_INSERT_NEXT(next, &label->link);
3847 CHECK(iseq_peephole_optimize(iseq, get_next_insn(jump), do_tailcallopt));
3855 if (do_tailcallopt &&
3856 (IS_INSN_ID(iobj, send) ||
3857 IS_INSN_ID(iobj, opt_aref_with) ||
3858 IS_INSN_ID(iobj, opt_aset_with) ||
3859 IS_INSN_ID(iobj, invokesuper))) {
3868 if (iobj->link.next) {
3871 if (!IS_INSN(next)) {
3875 switch (INSN_OF(next)) {
3884 next = get_destination_insn((INSN *)next);
3898 if (IS_INSN_ID(piobj, send) ||
3899 IS_INSN_ID(piobj, invokesuper)) {
3900 if (OPERAND_AT(piobj, 1) == 0) {
3901 ci = ci_flag_set(iseq, ci, VM_CALL_TAILCALL);
3902 OPERAND_AT(piobj, 0) = (VALUE)ci;
3907 ci = ci_flag_set(iseq, ci, VM_CALL_TAILCALL);
3908 OPERAND_AT(piobj, 0) = (VALUE)ci;
3914 if (IS_INSN_ID(iobj, dup)) {
3915 if (IS_NEXT_INSN_ID(&iobj->link, setlocal)) {
3926 if (IS_NEXT_INSN_ID(set1, setlocal)) {
3928 if (OPERAND_AT(set1, 0) == OPERAND_AT(set2, 0) &&
3929 OPERAND_AT(set1, 1) == OPERAND_AT(set2, 1)) {
3931 ELEM_REMOVE(&iobj->link);
3944 else if (IS_NEXT_INSN_ID(set1, dup) &&
3945 IS_NEXT_INSN_ID(set1->next, setlocal)) {
3946 set2 = set1->next->next;
3947 if (OPERAND_AT(set1, 0) == OPERAND_AT(set2, 0) &&
3948 OPERAND_AT(set1, 1) == OPERAND_AT(set2, 1)) {
3949 ELEM_REMOVE(set1->next);
3963 if (IS_INSN_ID(iobj, getlocal)) {
3965 if (IS_NEXT_INSN_ID(niobj, dup)) {
3966 niobj = niobj->next;
3968 if (IS_NEXT_INSN_ID(niobj, setlocal)) {
3970 if (OPERAND_AT(iobj, 0) == OPERAND_AT(set1, 0) &&
3971 OPERAND_AT(iobj, 1) == OPERAND_AT(set1, 1)) {
3987 if (IS_INSN_ID(iobj, opt_invokebuiltin_delegate)) {
3988 if (IS_TRACE(iobj->link.next)) {
3989 if (IS_NEXT_INSN_ID(iobj->link.next, leave)) {
3990 iobj->insn_id = BIN(opt_invokebuiltin_delegate_leave);
3992 if (iobj == (INSN *)list && bf->argc == 0 && (iseq->body->builtin_attrs & BUILTIN_ATTR_LEAF)) {
3993 iseq->body->builtin_attrs |= BUILTIN_ATTR_SINGLE_NOARG_LEAF;
4006 if (IS_INSN_ID(iobj, getblockparam)) {
4007 if (IS_NEXT_INSN_ID(&iobj->link, branchif) || IS_NEXT_INSN_ID(&iobj->link, branchunless)) {
4008 iobj->insn_id = BIN(getblockparamproxy);
4012 if (IS_INSN_ID(iobj, splatarray) && OPERAND_AT(iobj, 0) == false) {
4014 if (IS_NEXT_INSN_ID(niobj, duphash)) {
4015 niobj = niobj->next;
4017 unsigned int set_flags = 0, unset_flags = 0;
4030 if (IS_NEXT_INSN_ID(niobj, send)) {
4031 siobj = niobj->next;
4032 set_flags = VM_CALL_ARGS_SPLAT|VM_CALL_KW_SPLAT|VM_CALL_KW_SPLAT_MUT;
4033 unset_flags = VM_CALL_ARGS_BLOCKARG;
4048 else if ((IS_NEXT_INSN_ID(niobj, getlocal) || IS_NEXT_INSN_ID(niobj, getinstancevariable) ||
4049 IS_NEXT_INSN_ID(niobj, getblockparamproxy)) && (IS_NEXT_INSN_ID(niobj->next, send))) {
4050 siobj = niobj->next->next;
4051 set_flags = VM_CALL_ARGS_SPLAT|VM_CALL_KW_SPLAT|VM_CALL_KW_SPLAT_MUT|VM_CALL_ARGS_BLOCKARG;
4056 unsigned int flags = vm_ci_flag(ci);
4057 if ((flags & set_flags) == set_flags && !(flags & unset_flags)) {
4058 ((INSN*)niobj)->insn_id = BIN(putobject);
4059 OPERAND_AT(niobj, 0) = rb_hash_freeze(rb_hash_resurrect(OPERAND_AT(niobj, 0)));
4061 const struct rb_callinfo *nci = vm_ci_new(vm_ci_mid(ci),
4062 flags & ~VM_CALL_KW_SPLAT_MUT, vm_ci_argc(ci), vm_ci_kwarg(ci));
4064 OPERAND_AT(siobj, 0) = (VALUE)nci;
4074 insn_set_specialized_instruction(rb_iseq_t *iseq, INSN *iobj, int insn_id)
4076 iobj->insn_id = insn_id;
4077 iobj->operand_size = insn_len(insn_id) - 1;
4080 if (insn_id == BIN(opt_neq)) {
4081 VALUE original_ci = iobj->operands[0];
4082 iobj->operand_size = 2;
4083 iobj->operands = compile_data_calloc2(iseq, iobj->operand_size, sizeof(VALUE));
4084 iobj->operands[0] = (VALUE)new_callinfo(iseq, idEq, 1, 0, NULL, FALSE);
4085 iobj->operands[1] = original_ci;
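/* Roughly: opt_neq keeps two call caches -- a fresh callinfo for #== in
 * operand 0 plus the original #!= callinfo in operand 1 -- so redefinition
 * of either method can be detected and the optimized path abandoned. */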
4094 if (IS_INSN_ID(iobj, newarray) && iobj->link.next &&
4095 IS_INSN(iobj->link.next)) {
4099 INSN *niobj = (INSN *)iobj->link.next;
4100 if (IS_INSN_ID(niobj, send)) {
4102 if (vm_ci_simple(ci) && vm_ci_argc(ci) == 0) {
4104 switch (vm_ci_mid(ci)) {
4106 method = INT2FIX(VM_OPT_NEWARRAY_SEND_MAX);
4109 method = INT2FIX(VM_OPT_NEWARRAY_SEND_MIN);
4112 method = INT2FIX(VM_OPT_NEWARRAY_SEND_HASH);
4117 VALUE num = iobj->operands[0];
4118 int operand_len = insn_len(BIN(opt_newarray_send)) - 1;
4119 iobj->insn_id = BIN(opt_newarray_send);
4120 iobj->operands = compile_data_calloc2(iseq, operand_len, sizeof(VALUE));
4121 iobj->operands[0] = num;
4122 iobj->operands[1] = method;
4123 iobj->operand_size = operand_len;
4124 ELEM_REMOVE(&niobj->link);
4129 else if ((IS_INSN_ID(niobj, putstring) || IS_INSN_ID(niobj, putchilledstring) ||
4131 IS_NEXT_INSN_ID(&niobj->link, send)) {
4133 if (vm_ci_simple(ci) && vm_ci_argc(ci) == 1 && vm_ci_mid(ci) == idPack) {
4134 VALUE num = iobj->operands[0];
4135 int operand_len = insn_len(BIN(opt_newarray_send)) - 1;
4136 iobj->insn_id = BIN(opt_newarray_send);
4137 iobj->operands = compile_data_calloc2(iseq, operand_len, sizeof(VALUE));
4138 iobj->operands[0] = FIXNUM_INC(num, 1);
4139 iobj->operands[1] = INT2FIX(VM_OPT_NEWARRAY_SEND_PACK);
4140 iobj->operand_size = operand_len;
4141 ELEM_REMOVE(&iobj->link);
4142 ELEM_REMOVE(niobj->link.next);
4143 ELEM_INSERT_NEXT(&niobj->link, &iobj->link);
4149 else if ((IS_INSN_ID(niobj, putstring) || IS_INSN_ID(niobj, putchilledstring) ||
4151 IS_NEXT_INSN_ID(&niobj->link, getlocal) &&
4152 (niobj->link.next && IS_NEXT_INSN_ID(niobj->link.next, send))) {
4155 if (vm_ci_mid(ci) == idPack && vm_ci_argc(ci) == 2 &&
4156 (kwarg && kwarg->keyword_len == 1 && kwarg->keywords[0] == rb_id2sym(idBuffer))) {
4157 VALUE num = iobj->operands[0];
4158 int operand_len = insn_len(BIN(opt_newarray_send)) - 1;
4159 iobj->insn_id = BIN(opt_newarray_send);
4160 iobj->operands = compile_data_calloc2(iseq, operand_len, sizeof(VALUE));
4161 iobj->operands[0] = FIXNUM_INC(num, 2);
4162 iobj->operands[1] = INT2FIX(VM_OPT_NEWARRAY_SEND_PACK_BUFFER);
4163 iobj->operand_size = operand_len;
4165 ELEM_REMOVE((niobj->link.next)->next);
4167 ELEM_REMOVE(&iobj->link);
4169 ELEM_INSERT_NEXT(niobj->link.next, &iobj->link);
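/* Roughly: `newarray N` + `send :max/:min/:hash` (and the putstring +
 * send :pack forms above, with or without a buffer: keyword) become
 * opt_newarray_send, so e.g. `[a, b, c].max` is computed without
 * materializing the temporary array unless the method was redefined. */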
4175 if (IS_INSN_ID(iobj, send)) {
4179 #define SP_INSN(opt) insn_set_specialized_instruction(iseq, iobj, BIN(opt_##opt))
4180 if (vm_ci_simple(ci)) {
4181 switch (vm_ci_argc(ci)) {
4183 switch (vm_ci_mid(ci)) {
4184 case idLength: SP_INSN(length); return COMPILE_OK;
4185 case idSize: SP_INSN(size); return COMPILE_OK;
4186 case idEmptyP: SP_INSN(empty_p); return COMPILE_OK;
4187 case idNilP: SP_INSN(nil_p); return COMPILE_OK;
4188 case idSucc: SP_INSN(succ); return COMPILE_OK;
4189 case idNot: SP_INSN(not); return COMPILE_OK;
4193 switch (vm_ci_mid(ci)) {
4194 case idPLUS: SP_INSN(plus); return COMPILE_OK;
4195 case idMINUS: SP_INSN(minus); return COMPILE_OK;
4196 case idMULT: SP_INSN(mult); return COMPILE_OK;
4197 case idDIV: SP_INSN(div); return COMPILE_OK;
4198 case idMOD: SP_INSN(mod); return COMPILE_OK;
4199 case idEq: SP_INSN(eq); return COMPILE_OK;
4200 case idNeq: SP_INSN(neq); return COMPILE_OK;
4201 case idEqTilde:SP_INSN(regexpmatch2); return COMPILE_OK;
4202 case idLT: SP_INSN(lt); return COMPILE_OK;
4203 case idLE: SP_INSN(le); return COMPILE_OK;
4204 case idGT: SP_INSN(gt); return COMPILE_OK;
4205 case idGE: SP_INSN(ge); return COMPILE_OK;
4206 case idLTLT: SP_INSN(ltlt); return COMPILE_OK;
4207 case idAREF: SP_INSN(aref); return COMPILE_OK;
4208 case idAnd: SP_INSN(and); return COMPILE_OK;
4209 case idOr: SP_INSN(or); return COMPILE_OK;
4213 switch (vm_ci_mid(ci)) {
4214 case idASET: SP_INSN(aset); return COMPILE_OK;
4220 if ((vm_ci_flag(ci) & (VM_CALL_ARGS_BLOCKARG | VM_CALL_FORWARDING)) == 0 && blockiseq == NULL) {
4221 iobj->insn_id = BIN(opt_send_without_block);
4222 iobj->operand_size = insn_len(iobj->insn_id) - 1;
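/* Roughly: simple sends are narrowed to specialized instructions --
 * `a + b` => opt_plus, `a[i]` => opt_aref, `s.size` => opt_size, ... --
 * and any remaining send with no block argument or forwarding becomes
 * opt_send_without_block; each keeps its callinfo so that method
 * redefinition falls back to a plain call at runtime. */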
4233 switch (ISEQ_BODY(iseq)->type) {
4235 case ISEQ_TYPE_EVAL:
4236 case ISEQ_TYPE_MAIN:
4238 case ISEQ_TYPE_RESCUE:
4239 case ISEQ_TYPE_ENSURE:
4251 const int do_peepholeopt = ISEQ_COMPILE_DATA(iseq)->option->peephole_optimization;
4252 const int do_tailcallopt = tailcallable_p(iseq) &&
4253 ISEQ_COMPILE_DATA(iseq)->option->tailcall_optimization;
4254 const int do_si = ISEQ_COMPILE_DATA(iseq)->option->specialized_instruction;
4255 const int do_ou = ISEQ_COMPILE_DATA(iseq)->option->operands_unification;
4256 int rescue_level = 0;
4257 int tailcallopt = do_tailcallopt;
4259 list = FIRST_ELEMENT(anchor);
4261 int do_block_optimization = 0;
4263 if (ISEQ_BODY(iseq)->type == ISEQ_TYPE_BLOCK && !ISEQ_COMPILE_DATA(iseq)->catch_except_p) {
4264 do_block_optimization = 1;
4268 if (IS_INSN(list)) {
4269 if (do_peepholeopt) {
4270 iseq_peephole_optimize(iseq, list, tailcallopt);
4273 iseq_specialized_instruction(iseq, (INSN *)list);
4276 insn_operands_unification((INSN *)list);
4279 if (do_block_optimization) {
4281 if (IS_INSN_ID(item, jump)) {
4282 do_block_optimization = 0;
4286 if (IS_LABEL(list)) {
4287 switch (((LABEL *)list)->rescued) {
4288 case LABEL_RESCUE_BEG:
4290 tailcallopt = FALSE;
4292 case LABEL_RESCUE_END:
4293 if (!--rescue_level) tailcallopt = do_tailcallopt;
4300 if (do_block_optimization) {
4302 if (IS_INSN(le) && IS_INSN_ID((INSN *)le, nop)) {
4309 #if OPT_INSTRUCTIONS_UNIFICATION
4321 for (i = 0; i < size; i++) {
4322 iobj = (INSN *)list;
4323 argc += iobj->operand_size;
4328 ptr = operands = compile_data_alloc2(iseq, sizeof(VALUE), argc);
4333 for (i = 0; i < size; i++) {
4334 iobj = (INSN *)list;
4336 ptr += iobj->operand_size;
4340 return new_insn_core(iseq, iobj->insn_info.line_no, iobj->insn_info.node_id, insn_id, argc, operands);
4352 #if OPT_INSTRUCTIONS_UNIFICATION
4358 list = FIRST_ELEMENT(anchor);
4360 if (IS_INSN(list)) {
4361 iobj = (INSN *)list;
4363 if (unified_insns_data[id] != 0) {
4364 const int *const *entry = unified_insns_data[id];
4365 for (j = 1; j < (intptr_t)entry[0]; j++) {
4366 const int *unified = entry[j];
4368 for (k = 2; k < unified[1]; k++) {
4370 ((INSN *)li)->insn_id != unified[k]) {
4377 new_unified_insn(iseq, unified[0], unified[1] - 1,
4382 niobj->link.next = li;
4401 all_string_result_p(const NODE *node)
4403 if (!node) return FALSE;
4404 switch (nd_type(node)) {
4405 case NODE_STR: case NODE_DSTR: case NODE_FILE:
4407 case NODE_IF: case NODE_UNLESS:
4408 if (!RNODE_IF(node)->nd_body || !RNODE_IF(node)->nd_else) return FALSE;
4409 if (all_string_result_p(RNODE_IF(node)->nd_body))
4410 return all_string_result_p(RNODE_IF(node)->nd_else);
4412 case NODE_AND: case NODE_OR:
4413 if (!RNODE_AND(node)->nd_2nd)
4414 return all_string_result_p(RNODE_AND(node)->nd_1st);
4415 if (!all_string_result_p(RNODE_AND(node)->nd_1st))
4417 return all_string_result_p(RNODE_AND(node)->nd_2nd);
4426 const struct RNode_LIST *list = RNODE_DSTR(node)->nd_next;
4427 VALUE lit = rb_node_dstr_string_val(node);
4431 debugp_param(
"nd_lit", lit);
4435 COMPILE_ERROR(ERROR_ARGS "dstr: must be string: %s",
4436 rb_builtin_type_name(TYPE(lit)));
4439 lit = rb_fstring(lit);
4440 ADD_INSN1(ret, node, putobject, lit);
4442 if (RSTRING_LEN(lit) == 0) first_lit = LAST_ELEMENT(ret);
4446 const NODE *const head = list->nd_head;
4447 if (nd_type_p(head, NODE_STR)) {
4448 lit = rb_node_str_string_val(head);
4449 ADD_INSN1(ret, head, putobject, lit);
4454 CHECK(COMPILE(ret,
"each string", head));
4459 if (NIL_P(lit) && first_lit) {
4460 ELEM_REMOVE(first_lit);
4471 while (node && nd_type_p(node, NODE_BLOCK)) {
4472 CHECK(COMPILE_(ret, "BLOCK body", RNODE_BLOCK(node)->nd_head,
4473 (RNODE_BLOCK(node)->nd_next ? 1 : popped)));
4474 node = RNODE_BLOCK(node)->nd_next;
4477 CHECK(COMPILE_(ret,
"BLOCK next", RNODE_BLOCK(node)->nd_next, popped));
4486 if (!RNODE_DSTR(node)->nd_next) {
4487 VALUE lit = rb_node_dstr_string_val(node);
4488 ADD_INSN1(ret, node, putstring, lit);
4492 CHECK(compile_dstr_fragments(iseq, ret, node, &cnt));
4493 ADD_INSN1(ret, node, concatstrings, INT2FIX(cnt));
4502 int cflag = (int)RNODE_DREGX(node)->as.nd_cflag;
4504 if (!RNODE_DREGX(node)->nd_next) {
4506 VALUE src = rb_node_dregx_string_val(node);
4507 VALUE match = rb_reg_compile(src, cflag, NULL, 0);
4508 ADD_INSN1(ret, node, putobject, match);
4514 CHECK(compile_dstr_fragments(iseq, ret, node, &cnt));
4518 ADD_INSN(ret, node, pop);
4528 const int line = nd_line(node);
4529 LABEL *lend = NEW_LABEL(line);
4530 rb_num_t cnt = ISEQ_FLIP_CNT_INCREMENT(ISEQ_BODY(iseq)->local_iseq)
4531 + VM_SVAR_FLIPFLOP_START;
4534 ADD_INSN2(ret, node, getspecial, key,
INT2FIX(0));
4535 ADD_INSNL(ret, node, branchif, lend);
4538 CHECK(COMPILE(ret,
"flip2 beg", RNODE_FLIP2(node)->nd_beg));
4539 ADD_INSNL(ret, node, branchunless, else_label);
4540 ADD_INSN1(ret, node, putobject, Qtrue);
4541 ADD_INSN1(ret, node, setspecial, key);
4543 ADD_INSNL(ret, node, jump, then_label);
4547 ADD_LABEL(ret, lend);
4548 CHECK(COMPILE(ret,
"flip2 end", RNODE_FLIP2(node)->nd_end));
4549 ADD_INSNL(ret, node, branchunless, then_label);
4550 ADD_INSN1(ret, node, putobject,
Qfalse);
4551 ADD_INSN1(ret, node, setspecial, key);
4552 ADD_INSNL(ret, node, jump, then_label);
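/* Roughly: each flip-flop owns a hidden slot in the special-variable area
 * (ISEQ_FLIP_CNT_INCREMENT + VM_SVAR_FLIPFLOP_START). getspecial reads the
 * current on/off state; matching the begin condition stores Qtrue via
 * setspecial, matching the end condition resets it to Qfalse, which is how
 * `cond1..cond2` keeps its state between evaluations. */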
4561 #define COMPILE_SINGLE 2
4568 LABEL *label = NEW_LABEL(nd_line(cond));
4569 if (!then_label) then_label = label;
4570 else if (!else_label) else_label = label;
4572 CHECK(compile_branch_condition(iseq, seq, cond, then_label, else_label));
4574 if (LIST_INSN_SIZE_ONE(seq)) {
4575 INSN *insn = (INSN *)ELEM_FIRST_INSN(FIRST_ELEMENT(seq));
4576 if (insn->insn_id == BIN(jump) && (LABEL *)(insn->operands[0]) == label)
4579 if (!label->refcnt) {
4580 return COMPILE_SINGLE;
4582 ADD_LABEL(seq, label);
4592 DECL_ANCHOR(ignore);
4595 switch (nd_type(cond)) {
4597 CHECK(ok = compile_logical(iseq, ret, RNODE_AND(cond)->nd_1st, NULL, else_label));
4598 cond = RNODE_AND(cond)->nd_2nd;
4599 if (ok == COMPILE_SINGLE) {
4600 INIT_ANCHOR(ignore);
4602 then_label = NEW_LABEL(nd_line(cond));
4606 CHECK(ok = compile_logical(iseq, ret, RNODE_OR(cond)->nd_1st, then_label, NULL));
4607 cond = RNODE_OR(cond)->nd_2nd;
4608 if (ok == COMPILE_SINGLE) {
4609 INIT_ANCHOR(ignore);
4611 else_label = NEW_LABEL(nd_line(cond));
4621 case NODE_IMAGINARY:
4628 ADD_INSNL(ret, cond, jump, then_label);
4633 ADD_INSNL(ret, cond, jump, else_label);
4639 CHECK(COMPILE_POPPED(ret,
"branch condition", cond));
4640 ADD_INSNL(ret, cond, jump, then_label);
4643 CHECK(compile_flip_flop(iseq, ret, cond, TRUE, then_label, else_label));
4646 CHECK(compile_flip_flop(iseq, ret, cond, FALSE, then_label, else_label));
4649 CHECK(compile_defined_expr(iseq, ret, cond, Qfalse, ret == ignore));
4653 DECL_ANCHOR(cond_seq);
4654 INIT_ANCHOR(cond_seq);
4656 CHECK(COMPILE(cond_seq,
"branch condition", cond));
4658 if (LIST_INSN_SIZE_ONE(cond_seq)) {
4659 INSN *insn = (INSN *)ELEM_FIRST_INSN(FIRST_ELEMENT(cond_seq));
4660 if (insn->insn_id == BIN(putobject)) {
4661 if (RTEST(insn->operands[0])) {
4662 ADD_INSNL(ret, cond, jump, then_label);
4667 ADD_INSNL(ret, cond, jump, else_label);
4672 ADD_SEQ(ret, cond_seq);
4677 ADD_INSNL(ret, cond, branchunless, else_label);
4678 ADD_INSNL(ret, cond, jump, then_label);
4682 #define HASH_BRACE 1
4685 keyword_node_p(const NODE *const node)
4687 return nd_type_p(node, NODE_HASH) && (RNODE_HASH(node)->nd_brace & HASH_BRACE) != HASH_BRACE;
4693 switch (nd_type(node)) {
4695 return rb_node_sym_string_val(node);
4697 UNKNOWN_NODE("get_symbol_value", node, Qnil);
4704 NODE *node = node_hash->nd_head;
4708 for (int i = 0; node != NULL; i++, node = RNODE_LIST(RNODE_LIST(node)->nd_next)->nd_next) {
4709 VALUE key = get_symbol_value(iseq, RNODE_LIST(node)->nd_head);
4725 const NODE *const root_node,
4733 if (RNODE_HASH(root_node)->nd_head && nd_type_p(RNODE_HASH(root_node)->nd_head, NODE_LIST)) {
4734 const NODE *node = RNODE_HASH(root_node)->nd_head;
4738 const NODE *key_node = RNODE_LIST(node)->nd_head;
4742 if (key_node && nd_type_p(key_node, NODE_SYM)) {
4747 *flag |= VM_CALL_KW_SPLAT;
4748 if (seen_nodes > 1 || RNODE_LIST(RNODE_LIST(node)->nd_next)->nd_next) {
4753 *flag |= VM_CALL_KW_SPLAT_MUT;
4758 node = RNODE_LIST(node)->nd_next;
4759 node = RNODE_LIST(node)->nd_next;
4763 node = RNODE_HASH(root_node)->nd_head;
4766 VALUE key_index = node_hash_unique_key_index(iseq, RNODE_HASH(root_node), &
len);
4769 VALUE *keywords = kw_arg->keywords;
4772 kw_arg->references = 0;
4773 kw_arg->keyword_len =
len;
4775 *kw_arg_ptr = kw_arg;
4777 for (i=0; node != NULL; i++, node = RNODE_LIST(RNODE_LIST(node)->nd_next)->nd_next) {
4778 const NODE *key_node = RNODE_LIST(node)->nd_head;
4779 const NODE *val_node = RNODE_LIST(RNODE_LIST(node)->nd_next)->nd_head;
4782 keywords[j] = get_symbol_value(iseq, key_node);
4786 NO_CHECK(COMPILE_(ret,
"keyword values", val_node, popped));
4800 for (; node; len++, node = RNODE_LIST(node)->nd_next) {
4802 EXPECT_NODE("compile_args", node, NODE_LIST, -1);
4805 if (RNODE_LIST(node)->nd_next == NULL && keyword_node_p(RNODE_LIST(node)->nd_head)) {
4806 *kwnode_ptr = RNODE_LIST(node)->nd_head;
4809 RUBY_ASSERT(!keyword_node_p(RNODE_LIST(node)->nd_head));
4810 NO_CHECK(COMPILE_(ret,
"array element", RNODE_LIST(node)->nd_head, FALSE));
4818 frozen_string_literal_p(const rb_iseq_t *iseq)
4820 return ISEQ_COMPILE_DATA(iseq)->option->frozen_string_literal > 0;
4824 static_literal_node_p(const NODE *node, const rb_iseq_t *iseq, bool hash_key)
4826 switch (nd_type(node)) {
4834 case NODE_IMAGINARY:
4841 return hash_key || frozen_string_literal_p(iseq);
4850 switch (nd_type(node)) {
4852 return rb_node_integer_literal_val(node);
4854 return rb_node_float_literal_val(node);
4856 return rb_node_rational_literal_val(node);
4857 case NODE_IMAGINARY:
4858 return rb_node_imaginary_literal_val(node);
4866 return rb_node_sym_string_val(node);
4868 return rb_node_regx_string_val(node);
4870 return rb_node_line_lineno_val(node);
4872 return rb_node_encoding_val(node);
4875 if (ISEQ_COMPILE_DATA(iseq)->option->debug_frozen_string_literal || RTEST(ruby_debug)) {
4876 VALUE lit = get_string_value(node);
4877 return rb_str_with_debug_created_info(lit, rb_iseq_path(iseq), (int)nd_line(node));
4880 return get_string_value(node);
4883 rb_bug("unexpected node: %s", ruby_node_name(nd_type(node)));
4890 const NODE *line_node = node;
4892 if (nd_type_p(node, NODE_ZLIST)) {
4894 ADD_INSN1(ret, line_node, newarray, INT2FIX(0));
4899 EXPECT_NODE("compile_array", node, NODE_LIST, -1);
4902 for (; node; node = RNODE_LIST(node)->nd_next) {
4903 NO_CHECK(COMPILE_(ret,
"array element", RNODE_LIST(node)->nd_head, popped));
4945 const int max_stack_len = 0x100;
4946 const int min_tmp_ary_len = 0x40;
4950 #define FLUSH_CHUNK \
4952 if (first_chunk) ADD_INSN1(ret, line_node, newarray, INT2FIX(stack_len)); \
4953 else ADD_INSN1(ret, line_node, pushtoarray, INT2FIX(stack_len)); \
4954 first_chunk = FALSE; \
4962 if (static_literal_node_p(RNODE_LIST(node)->nd_head, iseq, false)) {
4964 const NODE *node_tmp = RNODE_LIST(node)->nd_next;
4965 for (; node_tmp && static_literal_node_p(RNODE_LIST(node_tmp)->nd_head, iseq, false); node_tmp = RNODE_LIST(node_tmp)->nd_next)
4968 if ((first_chunk && stack_len == 0 && !node_tmp) || count >= min_tmp_ary_len) {
4973 for (; count; count--, node = RNODE_LIST(node)->nd_next)
4974 rb_ary_push(ary, static_literal_value(RNODE_LIST(node)->nd_head, iseq));
4980 ADD_INSN1(ret, line_node, duparray, ary);
4981 first_chunk = FALSE;
4984 ADD_INSN1(ret, line_node, putobject, ary);
4985 ADD_INSN(ret, line_node, concattoarray);
4992 for (; count; count--, node = RNODE_LIST(node)->nd_next) {
4994 EXPECT_NODE(
"compile_array", node, NODE_LIST, -1);
4997 if (!RNODE_LIST(node)->nd_next && keyword_node_p(RNODE_LIST(node)->nd_head)) {
4999 if (stack_len == 0 && first_chunk) {
5000 ADD_INSN1(ret, line_node, newarray,
INT2FIX(0));
5005 NO_CHECK(COMPILE_(ret,
"array element", RNODE_LIST(node)->nd_head, 0));
5006 ADD_INSN(ret, line_node, pushtoarraykwsplat);
5010 NO_CHECK(COMPILE_(ret,
"array element", RNODE_LIST(node)->nd_head, 0));
5015 if (stack_len >= max_stack_len) FLUSH_CHUNK;
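/* Roughly: array literals are built in chunks of at most max_stack_len
 * (0x100) elements -- newarray for the first chunk, pushtoarray for later
 * ones -- so a huge literal cannot exhaust the VM stack; a run of at least
 * min_tmp_ary_len (0x40) static literals is precomputed into a frozen array
 * and emitted as a single duparray (or putobject + concattoarray), and a
 * trailing keyword hash goes through pushtoarraykwsplat. */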
5025 static_literal_node_pair_p(const NODE *node, const rb_iseq_t *iseq)
5027 return RNODE_LIST(node)->nd_head && static_literal_node_p(RNODE_LIST(node)->nd_head, iseq, true) && static_literal_node_p(RNODE_LIST(RNODE_LIST(node)->nd_next)->nd_head, iseq, false);
5033 const NODE *line_node = node;
5035 node = RNODE_HASH(node)->nd_head;
5037 if (!node || nd_type_p(node, NODE_ZLIST)) {
5039 ADD_INSN1(ret, line_node, newhash, INT2FIX(0));
5044 EXPECT_NODE("compile_hash", node, NODE_LIST, -1);
5047 for (; node; node = RNODE_LIST(node)->nd_next) {
5048 NO_CHECK(COMPILE_(ret,
"hash element", RNODE_LIST(node)->nd_head, popped));
5071 const int max_stack_len = 0x100;
5072 const int min_tmp_hash_len = 0x800;
5074 int first_chunk = 1;
5075 DECL_ANCHOR(anchor);
5076 INIT_ANCHOR(anchor);
5079 #define FLUSH_CHUNK() \
5081 if (first_chunk) { \
5082 APPEND_LIST(ret, anchor); \
5083 ADD_INSN1(ret, line_node, newhash, INT2FIX(stack_len)); \
5086 ADD_INSN1(ret, line_node, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE)); \
5087 ADD_INSN(ret, line_node, swap); \
5088 APPEND_LIST(ret, anchor); \
5089 ADD_SEND(ret, line_node, id_core_hash_merge_ptr, INT2FIX(stack_len + 1)); \
5091 INIT_ANCHOR(anchor); \
5092 first_chunk = stack_len = 0; \
5099 if (static_literal_node_pair_p(node, iseq)) {
5101 const NODE *node_tmp = RNODE_LIST(RNODE_LIST(node)->nd_next)->nd_next;
5102 for (; node_tmp && static_literal_node_pair_p(node_tmp, iseq); node_tmp = RNODE_LIST(RNODE_LIST(node_tmp)->nd_next)->nd_next)
5105 if ((first_chunk && stack_len == 0 && !node_tmp) || count >= min_tmp_hash_len) {
5110 for (; count; count--, node = RNODE_LIST(RNODE_LIST(node)->nd_next)->nd_next) {
5112 elem[0] = static_literal_value(RNODE_LIST(node)->nd_head, iseq);
5113 elem[1] = static_literal_value(RNODE_LIST(RNODE_LIST(node)->nd_next)->nd_head, iseq);
5124 ADD_INSN1(ret, line_node, duphash, hash);
5128 ADD_INSN1(ret, line_node, putspecialobject,
INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
5129 ADD_INSN(ret, line_node, swap);
5131 ADD_INSN1(ret, line_node, putobject, hash);
5133 ADD_SEND(ret, line_node, id_core_hash_merge_kwd,
INT2FIX(2));
5140 for (; count; count--, node = RNODE_LIST(RNODE_LIST(node)->nd_next)->nd_next) {
5143 EXPECT_NODE(
"compile_hash", node, NODE_LIST, -1);
5146 if (RNODE_LIST(node)->nd_head) {
5148 NO_CHECK(COMPILE_(anchor,
"hash key element", RNODE_LIST(node)->nd_head, 0));
5149 NO_CHECK(COMPILE_(anchor,
"hash value element", RNODE_LIST(RNODE_LIST(node)->nd_next)->nd_head, 0));
5153 if (stack_len >= max_stack_len) FLUSH_CHUNK();
5159 const NODE *kw = RNODE_LIST(RNODE_LIST(node)->nd_next)->nd_head;
5160 int empty_kw = nd_type_p(kw, NODE_HASH) && (!RNODE_HASH(kw)->nd_head);
5161 int first_kw = first_chunk && stack_len == 0;
5162 int last_kw = !RNODE_LIST(RNODE_LIST(node)->nd_next)->nd_next;
5163 int only_kw = last_kw && first_kw;
5165 empty_kw = empty_kw || nd_type_p(kw, NODE_NIL);
5167 if (only_kw && method_call_keywords) {
5175 NO_CHECK(COMPILE(ret,
"keyword splat", kw));
5177 else if (first_kw) {
5181 ADD_INSN1(ret, line_node, newhash,
INT2FIX(0));
5188 if (only_kw && method_call_keywords) {
5194 NO_CHECK(COMPILE(ret,
"keyword splat", kw));
5201 ADD_INSN1(ret, line_node, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
5202 if (first_kw) ADD_INSN1(ret, line_node, newhash, INT2FIX(0));
5203 else ADD_INSN(ret, line_node, swap);
5205 NO_CHECK(COMPILE(ret, "keyword splat", kw));
5207 ADD_SEND(ret, line_node, id_core_hash_merge_kwd, INT2FIX(2));
5222 rb_node_case_when_optimizable_literal(const NODE *const node)
5224 switch (nd_type(node)) {
5226 return rb_node_integer_literal_val(node);
5228 VALUE v = rb_node_float_literal_val(node);
5237 case NODE_IMAGINARY:
5246 return rb_node_sym_string_val(node);
5248 return rb_node_line_lineno_val(node);
5250 return rb_node_str_string_val(node);
5252 return rb_node_file_path_val(node);
5259 LABEL *l1, int only_special_literals, VALUE literals)
5262 const NODE *val = RNODE_LIST(vals)->nd_head;
5263 VALUE lit = rb_node_case_when_optimizable_literal(val);
5266 only_special_literals = 0;
5272 if (nd_type_p(val, NODE_STR) || nd_type_p(val, NODE_FILE)) {
5273 debugp_param(
"nd_lit", get_string_value(val));
5274 lit = get_string_value(val);
5275 ADD_INSN1(cond_seq, val, putobject, lit);
5279 if (!COMPILE(cond_seq, "when cond", val)) return -1;
5283 ADD_INSN1(cond_seq, vals, topn,
INT2FIX(1));
5284 ADD_CALL(cond_seq, vals, idEqq,
INT2FIX(1));
5285 ADD_INSNL(cond_seq, val, branchif, l1);
5286 vals = RNODE_LIST(vals)->nd_next;
5288 return only_special_literals;
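/* Roughly: while emitting the `when` tests, values that are "special"
 * literals (integers, symbols, frozen strings, ...) are also collected into
 * the literals cdhash; if every value qualifies, the caller can dispatch in
 * O(1) via opt_case_dispatch, otherwise each value falls back to the
 * generic topn/#===/branchif sequence emitted above. */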
5293 LABEL *l1, int only_special_literals, VALUE literals)
5295 const NODE *line_node = vals;
5297 switch (nd_type(vals)) {
5299 if (when_vals(iseq, cond_seq, vals, l1, only_special_literals, literals) < 0)
5303 ADD_INSN (cond_seq, line_node, dup);
5304 CHECK(COMPILE(cond_seq, "when splat", RNODE_SPLAT(vals)->nd_head));
5305 ADD_INSN1(cond_seq, line_node, splatarray, Qfalse);
5306 ADD_INSN1(cond_seq, line_node, checkmatch, INT2FIX(VM_CHECKMATCH_TYPE_CASE | VM_CHECKMATCH_ARRAY));
5307 ADD_INSNL(cond_seq, line_node, branchif, l1);
5310 CHECK(when_splat_vals(iseq, cond_seq, RNODE_ARGSCAT(vals)->nd_head, l1, only_special_literals, literals));
5311 CHECK(when_splat_vals(iseq, cond_seq, RNODE_ARGSCAT(vals)->nd_body, l1, only_special_literals, literals));
5314 CHECK(when_splat_vals(iseq, cond_seq, RNODE_ARGSPUSH(vals)->nd_head, l1, only_special_literals, literals));
5315 ADD_INSN (cond_seq, line_node, dup);
5316 CHECK(COMPILE(cond_seq,
"when argspush body", RNODE_ARGSPUSH(vals)->nd_body));
5317 ADD_INSN1(cond_seq, line_node, checkmatch,
INT2FIX(VM_CHECKMATCH_TYPE_CASE));
5318 ADD_INSNL(cond_seq, line_node, branchif, l1);
5321 ADD_INSN (cond_seq, line_node, dup);
5322 CHECK(COMPILE(cond_seq,
"when val", vals));
5323 ADD_INSN1(cond_seq, line_node, splatarray,
Qfalse);
5324 ADD_INSN1(cond_seq, line_node, checkmatch,
INT2FIX(VM_CHECKMATCH_TYPE_CASE | VM_CHECKMATCH_ARRAY));
5325 ADD_INSNL(cond_seq, line_node, branchif, l1);
5418 const NODE *line_node;
5433 add_masgn_lhs_node(struct masgn_state *state, int lhs_pos, const NODE *line_node, int argc, INSN *before_insn)
5436 rb_bug("no masgn_state");
5445 memo->before_insn = before_insn;
5446 memo->line_node = line_node;
5447 memo->argn = state->num_args + 1;
5448 memo->num_args = argc;
5449 state->num_args += argc;
5450 memo->lhs_pos = lhs_pos;
5452 if (!state->first_memo) {
5453 state->first_memo = memo;
5456 state->last_memo->next = memo;
5458 state->last_memo = memo;
5468 switch (nd_type(node)) {
5469 case NODE_ATTRASGN: {
5471 const NODE *line_node = node;
5473 CHECK(COMPILE_POPPED(pre, "masgn lhs (NODE_ATTRASGN)", node));
5475 bool safenav_call = false;
5477 iobj = (INSN *)get_prev_insn((INSN *)insn_element);
5479 ELEM_REMOVE(insn_element);
5480 if (!IS_INSN_ID(iobj, send)) {
5481 safenav_call =
true;
5482 iobj = (
INSN *)get_prev_insn(iobj);
5483 ELEM_INSERT_NEXT(&iobj->link, insn_element);
5485 (pre->last = iobj->link.prev)->next = 0;
5488 int argc = vm_ci_argc(ci) + 1;
5489 ci = ci_argc_set(iseq, ci, argc);
5490 OPERAND_AT(iobj, 0) = (
VALUE)ci;
5494 ADD_INSN(lhs, line_node, swap);
5497 ADD_INSN1(lhs, line_node, topn,
INT2FIX(argc));
5500 if (!add_masgn_lhs_node(state, lhs_pos, line_node, argc, (INSN *)LAST_ELEMENT(lhs))) {
5504 iobj->link.prev = lhs->last;
5505 lhs->last->next = &iobj->link;
5506 for (lhs->last = &iobj->link; lhs->last->next; lhs->last = lhs->last->next);
5507 if (vm_ci_flag(ci) & VM_CALL_ARGS_SPLAT) {
5508 int argc = vm_ci_argc(ci);
5509 bool dupsplat =
false;
5510 ci = ci_argc_set(iseq, ci, argc - 1);
5511 if (!(vm_ci_flag(ci) & VM_CALL_ARGS_SPLAT_MUT)) {
5518 ci = ci_flag_set(iseq, ci, VM_CALL_ARGS_SPLAT_MUT);
5520 OPERAND_AT(iobj, 0) = (
VALUE)ci;
5529 int line_no = nd_line(line_node);
5530 int node_id = nd_node_id(line_node);
5533 INSERT_BEFORE_INSN(iobj, line_no, node_id, swap);
5534 INSERT_BEFORE_INSN1(iobj, line_no, node_id, splatarray,
Qtrue);
5535 INSERT_BEFORE_INSN(iobj, line_no, node_id, swap);
5537 INSERT_BEFORE_INSN1(iobj, line_no, node_id, pushtoarray,
INT2FIX(1));
5539 if (!safenav_call) {
5540 ADD_INSN(lhs, line_node, pop);
5542 ADD_INSN(lhs, line_node, pop);
5545 for (
int i=0; i < argc; i++) {
5546 ADD_INSN(post, line_node, pop);
5551 DECL_ANCHOR(nest_rhs);
5552 INIT_ANCHOR(nest_rhs);
5553 DECL_ANCHOR(nest_lhs);
5554 INIT_ANCHOR(nest_lhs);
5556 int prev_level = state->lhs_level;
5557 bool prev_nested = state->nested;
5559 state->lhs_level = lhs_pos - 1;
5560 CHECK(compile_massign0(iseq, pre, nest_rhs, nest_lhs, post, node, state, 1));
5561 state->lhs_level = prev_level;
5562 state->nested = prev_nested;
5564 ADD_SEQ(lhs, nest_rhs);
5565 ADD_SEQ(lhs, nest_lhs);
5569 if (!RNODE_CDECL(node)->nd_vid) {
5573 CHECK(COMPILE_POPPED(pre,
"masgn lhs (NODE_CDECL)", node));
5576 iobj = (
INSN *)insn_element;
5579 ELEM_REMOVE(insn_element);
5580 pre->last = iobj->link.prev;
5583 if (!add_masgn_lhs_node(state, lhs_pos, node, 1, (
INSN *)LAST_ELEMENT(lhs))) {
5587 ADD_INSN(post, node, pop);
5592 DECL_ANCHOR(anchor);
5593 INIT_ANCHOR(anchor);
5594 CHECK(COMPILE_POPPED(anchor,
"masgn lhs", node));
5595 ELEM_REMOVE(FIRST_ELEMENT(anchor));
5596 ADD_SEQ(lhs, anchor);
5607 CHECK(compile_massign_opt_lhs(iseq, ret, RNODE_LIST(lhsn)->nd_next));
5608 CHECK(compile_massign_lhs(iseq, ret, ret, ret, ret, RNODE_LIST(lhsn)->nd_head, NULL, 0));
5615 const NODE *rhsn, const NODE *orig_lhsn)
5618 const int memsize = numberof(mem);
5620 int llen = 0, rlen = 0;
5622 const NODE *lhsn = orig_lhsn;
5624 #define MEMORY(v) { \
5626 if (memindex == memsize) return 0; \
5627 for (i=0; i<memindex; i++) { \
5628 if (mem[i] == (v)) return 0; \
5630 mem[memindex++] = (v); \
5633 if (rhsn == 0 || !nd_type_p(rhsn, NODE_LIST)) {
5638 const NODE *ln = RNODE_LIST(lhsn)->nd_head;
5639 switch (nd_type(ln)) {
5644 MEMORY(get_nd_vid(ln));
5649 lhsn = RNODE_LIST(lhsn)->nd_next;
5655 NO_CHECK(COMPILE_POPPED(ret,
"masgn val (popped)", RNODE_LIST(rhsn)->nd_head));
5658 NO_CHECK(COMPILE(ret,
"masgn val", RNODE_LIST(rhsn)->nd_head));
5660 rhsn = RNODE_LIST(rhsn)->nd_next;
5665 for (i=0; i<llen-rlen; i++) {
5666 ADD_INSN(ret, orig_lhsn, putnil);
5670 compile_massign_opt_lhs(iseq, ret, orig_lhsn);
5677 const NODE *rhsn = RNODE_MASGN(node)->nd_value;
5678 const NODE *splatn = RNODE_MASGN(node)->nd_args;
5679 const NODE *lhsn = RNODE_MASGN(node)->nd_head;
5680 const NODE *lhsn_count = lhsn;
5681 int lhs_splat = (splatn && NODE_NAMED_REST_P(splatn)) ? 1 : 0;
5686 while (lhsn_count) {
5688 lhsn_count = RNODE_LIST(lhsn_count)->nd_next;
5691 CHECK(compile_massign_lhs(iseq, pre, rhs, lhs, post, RNODE_LIST(lhsn)->nd_head, state, (llen - lpos) + lhs_splat + state->lhs_level));
5693 lhsn = RNODE_LIST(lhsn)->nd_next;
5697 if (nd_type_p(splatn, NODE_POSTARG)) {
5699 const NODE *postn = RNODE_POSTARG(splatn)->nd_2nd;
5700 const NODE *restn = RNODE_POSTARG(splatn)->nd_1st;
5701 int plen = (int)RNODE_LIST(postn)->as.nd_alen;
5703 int flag = 0x02 | (NODE_NAMED_REST_P(restn) ? 0x01 : 0x00);
5705 ADD_INSN2(lhs, splatn, expandarray,
INT2FIX(plen),
INT2FIX(flag));
5707 if (NODE_NAMED_REST_P(restn)) {
5708 CHECK(compile_massign_lhs(iseq, pre, rhs, lhs, post, restn, state, 1 + plen + state->lhs_level));
5711 CHECK(compile_massign_lhs(iseq, pre, rhs, lhs, post, RNODE_LIST(postn)->nd_head, state, (plen - ppos) + state->lhs_level));
5713 postn = RNODE_LIST(postn)->nd_next;
5718 CHECK(compile_massign_lhs(iseq, pre, rhs, lhs, post, splatn, state, 1 + state->lhs_level));
5722 if (!state->nested) {
5723 NO_CHECK(COMPILE(rhs,
"normal masgn rhs", rhsn));
5727 ADD_INSN(rhs, node, dup);
5729 ADD_INSN2(rhs, node, expandarray,
INT2FIX(llen),
INT2FIX(lhs_splat));
5736 if (!popped || RNODE_MASGN(node)->nd_args || !compile_massign_opt(iseq, ret, RNODE_MASGN(node)->nd_value, RNODE_MASGN(node)->nd_head)) {
5738 state.lhs_level = popped ? 0 : 1;
5741 state.first_memo = NULL;
5742 state.last_memo = NULL;
5752 int ok = compile_massign0(iseq, pre, rhs, lhs, post, node, &state, popped);
5756 VALUE topn_arg = INT2FIX((state.num_args - memo->argn) + memo->lhs_pos);
5757 for (int i = 0; i < memo->num_args; i++) {
5758 INSERT_BEFORE_INSN1(memo->before_insn, nd_line(memo->line_node), nd_node_id(memo->line_node), topn, topn_arg);
5760 tmp_memo = memo->next;
5769 if (!popped && state.num_args >= 1) {
5771 ADD_INSN1(ret, node, setn, INT2FIX(state.num_args));
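/* Roughly: compile_massign splits a multiple assignment into pre (receiver
 * and index evaluation), rhs, lhs and post anchors, expands the rhs once
 * with `expandarray llen, lhs_splat`, and then replays each recorded lhs
 * with a topn that digs its value out of the stack; trivial cases such as
 * `a, b = 1, 2` are handled without any array by compile_massign_opt. */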
5783 switch (nd_type(node)) {
5793 node = RNODE_COLON2(node)->nd_head;
5802 compile_const_prefix(rb_iseq_t *iseq, const NODE *const node,
5805 switch (nd_type(node)) {
5807 debugi(
"compile_const_prefix - colon", RNODE_CONST(node)->nd_vid);
5808 ADD_INSN1(body, node, putobject,
Qtrue);
5809 ADD_INSN1(body, node, getconstant,
ID2SYM(RNODE_CONST(node)->nd_vid));
5812 debugi(
"compile_const_prefix - colon3", RNODE_COLON3(node)->nd_mid);
5813 ADD_INSN(body, node, pop);
5814 ADD_INSN1(body, node, putobject, rb_cObject);
5815 ADD_INSN1(body, node, putobject,
Qtrue);
5816 ADD_INSN1(body, node, getconstant,
ID2SYM(RNODE_COLON3(node)->nd_mid));
5819 CHECK(compile_const_prefix(iseq, RNODE_COLON2(node)->nd_head, pref, body));
5820 debugi("compile_const_prefix - colon2", RNODE_COLON2(node)->nd_mid);
5821 ADD_INSN1(body, node, putobject, Qfalse);
5822 ADD_INSN1(body, node, getconstant, ID2SYM(RNODE_COLON2(node)->nd_mid));
5825 CHECK(COMPILE(pref, "const colon2 prefix", node));
5834 if (nd_type_p(cpath, NODE_COLON3)) {
5836 ADD_INSN1(ret, cpath, putobject, rb_cObject);
5837 return VM_DEFINECLASS_FLAG_SCOPED;
5839 else if (nd_type_p(cpath, NODE_COLON2) && RNODE_COLON2(cpath)->nd_head) {
5841 NO_CHECK(COMPILE(ret,
"nd_else->nd_head", RNODE_COLON2(cpath)->nd_head));
5842 return VM_DEFINECLASS_FLAG_SCOPED;
5846 ADD_INSN1(ret, cpath, putspecialobject,
5847 INT2FIX(VM_SPECIAL_OBJECT_CONST_BASE));
5853 private_recv_p(const NODE *node)
5855 NODE *recv = get_nd_recv(node);
5856 if (recv && nd_type_p(recv, NODE_SELF)) {
5857 return RNODE_SELF(recv)->nd_state != 0;
5864 const NODE *const node, LABEL **lfinish, VALUE needstr, bool ignore);
5867 compile_call(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, const enum node_type type, const NODE *const line_node, int popped, bool assume_receiver);
5874 enum defined_type expr_type = DEFINED_NOT_DEFINED;
5875 enum node_type type;
5876 const int line = nd_line(node);
5877 const NODE *line_node = node;
5879 switch (type = nd_type(node)) {
5883 expr_type = DEFINED_NIL;
5886 expr_type = DEFINED_SELF;
5889 expr_type = DEFINED_TRUE;
5892 expr_type = DEFINED_FALSE;
5897 const NODE *vals = (nd_type(node) == NODE_HASH) ? RNODE_HASH(node)->nd_head : node;
5901 if (RNODE_LIST(vals)->nd_head) {
5902 defined_expr0(iseq, ret, RNODE_LIST(vals)->nd_head, lfinish,
Qfalse,
false);
5905 lfinish[1] = NEW_LABEL(line);
5907 ADD_INSNL(ret, line_node, branchunless, lfinish[1]);
5909 } while ((vals = RNODE_LIST(vals)->nd_next) != NULL);
5922 case NODE_IMAGINARY:
5927 expr_type = DEFINED_EXPR;
5931 defined_expr0(iseq, ret, RNODE_LIST(node)->nd_head, lfinish, Qfalse, false);
5933 lfinish[1] = NEW_LABEL(line);
5935 ADD_INSNL(ret, line_node, branchunless, lfinish[1]);
5936 expr_type = DEFINED_EXPR;
5942 expr_type = DEFINED_LVAR;
5945 #define PUSH_VAL(type) (needstr == Qfalse ? Qtrue : rb_iseq_defined_string(type))
5947 ADD_INSN3(ret, line_node, definedivar,
5948 ID2SYM(RNODE_IVAR(node)->nd_vid), get_ivar_ic_value(iseq,RNODE_IVAR(node)->nd_vid), PUSH_VAL(DEFINED_IVAR));
5952 ADD_INSN(ret, line_node, putnil);
5953 ADD_INSN3(ret, line_node, defined,
INT2FIX(DEFINED_GVAR),
5954 ID2SYM(RNODE_GVAR(node)->nd_vid), PUSH_VAL(DEFINED_GVAR));
5958 ADD_INSN(ret, line_node, putnil);
5959 ADD_INSN3(ret, line_node, defined,
INT2FIX(DEFINED_CVAR),
5960 ID2SYM(RNODE_CVAR(node)->nd_vid), PUSH_VAL(DEFINED_CVAR));
5964 ADD_INSN(ret, line_node, putnil);
5965 ADD_INSN3(ret, line_node, defined,
INT2FIX(DEFINED_CONST),
5966 ID2SYM(RNODE_CONST(node)->nd_vid), PUSH_VAL(DEFINED_CONST));
5970 lfinish[1] = NEW_LABEL(line);
5972 defined_expr0(iseq, ret, RNODE_COLON2(node)->nd_head, lfinish,
Qfalse,
false);
5973 ADD_INSNL(ret, line_node, branchunless, lfinish[1]);
5974 NO_CHECK(COMPILE(ret,
"defined/colon2#nd_head", RNODE_COLON2(node)->nd_head));
5977 ADD_INSN3(ret, line_node, defined,
INT2FIX(DEFINED_CONST_FROM),
5978 ID2SYM(RNODE_COLON2(node)->nd_mid), PUSH_VAL(DEFINED_CONST));
5981 ADD_INSN3(ret, line_node, defined,
INT2FIX(DEFINED_METHOD),
5982 ID2SYM(RNODE_COLON2(node)->nd_mid), PUSH_VAL(DEFINED_METHOD));
5986 ADD_INSN1(ret, line_node, putobject, rb_cObject);
5987 ADD_INSN3(ret, line_node, defined,
5988 INT2FIX(DEFINED_CONST_FROM),
ID2SYM(RNODE_COLON3(node)->nd_mid), PUSH_VAL(DEFINED_CONST));
5996 case NODE_ATTRASGN:{
5997 const int explicit_receiver =
5998 (type == NODE_CALL || type == NODE_OPCALL ||
5999 (type == NODE_ATTRASGN && !private_recv_p(node)));
6001 if (get_nd_args(node) || explicit_receiver) {
6003 lfinish[1] = NEW_LABEL(line);
6006 lfinish[2] = NEW_LABEL(line);
6009 if (get_nd_args(node)) {
6010 defined_expr0(iseq, ret, get_nd_args(node), lfinish,
Qfalse,
false);
6011 ADD_INSNL(ret, line_node, branchunless, lfinish[1]);
6013 if (explicit_receiver) {
6014 defined_expr0(iseq, ret, get_nd_recv(node), lfinish,
Qfalse,
true);
6015 switch (nd_type(get_nd_recv(node))) {
6021 ADD_INSNL(ret, line_node, branchunless, lfinish[2]);
6022 compile_call(iseq, ret, get_nd_recv(node), nd_type(get_nd_recv(node)), line_node, 0, true);
6025 ADD_INSNL(ret, line_node, branchunless, lfinish[1]);
6026 NO_CHECK(COMPILE(ret,
"defined/recv", get_nd_recv(node)));
6030 ADD_INSN(ret, line_node, dup);
6032 ADD_INSN3(ret, line_node, defined,
INT2FIX(DEFINED_METHOD),
6033 ID2SYM(get_node_call_nd_mid(node)), PUSH_VAL(DEFINED_METHOD));
6036 ADD_INSN(ret, line_node, putself);
6038 ADD_INSN(ret, line_node, dup);
6040 ADD_INSN3(ret, line_node, defined,
INT2FIX(DEFINED_FUNC),
6041 ID2SYM(get_node_call_nd_mid(node)), PUSH_VAL(DEFINED_METHOD));
6047 ADD_INSN(ret, line_node, putnil);
6048 ADD_INSN3(ret, line_node, defined,
INT2FIX(DEFINED_YIELD), 0,
6049 PUSH_VAL(DEFINED_YIELD));
6050 iseq_set_use_block(ISEQ_BODY(iseq)->local_iseq);
6055 ADD_INSN(ret, line_node, putnil);
6056 ADD_INSN3(ret, line_node, defined,
INT2FIX(DEFINED_REF),
6057 INT2FIX((RNODE_BACK_REF(node)->nd_nth << 1) | (
type == NODE_BACK_REF)),
6058 PUSH_VAL(DEFINED_GVAR));
6063 ADD_INSN(ret, line_node, putnil);
6064 ADD_INSN3(ret, line_node, defined,
INT2FIX(DEFINED_ZSUPER), 0,
6065 PUSH_VAL(DEFINED_ZSUPER));
6071 case NODE_OP_ASGN_OR:
6072 case NODE_OP_ASGN_AND:
6081 expr_type = DEFINED_ASGN;
6088 VALUE str = rb_iseq_defined_string(expr_type);
6089 ADD_INSN1(ret, line_node, putobject, str);
6092 ADD_INSN1(ret, line_node, putobject,
Qtrue);
6099 ADD_SYNTHETIC_INSN(ret, 0, -1, putnil);
6100 iseq_set_exception_local_table(iseq);
6105 const NODE *const node, LABEL **lfinish, VALUE needstr, bool ignore)
6108 defined_expr0(iseq, ret, node, lfinish, needstr, false);
6110 int line = nd_line(node);
6111 LABEL *lstart = NEW_LABEL(line);
6112 LABEL *lend = NEW_LABEL(line);
6115 rb_iseq_new_with_callback_new_callback(build_defined_rescue_iseq, NULL);
6116 rescue = new_child_iseq_with_callback(iseq, ifunc,
6118 ISEQ_BODY(iseq)->location.label),
6119 iseq, ISEQ_TYPE_RESCUE, 0);
6120 lstart->rescued = LABEL_RESCUE_BEG;
6121 lend->rescued = LABEL_RESCUE_END;
6122 APPEND_LABEL(ret, lcur, lstart);
6123 ADD_LABEL(ret, lend);
6125 ADD_CATCH_ENTRY(CATCH_TYPE_RESCUE, lstart, lend, rescue, lfinish[1]);
6133 const int line = nd_line(node);
6134 const NODE *line_node = node;
6135 if (!RNODE_DEFINED(node)->nd_head) {
6136 VALUE str = rb_iseq_defined_string(DEFINED_NIL);
6137 ADD_INSN1(ret, line_node, putobject, str);
6142 lfinish[0] = NEW_LABEL(line);
6145 defined_expr(iseq, ret, RNODE_DEFINED(node)->nd_head, lfinish, needstr, ignore);
6147 ELEM_INSERT_NEXT(last, &new_insn_body(iseq, nd_line(line_node), nd_node_id(line_node), BIN(putnil), 0)->link);
6148 ADD_INSN(ret, line_node, swap);
6150 ADD_LABEL(ret, lfinish[2]);
6152 ADD_INSN(ret, line_node, pop);
6153 ADD_LABEL(ret, lfinish[1]);
6155 ADD_LABEL(ret, lfinish[0]);
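/* Roughly: defined_expr0 emits a chain of checks that branchunless to
 * lfinish[1] as soon as any sub-expression is undefined, and finally pushes
 * either Qtrue or the descriptive string from rb_iseq_defined_string (see
 * PUSH_VAL). Method and attribute forms evaluate their receiver under a
 * small rescue iseq (build_defined_rescue_iseq) so an exception there makes
 * the whole defined?(...) return nil instead of raising. */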
6161 make_name_for_block(const rb_iseq_t *orig_iseq)
6166 if (ISEQ_BODY(orig_iseq)->parent_iseq != 0) {
6167 while (ISEQ_BODY(orig_iseq)->local_iseq != iseq) {
6168 if (ISEQ_BODY(iseq)->type == ISEQ_TYPE_BLOCK) {
6171 iseq = ISEQ_BODY(iseq)->parent_iseq;
6176 return rb_sprintf("block in %"PRIsVALUE, ISEQ_BODY(iseq)->location.label);
6179 return rb_sprintf("block (%d levels) in %"PRIsVALUE, level, ISEQ_BODY(iseq)->location.label);
6188 enl->ensure_node = node;
6189 enl->prev = ISEQ_COMPILE_DATA(iseq)->ensure_node_stack;
6191 ISEQ_COMPILE_DATA(iseq)->ensure_node_stack = enl;
6201 while (erange->next != 0) {
6202 erange = erange->next;
6206 ne->end = erange->end;
6207 erange->end = lstart;
6213 can_add_ensure_iseq(const rb_iseq_t *iseq)
6216 if (ISEQ_COMPILE_DATA(iseq)->in_rescue && (e = ISEQ_COMPILE_DATA(iseq)->ensure_node_stack) != NULL) {
6218 if (e->ensure_node) return false;
6231 ISEQ_COMPILE_DATA(iseq)->ensure_node_stack;
6233 DECL_ANCHOR(ensure);
6235 INIT_ANCHOR(ensure);
6237 if (enlp->erange != NULL) {
6238 DECL_ANCHOR(ensure_part);
6239 LABEL *lstart = NEW_LABEL(0);
6240 LABEL *lend = NEW_LABEL(0);
6241 INIT_ANCHOR(ensure_part);
6243 add_ensure_range(iseq, enlp->erange, lstart, lend);
6245 ISEQ_COMPILE_DATA(iseq)->ensure_node_stack = enlp->prev;
6246 ADD_LABEL(ensure_part, lstart);
6247 NO_CHECK(COMPILE_POPPED(ensure_part,
"ensure part", enlp->ensure_node));
6248 ADD_LABEL(ensure_part, lend);
6249 ADD_SEQ(ensure, ensure_part);
6258 ISEQ_COMPILE_DATA(iseq)->ensure_node_stack = prev_enlp;
6259 ADD_SEQ(ret, ensure);
6264 check_keyword(
const NODE *node)
6268 if (nd_type_p(node, NODE_LIST)) {
6269 while (RNODE_LIST(node)->nd_next) {
6270 node = RNODE_LIST(node)->nd_next;
6272 node = RNODE_LIST(node)->nd_head;
6275 return keyword_node_p(node);
6280 keyword_node_single_splat_p(NODE *kwnode)
6284 NODE *node = RNODE_HASH(kwnode)->nd_head;
6285 return RNODE_LIST(node)->nd_head == NULL &&
6286 RNODE_LIST(RNODE_LIST(node)->nd_next)->nd_next == NULL;
6291 NODE *kwnode, unsigned int *flag_ptr)
6293 *flag_ptr |= VM_CALL_KW_SPLAT_MUT;
6294 ADD_INSN1(args, argn, putspecialobject,
INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
6295 ADD_INSN1(args, argn, newhash,
INT2FIX(0));
6296 compile_hash(iseq, args, kwnode, TRUE, FALSE);
6297 ADD_SEND(args, argn, id_core_hash_merge_kwd,
INT2FIX(2));
6300 #define SPLATARRAY_FALSE 0
6301 #define SPLATARRAY_TRUE 1
6302 #define DUP_SINGLE_KW_SPLAT 2
6306 unsigned int *dup_rest, unsigned int *flag_ptr, struct rb_callinfo_kwarg **kwarg_ptr)
6308 if (!argn) return 0;
6310 NODE *kwnode = NULL;
6312 switch (nd_type(argn)) {
6315 int len = compile_args(iseq, args, argn, &kwnode);
6316 RUBY_ASSERT(flag_ptr == NULL || (*flag_ptr & VM_CALL_ARGS_SPLAT) == 0);
6319 if (compile_keyword_arg(iseq, args, kwnode, kwarg_ptr, flag_ptr)) {
6323 if (keyword_node_single_splat_p(kwnode) && (*dup_rest & DUP_SINGLE_KW_SPLAT)) {
6324 compile_single_keyword_splat_mutable(iseq, args, argn, kwnode, flag_ptr);
6327 compile_hash(iseq, args, kwnode, TRUE, FALSE);
6336 NO_CHECK(COMPILE(args, "args (splat)", RNODE_SPLAT(argn)->nd_head));
6337 ADD_INSN1(args, argn, splatarray, RBOOL(*dup_rest & SPLATARRAY_TRUE));
6338 if (*dup_rest & SPLATARRAY_TRUE) *dup_rest &= ~SPLATARRAY_TRUE;
6339 if (flag_ptr) *flag_ptr |= VM_CALL_ARGS_SPLAT;
6340 RUBY_ASSERT(flag_ptr == NULL || (*flag_ptr & VM_CALL_KW_SPLAT) == 0);
6343 case NODE_ARGSCAT: {
6344 if (flag_ptr) *flag_ptr |= VM_CALL_ARGS_SPLAT;
6345 int argc = setup_args_core(iseq, args, RNODE_ARGSCAT(argn)->nd_head, dup_rest, NULL, NULL);
6346 bool args_pushed = false;
6348 if (nd_type_p(RNODE_ARGSCAT(argn)->nd_body, NODE_LIST)) {
6349 int rest_len = compile_args(iseq, args, RNODE_ARGSCAT(argn)->nd_body, &kwnode);
6350 if (kwnode) rest_len--;
6351 ADD_INSN1(args, argn, pushtoarray,
INT2FIX(rest_len));
6355 RUBY_ASSERT(!check_keyword(RNODE_ARGSCAT(argn)->nd_body));
6356 NO_CHECK(COMPILE(args,
"args (cat: splat)", RNODE_ARGSCAT(argn)->nd_body));
6359 if (nd_type_p(RNODE_ARGSCAT(argn)->nd_head, NODE_LIST)) {
6360 ADD_INSN1(args, argn, splatarray, RBOOL(*dup_rest & SPLATARRAY_TRUE));
6361 if (*dup_rest & SPLATARRAY_TRUE) *dup_rest &= ~SPLATARRAY_TRUE;
6364 else if (!args_pushed) {
6365 ADD_INSN(args, argn, concattoarray);
6371 *flag_ptr |= VM_CALL_KW_SPLAT;
6372 compile_hash(iseq, args, kwnode, TRUE, FALSE);
6378 case NODE_ARGSPUSH: {
6379 if (flag_ptr) *flag_ptr |= VM_CALL_ARGS_SPLAT;
6380 int argc = setup_args_core(iseq, args, RNODE_ARGSPUSH(argn)->nd_head, dup_rest, NULL, NULL);
6382 if (nd_type_p(RNODE_ARGSPUSH(argn)->nd_body, NODE_LIST)) {
6383 int rest_len = compile_args(iseq, args, RNODE_ARGSPUSH(argn)->nd_body, &kwnode);
6384 if (kwnode) rest_len--;
6385 ADD_INSN1(args, argn, newarray,
INT2FIX(rest_len));
6386 ADD_INSN1(args, argn, pushtoarray,
INT2FIX(1));
6389 if (keyword_node_p(RNODE_ARGSPUSH(argn)->nd_body)) {
6390 kwnode = RNODE_ARGSPUSH(argn)->nd_body;
6393 NO_CHECK(COMPILE(args,
"args (cat: splat)", RNODE_ARGSPUSH(argn)->nd_body));
6394 ADD_INSN1(args, argn, pushtoarray,
INT2FIX(1));
6400 *flag_ptr |= VM_CALL_KW_SPLAT;
6401 if (!keyword_node_single_splat_p(kwnode)) {
6402 *flag_ptr |= VM_CALL_KW_SPLAT_MUT;
6403 compile_hash(iseq, args, kwnode, TRUE, FALSE);
6405 else if (*dup_rest & DUP_SINGLE_KW_SPLAT) {
6406 compile_single_keyword_splat_mutable(iseq, args, argn, kwnode, flag_ptr);
6409 compile_hash(iseq, args, kwnode, TRUE, FALSE);
6417 UNKNOWN_NODE("setup_arg", argn, Qnil);
6423 setup_args_splat_mut(unsigned int *flag, int dup_rest, int initial_dup_rest)
6425 if ((*flag & VM_CALL_ARGS_SPLAT) && dup_rest != initial_dup_rest) {
6426 *flag |= VM_CALL_ARGS_SPLAT_MUT;
6431 setup_args_dup_rest_p(const NODE *argn)
6433 switch(nd_type(argn)) {
6444 case NODE_IMAGINARY:
6457 return setup_args_dup_rest_p(RNODE_COLON2(argn)->nd_head);
6468 unsigned int dup_rest = SPLATARRAY_TRUE, initial_dup_rest;
6471 const NODE *check_arg = nd_type_p(argn, NODE_BLOCK_PASS) ?
6472 RNODE_BLOCK_PASS(argn)->nd_head : argn;
6475 switch(nd_type(check_arg)) {
6478 dup_rest = SPLATARRAY_FALSE;
6482 dup_rest = !nd_type_p(RNODE_ARGSCAT(check_arg)->nd_head, NODE_LIST);
6484 case(NODE_ARGSPUSH):
6486 dup_rest = !((nd_type_p(RNODE_ARGSPUSH(check_arg)->nd_head, NODE_SPLAT) ||
6487 (nd_type_p(RNODE_ARGSPUSH(check_arg)->nd_head, NODE_ARGSCAT) &&
6488 nd_type_p(RNODE_ARGSCAT(RNODE_ARGSPUSH(check_arg)->nd_head)->nd_head, NODE_LIST))) &&
6489 nd_type_p(RNODE_ARGSPUSH(check_arg)->nd_body, NODE_HASH) &&
6490 !RNODE_HASH(RNODE_ARGSPUSH(check_arg)->nd_body)->nd_brace);
6492 if (dup_rest == SPLATARRAY_FALSE) {
6494 NODE *node = RNODE_HASH(RNODE_ARGSPUSH(check_arg)->nd_body)->nd_head;
6496 NODE *key_node = RNODE_LIST(node)->nd_head;
6497 if (key_node && setup_args_dup_rest_p(key_node)) {
6498 dup_rest = SPLATARRAY_TRUE;
6502 node = RNODE_LIST(node)->nd_next;
6503 NODE *value_node = RNODE_LIST(node)->nd_head;
6504 if (setup_args_dup_rest_p(value_node)) {
6505 dup_rest = SPLATARRAY_TRUE;
6509 node = RNODE_LIST(node)->nd_next;
6518 if (check_arg != argn && setup_args_dup_rest_p(RNODE_BLOCK_PASS(argn)->nd_body)) {
6520 dup_rest = SPLATARRAY_TRUE | DUP_SINGLE_KW_SPLAT;
6523 initial_dup_rest = dup_rest;
6525 if (argn && nd_type_p(argn, NODE_BLOCK_PASS)) {
6526 DECL_ANCHOR(arg_block);
6527 INIT_ANCHOR(arg_block);
6529 if (RNODE_BLOCK_PASS(argn)->forwarding && ISEQ_BODY(ISEQ_BODY(iseq)->local_iseq)->param.flags.forwardable) {
6530 int idx = ISEQ_BODY(ISEQ_BODY(iseq)->local_iseq)->local_table_size;
6532 RUBY_ASSERT(nd_type_p(RNODE_BLOCK_PASS(argn)->nd_head, NODE_ARGSPUSH));
6533 const NODE * arg_node =
6534 RNODE_ARGSPUSH(RNODE_BLOCK_PASS(argn)->nd_head)->nd_head;
6541 if (nd_type_p(arg_node, NODE_ARGSCAT)) {
6542 argc += setup_args_core(iseq, args, RNODE_ARGSCAT(arg_node)->nd_head, &dup_rest, flag, keywords);
6545 *flag |= VM_CALL_FORWARDING;
6547 ADD_GETLOCAL(args, argn, idx, get_lvar_level(iseq));
6548 setup_args_splat_mut(flag, dup_rest, initial_dup_rest);
6552 *flag |= VM_CALL_ARGS_BLOCKARG;
6554 NO_CHECK(COMPILE(arg_block,
"block", RNODE_BLOCK_PASS(argn)->nd_body));
6557 if (LIST_INSN_SIZE_ONE(arg_block)) {
6559 if (IS_INSN(elem)) {
6561 if (iobj->insn_id == BIN(getblockparam)) {
6562 iobj->insn_id = BIN(getblockparamproxy);
6566 ret = INT2FIX(setup_args_core(iseq, args, RNODE_BLOCK_PASS(argn)->nd_head, &dup_rest, flag, keywords));
6567 ADD_SEQ(args, arg_block);
6570 ret = INT2FIX(setup_args_core(iseq, args, argn, &dup_rest, flag, keywords));
6572 setup_args_splat_mut(flag, dup_rest, initial_dup_rest);
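/* Roughly: dup_rest decides whether `f(*ary)` must copy the splatted array
 * (splatarray true) or may pass it through (splatarray false). The copy is
 * only kept when something evaluated afterwards -- trailing arguments, a
 * keyword splat, or a block-pass expression -- could run arbitrary code and
 * mutate the array; setup_args_dup_rest_p treats plain variables and
 * literals as safe. DUP_SINGLE_KW_SPLAT likewise forces a defensive copy of
 * a lone **kw argument. */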
6580 int line = nd_line(body);
6582 const rb_iseq_t *block = NEW_CHILD_ISEQ(body, make_name_for_block(ISEQ_BODY(iseq)->parent_iseq), ISEQ_TYPE_BLOCK, line);
6584 ADD_INSN1(ret, body, putspecialobject,
INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
6585 ADD_CALL_WITH_BLOCK(ret, body, id_core_set_postexe, argc, block);
6587 iseq_set_local_table(iseq, 0, 0);
6595 int line = nd_line(node);
6596 const NODE *line_node = node;
6597 LABEL *fail_label = NEW_LABEL(line), *end_label = NEW_LABEL(line);
6599 #if !(defined(NAMED_CAPTURE_BY_SVAR) && NAMED_CAPTURE_BY_SVAR-0)
6600 ADD_INSN1(ret, line_node, getglobal, ID2SYM(idBACKREF));
6604 ADD_INSN(ret, line_node, dup);
6605 ADD_INSNL(ret, line_node, branchunless, fail_label);
6607 for (vars = node; vars; vars = RNODE_BLOCK(vars)->nd_next) {
6609 if (RNODE_BLOCK(vars)->nd_next) {
6610 ADD_INSN(ret, line_node, dup);
6613 NO_CHECK(COMPILE_POPPED(ret,
"capture", RNODE_BLOCK(vars)->nd_head));
6615 cap = new_insn_send(iseq, nd_line(line_node), nd_node_id(line_node), idAREF, INT2FIX(1),
6618 #if !defined(NAMED_CAPTURE_SINGLE_OPT) || NAMED_CAPTURE_SINGLE_OPT-0
6619 if (!RNODE_BLOCK(vars)->nd_next && vars == node) {
6624 ADD_INSNL(nom, line_node, jump, end_label);
6625 ADD_LABEL(nom, fail_label);
6627 ADD_INSN(nom, line_node, pop);
6628 ADD_INSN(nom, line_node, putnil);
6630 ADD_LABEL(nom, end_label);
6631 (nom->last->next = cap->link.next)->prev = nom->last;
6632 (cap->link.next = nom->anchor.next)->prev = &cap->link;
6637 ADD_INSNL(ret, line_node, jump, end_label);
6638 ADD_LABEL(ret, fail_label);
6639 ADD_INSN(ret, line_node, pop);
6640 for (vars = node; vars; vars = RNODE_BLOCK(vars)->nd_next) {
6642 NO_CHECK(COMPILE_POPPED(ret, "capture", RNODE_BLOCK(vars)->nd_head));
6644 ((INSN*)last)->insn_id = BIN(putnil);
6645 ((INSN*)last)->operand_size = 0;
6647 ADD_LABEL(ret, end_label);
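/* The next two helpers decide whether a Range literal's endpoints are
 * compile-time literals (nil, __LINE__, integer, float, rational, imaginary),
 * in which case the Range object can be built once at compile time. */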
6651 optimizable_range_item_p(const NODE *n)
6653 if (!n) return FALSE;
6654 switch (nd_type(n)) {
6667 optimized_range_item(const NODE *n)
6669 switch (nd_type(n)) {
6671 return rb_node_line_lineno_val(n);
6673 return rb_node_integer_literal_val(n);
6675 return rb_node_float_literal_val(n);
6677 return rb_node_rational_literal_val(n);
6678 case NODE_IMAGINARY:
6679 return rb_node_imaginary_literal_val(n);
6683 rb_bug(
"unexpected node: %s", ruby_node_name(nd_type(n)));
6690 const NODE *const node_body = type == NODE_IF ? RNODE_IF(node)->nd_body : RNODE_UNLESS(node)->nd_else;
6691 const NODE *const node_else = type == NODE_IF ? RNODE_IF(node)->nd_else : RNODE_UNLESS(node)->nd_body;
6693 const int line = nd_line(node);
6694 const NODE *line_node = node;
6695 DECL_ANCHOR(cond_seq);
6696 LABEL *then_label, *else_label, *end_label;
6699 INIT_ANCHOR(cond_seq);
6700 then_label = NEW_LABEL(line);
6701 else_label = NEW_LABEL(line);
6704 NODE *cond = RNODE_IF(node)->nd_cond;
6705 if (nd_type(cond) == NODE_BLOCK) {
6706 cond = RNODE_BLOCK(cond)->nd_head;
6709 CHECK(compile_branch_condition(iseq, cond_seq, cond, then_label, else_label));
6710 ADD_SEQ(ret, cond_seq);
6712 if (then_label->refcnt && else_label->refcnt) {
6713 branches = decl_branch_base(iseq, PTR2NUM(node), nd_code_loc(node), type == NODE_IF ? "if" : "unless");
6716 if (then_label->refcnt) {
6717 ADD_LABEL(ret, then_label);
6719 DECL_ANCHOR(then_seq);
6720 INIT_ANCHOR(then_seq);
6721 CHECK(COMPILE_(then_seq,
"then", node_body, popped));
6723 if (else_label->refcnt) {
6724 const NODE *
const coverage_node = node_body ? node_body : node;
6725 add_trace_branch_coverage(
6728 nd_code_loc(coverage_node),
6729 nd_node_id(coverage_node),
6731 type == NODE_IF ?
"then" :
"else",
6733 end_label = NEW_LABEL(line);
6734 ADD_INSNL(then_seq, line_node, jump, end_label);
6736 ADD_INSN(then_seq, line_node, pop);
6739 ADD_SEQ(ret, then_seq);
6742 if (else_label->refcnt) {
6743 ADD_LABEL(ret, else_label);
6745 DECL_ANCHOR(else_seq);
6746 INIT_ANCHOR(else_seq);
6747 CHECK(COMPILE_(else_seq,
"else", node_else, popped));
6749 if (then_label->refcnt) {
6750 const NODE *
const coverage_node = node_else ? node_else : node;
6751 add_trace_branch_coverage(
6754 nd_code_loc(coverage_node),
6755 nd_node_id(coverage_node),
6757 type == NODE_IF ?
"else" :
"then",
6760 ADD_SEQ(ret, else_seq);
6764 ADD_LABEL(ret, end_label);
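/* case/when with a case operand: each when body gets a label in body_seq and
 * its tests go into cond_seq; when every when value is a special literal the
 * dispatch collapses into a single opt_case_dispatch instruction backed by a
 * cdhash (tracked by only_special_literals below). */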
6774 const NODE *node = orig_node;
6775 LABEL *endlabel, *elselabel;
6777 DECL_ANCHOR(body_seq);
6778 DECL_ANCHOR(cond_seq);
6779 int only_special_literals = 1;
6782 enum node_type type;
6783 const NODE *line_node;
6788 INIT_ANCHOR(body_seq);
6789 INIT_ANCHOR(cond_seq);
6791 RHASH_TBL_RAW(literals)->type = &cdhash_type;
6793 CHECK(COMPILE(head,
"case base", RNODE_CASE(node)->nd_head));
6795 branches = decl_branch_base(iseq, PTR2NUM(node), nd_code_loc(node),
"case");
6797 node = RNODE_CASE(node)->nd_body;
6798 EXPECT_NODE(
"NODE_CASE", node, NODE_WHEN, COMPILE_NG);
6799 type = nd_type(node);
6800 line = nd_line(node);
6803 endlabel = NEW_LABEL(line);
6804 elselabel = NEW_LABEL(line);
6808 while (type == NODE_WHEN) {
6811 l1 = NEW_LABEL(line);
6812 ADD_LABEL(body_seq, l1);
6813 ADD_INSN(body_seq, line_node, pop);
6815 const NODE *
const coverage_node = RNODE_WHEN(node)->nd_body ? RNODE_WHEN(node)->nd_body : node;
6816 add_trace_branch_coverage(
6819 nd_code_loc(coverage_node),
6820 nd_node_id(coverage_node),
6825 CHECK(COMPILE_(body_seq,
"when body", RNODE_WHEN(node)->nd_body, popped));
6826 ADD_INSNL(body_seq, line_node, jump, endlabel);
6828 vals = RNODE_WHEN(node)->nd_head;
6830 switch (nd_type(vals)) {
6832 only_special_literals = when_vals(iseq, cond_seq, vals, l1, only_special_literals, literals);
6833 if (only_special_literals < 0)
return COMPILE_NG;
6838 only_special_literals = 0;
6839 CHECK(when_splat_vals(iseq, cond_seq, vals, l1, only_special_literals, literals));
6842 UNKNOWN_NODE("NODE_CASE", vals, COMPILE_NG);
6846 EXPECT_NODE_NONULL("NODE_CASE", node, NODE_LIST, COMPILE_NG);
6849 node = RNODE_WHEN(node)->nd_next;
6853 type = nd_type(node);
6854 line = nd_line(node);
6859 ADD_LABEL(cond_seq, elselabel);
6860 ADD_INSN(cond_seq, line_node, pop);
6861 add_trace_branch_coverage(iseq, cond_seq, nd_code_loc(node), nd_node_id(node), branch_id,
"else", branches);
6862 CHECK(COMPILE_(cond_seq,
"else", node, popped));
6863 ADD_INSNL(cond_seq, line_node, jump, endlabel);
6866 debugs(
"== else (implicit)\n");
6867 ADD_LABEL(cond_seq, elselabel);
6868 ADD_INSN(cond_seq, orig_node, pop);
6869 add_trace_branch_coverage(iseq, cond_seq, nd_code_loc(orig_node), nd_node_id(orig_node), branch_id,
"else", branches);
6871 ADD_INSN(cond_seq, orig_node, putnil);
6873 ADD_INSNL(cond_seq, orig_node, jump, endlabel);
6876 if (only_special_literals && ISEQ_COMPILE_DATA(iseq)->option->specialized_instruction) {
6877 ADD_INSN(ret, orig_node, dup);
6878 ADD_INSN2(ret, orig_node, opt_case_dispatch, literals, elselabel);
6880 LABEL_REF(elselabel);
6883 ADD_SEQ(ret, cond_seq);
6884 ADD_SEQ(ret, body_seq);
6885 ADD_LABEL(ret, endlabel);
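/* case without an operand (NODE_CASE2): each when clause is compiled as a
 * plain branch condition that falls through to the next clause on failure. */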
6894 const NODE *node = RNODE_CASE2(orig_node)->nd_body;
6896 DECL_ANCHOR(body_seq);
6900 branches = decl_branch_base(iseq, PTR2NUM(orig_node), nd_code_loc(orig_node),
"case");
6902 INIT_ANCHOR(body_seq);
6903 endlabel = NEW_LABEL(nd_line(node));
6905 while (node && nd_type_p(node, NODE_WHEN)) {
6906 const int line = nd_line(node);
6907 LABEL *l1 = NEW_LABEL(line);
6908 ADD_LABEL(body_seq, l1);
6910 const NODE *
const coverage_node = RNODE_WHEN(node)->nd_body ? RNODE_WHEN(node)->nd_body : node;
6911 add_trace_branch_coverage(
6914 nd_code_loc(coverage_node),
6915 nd_node_id(coverage_node),
6920 CHECK(COMPILE_(body_seq,
"when", RNODE_WHEN(node)->nd_body, popped));
6921 ADD_INSNL(body_seq, node, jump, endlabel);
6923 vals = RNODE_WHEN(node)->nd_head;
6925 EXPECT_NODE_NONULL(
"NODE_WHEN", node, NODE_LIST, COMPILE_NG);
6927 switch (nd_type(vals)) {
6931 val = RNODE_LIST(vals)->nd_head;
6932 lnext = NEW_LABEL(nd_line(val));
6933 debug_compile("== when2\n", (void)0);
6934 CHECK(compile_branch_condition(iseq, ret, val, l1, lnext));
6935 ADD_LABEL(ret, lnext);
6936 vals = RNODE_LIST(vals)->nd_next;
6942 ADD_INSN(ret, vals, putnil);
6943 CHECK(COMPILE(ret, "when2/cond splat", vals));
6944 ADD_INSN1(ret, vals, checkmatch, INT2FIX(VM_CHECKMATCH_TYPE_WHEN | VM_CHECKMATCH_ARRAY));
6945 ADD_INSNL(ret, vals, branchif, l1);
6948 UNKNOWN_NODE(
"NODE_WHEN", vals, COMPILE_NG);
6950 node = RNODE_WHEN(node)->nd_next;
6953 const NODE *
const coverage_node = node ? node : orig_node;
6954 add_trace_branch_coverage(
6957 nd_code_loc(coverage_node),
6958 nd_node_id(coverage_node),
6962 CHECK(COMPILE_(ret,
"else", node, popped));
6963 ADD_INSNL(ret, orig_node, jump, endlabel);
6965 ADD_SEQ(ret, body_seq);
6966 ADD_LABEL(ret, endlabel);
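/* Pattern matching (case/in). The helpers below emit the matching code for a
 * single pattern node. The CASE3_BI_OFFSET_* constants name the extra stack
 * slots kept below the matchee in single-pattern mode (deconstruct cache,
 * error string, key-error flag, key-error matchee and key); they are used to
 * build NoMatchingPattern(Key)Error messages when the match fails. */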
6970 static int iseq_compile_pattern_match(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, LABEL *unmatched, bool in_single_pattern, bool in_alt_pattern, int base_index, bool use_deconstructed_cache);
6972 static int iseq_compile_pattern_constant(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, LABEL *match_failed, bool in_single_pattern, int base_index);
6973 static int iseq_compile_array_deconstruct(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, LABEL *deconstruct, LABEL *deconstructed, LABEL *match_failed, LABEL *type_error, bool in_single_pattern, int base_index, bool use_deconstructed_cache);
6974 static int iseq_compile_pattern_set_general_errmsg(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, VALUE errmsg, int base_index);
6975 static int iseq_compile_pattern_set_length_errmsg(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, VALUE errmsg, VALUE pattern_length, int base_index);
6976 static int iseq_compile_pattern_set_eqq_errmsg(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, int base_index);
6978 #define CASE3_BI_OFFSET_DECONSTRUCTED_CACHE 0
6979 #define CASE3_BI_OFFSET_ERROR_STRING 1
6980 #define CASE3_BI_OFFSET_KEY_ERROR_P 2
6981 #define CASE3_BI_OFFSET_KEY_ERROR_MATCHEE 3
6982 #define CASE3_BI_OFFSET_KEY_ERROR_KEY 4
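/* iseq_compile_pattern_each: emits code that tests the value on top of the
 * stack against one pattern node and jumps to matched or unmatched. */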
6985 iseq_compile_pattern_each(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, LABEL *matched, LABEL *unmatched, bool in_single_pattern, bool in_alt_pattern, int base_index, bool use_deconstructed_cache)
6987 const int line = nd_line(node);
6988 const NODE *line_node = node;
6990 switch (nd_type(node)) {
7044 const NODE *args = RNODE_ARYPTN(node)->pre_args;
7045 const int pre_args_num = RNODE_ARYPTN(node)->pre_args ? rb_long2int(RNODE_LIST(RNODE_ARYPTN(node)->pre_args)->as.nd_alen) : 0;
7046 const int post_args_num = RNODE_ARYPTN(node)->post_args ? rb_long2int(RNODE_LIST(RNODE_ARYPTN(node)->post_args)->as.nd_alen) : 0;
7048 const int min_argc = pre_args_num + post_args_num;
7049 const int use_rest_num = RNODE_ARYPTN(node)->rest_arg && (NODE_NAMED_REST_P(RNODE_ARYPTN(node)->rest_arg) ||
7050 (!NODE_NAMED_REST_P(RNODE_ARYPTN(node)->rest_arg) && post_args_num > 0));
7052 LABEL *match_failed, *type_error, *deconstruct, *deconstructed;
7054 match_failed = NEW_LABEL(line);
7055 type_error = NEW_LABEL(line);
7056 deconstruct = NEW_LABEL(line);
7057 deconstructed = NEW_LABEL(line);
7060 ADD_INSN1(ret, line_node, putobject,
INT2FIX(0));
7061 ADD_INSN(ret, line_node, swap);
7067 CHECK(iseq_compile_pattern_constant(iseq, ret, node, match_failed, in_single_pattern, base_index));
7069 CHECK(iseq_compile_array_deconstruct(iseq, ret, node, deconstruct, deconstructed, match_failed, type_error, in_single_pattern, base_index, use_deconstructed_cache));
7071 ADD_INSN(ret, line_node, dup);
7072 ADD_SEND(ret, line_node, idLength,
INT2FIX(0));
7073 ADD_INSN1(ret, line_node, putobject,
INT2FIX(min_argc));
7074 ADD_SEND(ret, line_node, RNODE_ARYPTN(node)->rest_arg ? idGE : idEq,
INT2FIX(1));
7075 if (in_single_pattern) {
7076 CHECK(iseq_compile_pattern_set_length_errmsg(iseq, ret, node,
7077 RNODE_ARYPTN(node)->rest_arg ? rb_fstring_lit("%p length mismatch (given %p, expected %p+)") :
7078 rb_fstring_lit("%p length mismatch (given %p, expected %p)"),
7079 INT2FIX(min_argc), base_index + 1));
7081 ADD_INSNL(ret, line_node, branchunless, match_failed);
7083 for (i = 0; i < pre_args_num; i++) {
7084 ADD_INSN(ret, line_node, dup);
7085 ADD_INSN1(ret, line_node, putobject,
INT2FIX(i));
7086 ADD_SEND(ret, line_node, idAREF,
INT2FIX(1));
7087 CHECK(iseq_compile_pattern_match(iseq, ret, RNODE_LIST(args)->nd_head, match_failed, in_single_pattern, in_alt_pattern, base_index + 1 ,
false));
7088 args = RNODE_LIST(args)->nd_next;
7091 if (RNODE_ARYPTN(node)->rest_arg) {
7092 if (NODE_NAMED_REST_P(RNODE_ARYPTN(node)->rest_arg)) {
7093 ADD_INSN(ret, line_node, dup);
7094 ADD_INSN1(ret, line_node, putobject,
INT2FIX(pre_args_num));
7095 ADD_INSN1(ret, line_node, topn,
INT2FIX(1));
7096 ADD_SEND(ret, line_node, idLength,
INT2FIX(0));
7097 ADD_INSN1(ret, line_node, putobject,
INT2FIX(min_argc));
7098 ADD_SEND(ret, line_node, idMINUS,
INT2FIX(1));
7099 ADD_INSN1(ret, line_node, setn,
INT2FIX(4));
7100 ADD_SEND(ret, line_node, idAREF,
INT2FIX(2));
7102 CHECK(iseq_compile_pattern_match(iseq, ret, RNODE_ARYPTN(node)->rest_arg, match_failed, in_single_pattern, in_alt_pattern, base_index + 1 ,
false));
7105 if (post_args_num > 0) {
7106 ADD_INSN(ret, line_node, dup);
7107 ADD_SEND(ret, line_node, idLength,
INT2FIX(0));
7108 ADD_INSN1(ret, line_node, putobject,
INT2FIX(min_argc));
7109 ADD_SEND(ret, line_node, idMINUS,
INT2FIX(1));
7110 ADD_INSN1(ret, line_node, setn,
INT2FIX(2));
7111 ADD_INSN(ret, line_node, pop);
7116 args = RNODE_ARYPTN(node)->post_args;
7117 for (i = 0; i < post_args_num; i++) {
7118 ADD_INSN(ret, line_node, dup);
7120 ADD_INSN1(ret, line_node, putobject,
INT2FIX(pre_args_num + i));
7121 ADD_INSN1(ret, line_node, topn,
INT2FIX(3));
7122 ADD_SEND(ret, line_node, idPLUS,
INT2FIX(1));
7124 ADD_SEND(ret, line_node, idAREF,
INT2FIX(1));
7125 CHECK(iseq_compile_pattern_match(iseq, ret, RNODE_LIST(args)->nd_head, match_failed, in_single_pattern, in_alt_pattern, base_index + 1 ,
false));
7126 args = RNODE_LIST(args)->nd_next;
7129 ADD_INSN(ret, line_node, pop);
7131 ADD_INSN(ret, line_node, pop);
7133 ADD_INSNL(ret, line_node, jump, matched);
7134 ADD_INSN(ret, line_node, putnil);
7136 ADD_INSN(ret, line_node, putnil);
7139 ADD_LABEL(ret, type_error);
7140 ADD_INSN1(ret, line_node, putspecialobject,
INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
7142 ADD_INSN1(ret, line_node, putobject, rb_fstring_lit(
"deconstruct must return Array"));
7143 ADD_SEND(ret, line_node, id_core_raise,
INT2FIX(2));
7144 ADD_INSN(ret, line_node, pop);
7146 ADD_LABEL(ret, match_failed);
7147 ADD_INSN(ret, line_node, pop);
7149 ADD_INSN(ret, line_node, pop);
7151 ADD_INSNL(ret, line_node, jump, unmatched);
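/* Find pattern ([*pre, a, b, *post]): after #deconstruct, the loop below
 * appears to try every possible offset, matching the fixed-size window and,
 * on success, binding the named pre/post rest variables. */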
7204 const NODE *args = RNODE_FNDPTN(node)->args;
7205 const int args_num = RNODE_FNDPTN(node)->args ?
rb_long2int(RNODE_LIST(RNODE_FNDPTN(node)->args)->as.nd_alen) : 0;
7207 LABEL *match_failed, *type_error, *deconstruct, *deconstructed;
7208 match_failed = NEW_LABEL(line);
7209 type_error = NEW_LABEL(line);
7210 deconstruct = NEW_LABEL(line);
7211 deconstructed = NEW_LABEL(line);
7213 CHECK(iseq_compile_pattern_constant(iseq, ret, node, match_failed, in_single_pattern, base_index));
7215 CHECK(iseq_compile_array_deconstruct(iseq, ret, node, deconstruct, deconstructed, match_failed, type_error, in_single_pattern, base_index, use_deconstructed_cache));
7217 ADD_INSN(ret, line_node, dup);
7218 ADD_SEND(ret, line_node, idLength,
INT2FIX(0));
7219 ADD_INSN1(ret, line_node, putobject,
INT2FIX(args_num));
7220 ADD_SEND(ret, line_node, idGE,
INT2FIX(1));
7221 if (in_single_pattern) {
7222 CHECK(iseq_compile_pattern_set_length_errmsg(iseq, ret, node, rb_fstring_lit("%p length mismatch (given %p, expected %p+)"), INT2FIX(args_num), base_index + 1));
7224 ADD_INSNL(ret, line_node, branchunless, match_failed);
7227 LABEL *while_begin = NEW_LABEL(nd_line(node));
7228 LABEL *next_loop = NEW_LABEL(nd_line(node));
7229 LABEL *find_succeeded = NEW_LABEL(line);
7230 LABEL *find_failed = NEW_LABEL(nd_line(node));
7233 ADD_INSN(ret, line_node, dup);
7234 ADD_SEND(ret, line_node, idLength,
INT2FIX(0));
7236 ADD_INSN(ret, line_node, dup);
7237 ADD_INSN1(ret, line_node, putobject,
INT2FIX(args_num));
7238 ADD_SEND(ret, line_node, idMINUS,
INT2FIX(1));
7240 ADD_INSN1(ret, line_node, putobject,
INT2FIX(0));
7242 ADD_LABEL(ret, while_begin);
7244 ADD_INSN(ret, line_node, dup);
7245 ADD_INSN1(ret, line_node, topn,
INT2FIX(2));
7246 ADD_SEND(ret, line_node, idLE,
INT2FIX(1));
7247 ADD_INSNL(ret, line_node, branchunless, find_failed);
7249 for (j = 0; j < args_num; j++) {
7250 ADD_INSN1(ret, line_node, topn,
INT2FIX(3));
7251 ADD_INSN1(ret, line_node, topn,
INT2FIX(1));
7253 ADD_INSN1(ret, line_node, putobject,
INT2FIX(j));
7254 ADD_SEND(ret, line_node, idPLUS,
INT2FIX(1));
7256 ADD_SEND(ret, line_node, idAREF,
INT2FIX(1));
7258 CHECK(iseq_compile_pattern_match(iseq, ret, RNODE_LIST(args)->nd_head, next_loop, in_single_pattern, in_alt_pattern, base_index + 4 ,
false));
7259 args = RNODE_LIST(args)->nd_next;
7262 if (NODE_NAMED_REST_P(RNODE_FNDPTN(node)->pre_rest_arg)) {
7263 ADD_INSN1(ret, line_node, topn,
INT2FIX(3));
7264 ADD_INSN1(ret, line_node, putobject,
INT2FIX(0));
7265 ADD_INSN1(ret, line_node, topn,
INT2FIX(2));
7266 ADD_SEND(ret, line_node, idAREF,
INT2FIX(2));
7267 CHECK(iseq_compile_pattern_match(iseq, ret, RNODE_FNDPTN(node)->pre_rest_arg, find_failed, in_single_pattern, in_alt_pattern, base_index + 4 ,
false));
7269 if (NODE_NAMED_REST_P(RNODE_FNDPTN(node)->post_rest_arg)) {
7270 ADD_INSN1(ret, line_node, topn,
INT2FIX(3));
7271 ADD_INSN1(ret, line_node, topn,
INT2FIX(1));
7272 ADD_INSN1(ret, line_node, putobject,
INT2FIX(args_num));
7273 ADD_SEND(ret, line_node, idPLUS,
INT2FIX(1));
7274 ADD_INSN1(ret, line_node, topn,
INT2FIX(3));
7275 ADD_SEND(ret, line_node, idAREF,
INT2FIX(2));
7276 CHECK(iseq_compile_pattern_match(iseq, ret, RNODE_FNDPTN(node)->post_rest_arg, find_failed, in_single_pattern, in_alt_pattern, base_index + 4 ,
false));
7278 ADD_INSNL(ret, line_node, jump, find_succeeded);
7280 ADD_LABEL(ret, next_loop);
7281 ADD_INSN1(ret, line_node, putobject,
INT2FIX(1));
7282 ADD_SEND(ret, line_node, idPLUS,
INT2FIX(1));
7283 ADD_INSNL(ret, line_node, jump, while_begin);
7285 ADD_LABEL(ret, find_failed);
7286 ADD_INSN1(ret, line_node, adjuststack,
INT2FIX(3));
7287 if (in_single_pattern) {
7288 ADD_INSN1(ret, line_node, putspecialobject,
INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
7289 ADD_INSN1(ret, line_node, putobject, rb_fstring_lit(
"%p does not match to find pattern"));
7290 ADD_INSN1(ret, line_node, topn,
INT2FIX(2));
7291 ADD_SEND(ret, line_node, id_core_sprintf,
INT2FIX(2));
7292 ADD_INSN1(ret, line_node, setn,
INT2FIX(base_index + CASE3_BI_OFFSET_ERROR_STRING + 1 ));
7294 ADD_INSN1(ret, line_node, putobject,
Qfalse);
7295 ADD_INSN1(ret, line_node, setn,
INT2FIX(base_index + CASE3_BI_OFFSET_KEY_ERROR_P + 2 ));
7297 ADD_INSN(ret, line_node, pop);
7298 ADD_INSN(ret, line_node, pop);
7300 ADD_INSNL(ret, line_node, jump, match_failed);
7301 ADD_INSN1(ret, line_node, dupn,
INT2FIX(3));
7303 ADD_LABEL(ret, find_succeeded);
7304 ADD_INSN1(ret, line_node, adjuststack,
INT2FIX(3));
7307 ADD_INSN(ret, line_node, pop);
7308 ADD_INSNL(ret, line_node, jump, matched);
7309 ADD_INSN(ret, line_node, putnil);
7311 ADD_LABEL(ret, type_error);
7312 ADD_INSN1(ret, line_node, putspecialobject,
INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
7314 ADD_INSN1(ret, line_node, putobject, rb_fstring_lit(
"deconstruct must return Array"));
7315 ADD_SEND(ret, line_node, id_core_raise,
INT2FIX(2));
7316 ADD_INSN(ret, line_node, pop);
7318 ADD_LABEL(ret, match_failed);
7319 ADD_INSN(ret, line_node, pop);
7320 ADD_INSNL(ret, line_node, jump, unmatched);
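/* Hash pattern: the matchee must respond to #deconstruct_keys; each key is
 * fetched (#delete when a **rest is present, #[] otherwise) and matched
 * against its sub-pattern, and **nil additionally requires the remaining
 * hash to be empty. */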
7384 LABEL *match_failed, *type_error;
7387 match_failed = NEW_LABEL(line);
7388 type_error = NEW_LABEL(line);
7390 if (RNODE_HSHPTN(node)->nd_pkwargs && !RNODE_HSHPTN(node)->nd_pkwrestarg) {
7391 const NODE *kw_args = RNODE_HASH(RNODE_HSHPTN(node)->nd_pkwargs)->nd_head;
7392 keys = rb_ary_new_capa(kw_args ? RNODE_LIST(kw_args)->as.nd_alen/2 : 0);
7394 rb_ary_push(keys, get_symbol_value(iseq, RNODE_LIST(kw_args)->nd_head));
7395 kw_args = RNODE_LIST(RNODE_LIST(kw_args)->nd_next)->nd_next;
7399 CHECK(iseq_compile_pattern_constant(iseq, ret, node, match_failed, in_single_pattern, base_index));
7401 ADD_INSN(ret, line_node, dup);
7402 ADD_INSN1(ret, line_node, putobject, ID2SYM(rb_intern("deconstruct_keys")));
7403 ADD_SEND(ret, line_node, idRespond_to,
INT2FIX(1));
7404 if (in_single_pattern) {
7405 CHECK(iseq_compile_pattern_set_general_errmsg(iseq, ret, node, rb_fstring_lit(
"%p does not respond to #deconstruct_keys"), base_index + 1 ));
7407 ADD_INSNL(ret, line_node, branchunless, match_failed);
7410 ADD_INSN(ret, line_node, putnil);
7413 ADD_INSN1(ret, line_node, duparray, keys);
7418 ADD_INSN(ret, line_node, dup);
7420 ADD_INSNL(ret, line_node, branchunless, type_error);
7422 if (RNODE_HSHPTN(node)->nd_pkwrestarg) {
7426 if (RNODE_HSHPTN(node)->nd_pkwargs) {
7430 args = RNODE_HASH(RNODE_HSHPTN(node)->nd_pkwargs)->nd_head;
7432 DECL_ANCHOR(match_values);
7433 INIT_ANCHOR(match_values);
7434 keys_num = rb_long2int(RNODE_LIST(args)->as.nd_alen) / 2;
7435 for (i = 0; i < keys_num; i++) {
7436 NODE *key_node = RNODE_LIST(args)->nd_head;
7437 NODE *value_node = RNODE_LIST(RNODE_LIST(args)->nd_next)->nd_head;
7438 VALUE key = get_symbol_value(iseq, key_node);
7440 ADD_INSN(ret, line_node, dup);
7441 ADD_INSN1(ret, line_node, putobject, key);
7443 if (in_single_pattern) {
7444 LABEL *match_succeeded;
7445 match_succeeded = NEW_LABEL(line);
7447 ADD_INSN(ret, line_node, dup);
7448 ADD_INSNL(ret, line_node, branchif, match_succeeded);
7451 ADD_INSN1(ret, line_node, setn,
INT2FIX(base_index + CASE3_BI_OFFSET_ERROR_STRING + 2 ));
7452 ADD_INSN1(ret, line_node, putobject,
Qtrue);
7453 ADD_INSN1(ret, line_node, setn,
INT2FIX(base_index + CASE3_BI_OFFSET_KEY_ERROR_P + 3 ));
7454 ADD_INSN1(ret, line_node, topn,
INT2FIX(3));
7455 ADD_INSN1(ret, line_node, setn,
INT2FIX(base_index + CASE3_BI_OFFSET_KEY_ERROR_MATCHEE + 4 ));
7456 ADD_INSN1(ret, line_node, putobject, key);
7457 ADD_INSN1(ret, line_node, setn,
INT2FIX(base_index + CASE3_BI_OFFSET_KEY_ERROR_KEY + 5 ));
7459 ADD_INSN1(ret, line_node, adjuststack,
INT2FIX(4));
7461 ADD_LABEL(ret, match_succeeded);
7463 ADD_INSNL(ret, line_node, branchunless, match_failed);
7465 ADD_INSN(match_values, line_node, dup);
7466 ADD_INSN1(match_values, line_node, putobject, key);
7467 ADD_SEND(match_values, line_node, RNODE_HSHPTN(node)->nd_pkwrestarg ? rb_intern("delete") : idAREF, INT2FIX(1));
7468 CHECK(iseq_compile_pattern_match(iseq, match_values, value_node, match_failed, in_single_pattern, in_alt_pattern, base_index + 1 ,
false));
7469 args = RNODE_LIST(RNODE_LIST(args)->nd_next)->nd_next;
7471 ADD_SEQ(ret, match_values);
7475 ADD_INSN(ret, line_node, dup);
7476 ADD_SEND(ret, line_node, idEmptyP,
INT2FIX(0));
7477 if (in_single_pattern) {
7478 CHECK(iseq_compile_pattern_set_general_errmsg(iseq, ret, node, rb_fstring_lit(
"%p is not empty"), base_index + 1 ));
7480 ADD_INSNL(ret, line_node, branchunless, match_failed);
7483 if (RNODE_HSHPTN(node)->nd_pkwrestarg) {
7484 if (RNODE_HSHPTN(node)->nd_pkwrestarg == NODE_SPECIAL_NO_REST_KEYWORD) {
7485 ADD_INSN(ret, line_node, dup);
7486 ADD_SEND(ret, line_node, idEmptyP,
INT2FIX(0));
7487 if (in_single_pattern) {
7488 CHECK(iseq_compile_pattern_set_general_errmsg(iseq, ret, node, rb_fstring_lit(
"rest of %p is not empty"), base_index + 1 ));
7490 ADD_INSNL(ret, line_node, branchunless, match_failed);
7493 ADD_INSN(ret, line_node, dup);
7494 CHECK(iseq_compile_pattern_match(iseq, ret, RNODE_HSHPTN(node)->nd_pkwrestarg, match_failed, in_single_pattern, in_alt_pattern, base_index + 1 ,
false));
7498 ADD_INSN(ret, line_node, pop);
7499 ADD_INSNL(ret, line_node, jump, matched);
7500 ADD_INSN(ret, line_node, putnil);
7502 ADD_LABEL(ret, type_error);
7503 ADD_INSN1(ret, line_node, putspecialobject,
INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
7505 ADD_INSN1(ret, line_node, putobject, rb_fstring_lit(
"deconstruct_keys must return Hash"));
7506 ADD_SEND(ret, line_node, id_core_raise,
INT2FIX(2));
7507 ADD_INSN(ret, line_node, pop);
7509 ADD_LABEL(ret, match_failed);
7510 ADD_INSN(ret, line_node, pop);
7511 ADD_INSNL(ret, line_node, jump, unmatched);
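/* Value patterns (literals, pinned expressions, constants, ranges, ...): the
 * pattern expression is compiled normally and tested against the matchee
 * with checkmatch(VM_CHECKMATCH_TYPE_CASE), i.e. pattern === matchee. */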
7520 case NODE_IMAGINARY:
7548 CHECK(COMPILE(ret,
"case in literal", node));
7549 if (in_single_pattern) {
7550 ADD_INSN1(ret, line_node, dupn,
INT2FIX(2));
7552 ADD_INSN1(ret, line_node, checkmatch,
INT2FIX(VM_CHECKMATCH_TYPE_CASE));
7553 if (in_single_pattern) {
7554 CHECK(iseq_compile_pattern_set_eqq_errmsg(iseq, ret, node, base_index + 2 ));
7556 ADD_INSNL(ret, line_node, branchif, matched);
7557 ADD_INSNL(ret, line_node, jump, unmatched);
7561 ID id = RNODE_LASGN(node)->nd_vid;
7562 int idx = ISEQ_BODY(body->local_iseq)->local_table_size - get_local_var_idx(iseq,
id);
7564 if (in_alt_pattern) {
7566 if (name && strlen(name) > 0 && name[0] != '_') {
7567 COMPILE_ERROR(ERROR_ARGS "illegal variable in alternative pattern (%"PRIsVALUE")",
7574 ADD_INSNL(ret, line_node, jump, matched);
7579 ID id = RNODE_DASGN(node)->nd_vid;
7581 idx = get_dyna_var_idx(iseq,
id, &lv, &ls);
7583 if (in_alt_pattern) {
7585 if (name && strlen(name) > 0 && name[0] != '_') {
7586 COMPILE_ERROR(ERROR_ARGS "illegal variable in alternative pattern (%"PRIsVALUE")",
7593 COMPILE_ERROR(ERROR_ARGS "NODE_DASGN: unknown id (%"PRIsVALUE")",
7597 ADD_SETLOCAL(ret, line_node, ls - idx, lv);
7598 ADD_INSNL(ret, line_node, jump, matched);
7603 LABEL *match_failed;
7604 match_failed = unmatched;
7605 CHECK(iseq_compile_pattern_match(iseq, ret, RNODE_IF(node)->nd_body, unmatched, in_single_pattern, in_alt_pattern, base_index, use_deconstructed_cache));
7606 CHECK(COMPILE(ret,
"case in if", RNODE_IF(node)->nd_cond));
7607 if (in_single_pattern) {
7608 LABEL *match_succeeded;
7609 match_succeeded = NEW_LABEL(line);
7611 ADD_INSN(ret, line_node, dup);
7612 if (nd_type_p(node, NODE_IF)) {
7613 ADD_INSNL(ret, line_node, branchif, match_succeeded);
7616 ADD_INSNL(ret, line_node, branchunless, match_succeeded);
7619 ADD_INSN1(ret, line_node, putobject, rb_fstring_lit(
"guard clause does not return true"));
7620 ADD_INSN1(ret, line_node, setn,
INT2FIX(base_index + CASE3_BI_OFFSET_ERROR_STRING + 1 ));
7621 ADD_INSN1(ret, line_node, putobject,
Qfalse);
7622 ADD_INSN1(ret, line_node, setn,
INT2FIX(base_index + CASE3_BI_OFFSET_KEY_ERROR_P + 2 ));
7624 ADD_INSN(ret, line_node, pop);
7625 ADD_INSN(ret, line_node, pop);
7627 ADD_LABEL(ret, match_succeeded);
7629 if (nd_type_p(node, NODE_IF)) {
7630 ADD_INSNL(ret, line_node, branchunless, match_failed);
7633 ADD_INSNL(ret, line_node, branchif, match_failed);
7635 ADD_INSNL(ret, line_node, jump, matched);
7640 LABEL *match_failed;
7641 match_failed = NEW_LABEL(line);
7643 n = RNODE_HASH(node)->nd_head;
7644 if (! (nd_type_p(n, NODE_LIST) && RNODE_LIST(n)->as.nd_alen == 2)) {
7645 COMPILE_ERROR(ERROR_ARGS
"unexpected node");
7649 ADD_INSN(ret, line_node, dup);
7650 CHECK(iseq_compile_pattern_match(iseq, ret, RNODE_LIST(n)->nd_head, match_failed, in_single_pattern, in_alt_pattern, base_index + 1 , use_deconstructed_cache));
7651 CHECK(iseq_compile_pattern_each(iseq, ret, RNODE_LIST(RNODE_LIST(n)->nd_next)->nd_head, matched, match_failed, in_single_pattern, in_alt_pattern, base_index,
false));
7652 ADD_INSN(ret, line_node, putnil);
7654 ADD_LABEL(ret, match_failed);
7655 ADD_INSN(ret, line_node, pop);
7656 ADD_INSNL(ret, line_node, jump, unmatched);
7660 LABEL *match_succeeded, *fin;
7661 match_succeeded = NEW_LABEL(line);
7662 fin = NEW_LABEL(line);
7664 ADD_INSN(ret, line_node, dup);
7665 CHECK(iseq_compile_pattern_each(iseq, ret, RNODE_OR(node)->nd_1st, match_succeeded, fin, in_single_pattern,
true, base_index + 1 , use_deconstructed_cache));
7666 ADD_LABEL(ret, match_succeeded);
7667 ADD_INSN(ret, line_node, pop);
7668 ADD_INSNL(ret, line_node, jump, matched);
7669 ADD_INSN(ret, line_node, putnil);
7670 ADD_LABEL(ret, fin);
7671 CHECK(iseq_compile_pattern_each(iseq, ret, RNODE_OR(node)->nd_2nd, matched, unmatched, in_single_pattern,
true, base_index, use_deconstructed_cache));
7675 UNKNOWN_NODE(
"NODE_IN", node, COMPILE_NG);
7681 iseq_compile_pattern_match(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, LABEL *unmatched, bool in_single_pattern, bool in_alt_pattern, int base_index, bool use_deconstructed_cache)
7683 LABEL *fin = NEW_LABEL(nd_line(node));
7684 CHECK(iseq_compile_pattern_each(iseq, ret, node, fin, unmatched, in_single_pattern, in_alt_pattern, base_index, use_deconstructed_cache));
7685 ADD_LABEL(ret, fin);
7690 iseq_compile_pattern_constant(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, LABEL *match_failed, bool in_single_pattern, int base_index)
7692 const NODE *line_node = node;
7694 if (RNODE_ARYPTN(node)->nd_pconst) {
7695 ADD_INSN(ret, line_node, dup);
7696 CHECK(COMPILE(ret,
"constant", RNODE_ARYPTN(node)->nd_pconst));
7697 if (in_single_pattern) {
7698 ADD_INSN1(ret, line_node, dupn,
INT2FIX(2));
7700 ADD_INSN1(ret, line_node, checkmatch,
INT2FIX(VM_CHECKMATCH_TYPE_CASE));
7701 if (in_single_pattern) {
7702 CHECK(iseq_compile_pattern_set_eqq_errmsg(iseq, ret, node, base_index + 3 ));
7704 ADD_INSNL(ret, line_node, branchunless, match_failed);
7711 iseq_compile_array_deconstruct(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, LABEL *deconstruct, LABEL *deconstructed, LABEL *match_failed, LABEL *type_error, bool in_single_pattern, int base_index, bool use_deconstructed_cache)
7713 const NODE *line_node = node;
7717 if (use_deconstructed_cache) {
7719 ADD_INSN1(ret, line_node, topn,
INT2FIX(base_index + CASE3_BI_OFFSET_DECONSTRUCTED_CACHE));
7720 ADD_INSNL(ret, line_node, branchnil, deconstruct);
7723 ADD_INSN1(ret, line_node, topn,
INT2FIX(base_index + CASE3_BI_OFFSET_DECONSTRUCTED_CACHE));
7724 ADD_INSNL(ret, line_node, branchunless, match_failed);
7727 ADD_INSN(ret, line_node, pop);
7728 ADD_INSN1(ret, line_node, topn,
INT2FIX(base_index + CASE3_BI_OFFSET_DECONSTRUCTED_CACHE - 1 ));
7729 ADD_INSNL(ret, line_node, jump, deconstructed);
7732 ADD_INSNL(ret, line_node, jump, deconstruct);
7735 ADD_LABEL(ret, deconstruct);
7736 ADD_INSN(ret, line_node, dup);
7737 ADD_INSN1(ret, line_node, putobject, ID2SYM(rb_intern("deconstruct")));
7738 ADD_SEND(ret, line_node, idRespond_to,
INT2FIX(1));
7741 if (use_deconstructed_cache) {
7742 ADD_INSN1(ret, line_node, setn,
INT2FIX(base_index + CASE3_BI_OFFSET_DECONSTRUCTED_CACHE + 1 ));
7745 if (in_single_pattern) {
7746 CHECK(iseq_compile_pattern_set_general_errmsg(iseq, ret, node, rb_fstring_lit(
"%p does not respond to #deconstruct"), base_index + 1 ));
7749 ADD_INSNL(ret, line_node, branchunless, match_failed);
7754 if (use_deconstructed_cache) {
7755 ADD_INSN1(ret, line_node, setn,
INT2FIX(base_index + CASE3_BI_OFFSET_DECONSTRUCTED_CACHE));
7758 ADD_INSN(ret, line_node, dup);
7760 ADD_INSNL(ret, line_node, branchunless, type_error);
7762 ADD_LABEL(ret, deconstructed);
7778 const int line = nd_line(node);
7779 const NODE *line_node = node;
7780 LABEL *match_succeeded = NEW_LABEL(line);
7782 ADD_INSN(ret, line_node, dup);
7783 ADD_INSNL(ret, line_node, branchif, match_succeeded);
7785 ADD_INSN1(ret, line_node, putspecialobject,
INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
7786 ADD_INSN1(ret, line_node, putobject, errmsg);
7787 ADD_INSN1(ret, line_node, topn,
INT2FIX(3));
7788 ADD_SEND(ret, line_node, id_core_sprintf,
INT2FIX(2));
7789 ADD_INSN1(ret, line_node, setn,
INT2FIX(base_index + CASE3_BI_OFFSET_ERROR_STRING + 1 ));
7791 ADD_INSN1(ret, line_node, putobject,
Qfalse);
7792 ADD_INSN1(ret, line_node, setn,
INT2FIX(base_index + CASE3_BI_OFFSET_KEY_ERROR_P + 2 ));
7794 ADD_INSN(ret, line_node, pop);
7795 ADD_INSN(ret, line_node, pop);
7796 ADD_LABEL(ret, match_succeeded);
7812 const int line = nd_line(node);
7813 const NODE *line_node = node;
7814 LABEL *match_succeeded = NEW_LABEL(line);
7816 ADD_INSN(ret, line_node, dup);
7817 ADD_INSNL(ret, line_node, branchif, match_succeeded);
7819 ADD_INSN1(ret, line_node, putspecialobject,
INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
7820 ADD_INSN1(ret, line_node, putobject, errmsg);
7821 ADD_INSN1(ret, line_node, topn,
INT2FIX(3));
7822 ADD_INSN(ret, line_node, dup);
7823 ADD_SEND(ret, line_node, idLength,
INT2FIX(0));
7824 ADD_INSN1(ret, line_node, putobject, pattern_length);
7825 ADD_SEND(ret, line_node, id_core_sprintf,
INT2FIX(4));
7826 ADD_INSN1(ret, line_node, setn,
INT2FIX(base_index + CASE3_BI_OFFSET_ERROR_STRING + 1 ));
7828 ADD_INSN1(ret, line_node, putobject,
Qfalse);
7829 ADD_INSN1(ret, line_node, setn,
INT2FIX(base_index + CASE3_BI_OFFSET_KEY_ERROR_P + 2));
7831 ADD_INSN(ret, line_node, pop);
7832 ADD_INSN(ret, line_node, pop);
7833 ADD_LABEL(ret, match_succeeded);
7839 iseq_compile_pattern_set_eqq_errmsg(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, int base_index)
7849 const int line = nd_line(node);
7850 const NODE *line_node = node;
7851 LABEL *match_succeeded = NEW_LABEL(line);
7853 ADD_INSN(ret, line_node, dup);
7854 ADD_INSNL(ret, line_node, branchif, match_succeeded);
7856 ADD_INSN1(ret, line_node, putspecialobject,
INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
7857 ADD_INSN1(ret, line_node, putobject, rb_fstring_lit(
"%p === %p does not return true"));
7858 ADD_INSN1(ret, line_node, topn,
INT2FIX(3));
7859 ADD_INSN1(ret, line_node, topn,
INT2FIX(5));
7860 ADD_SEND(ret, line_node, id_core_sprintf,
INT2FIX(3));
7861 ADD_INSN1(ret, line_node, setn,
INT2FIX(base_index + CASE3_BI_OFFSET_ERROR_STRING + 1 ));
7863 ADD_INSN1(ret, line_node, putobject,
Qfalse);
7864 ADD_INSN1(ret, line_node, setn,
INT2FIX(base_index + CASE3_BI_OFFSET_KEY_ERROR_P + 2 ));
7866 ADD_INSN(ret, line_node, pop);
7867 ADD_INSN(ret, line_node, pop);
7869 ADD_LABEL(ret, match_succeeded);
7870 ADD_INSN1(ret, line_node, setn,
INT2FIX(2));
7871 ADD_INSN(ret, line_node, pop);
7872 ADD_INSN(ret, line_node, pop);
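/* case/in dispatcher: compiles the case operand, then chains the in clauses.
 * In single-pattern mode extra nil/false slots are pushed first so the
 * error-message helpers above can fill them in; with no else clause a failed
 * match raises NoMatchingPatternError or NoMatchingPatternKeyError through
 * the VM frozen core. */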
7880 const NODE *pattern;
7881 const NODE *node = orig_node;
7882 LABEL *endlabel, *elselabel;
7884 DECL_ANCHOR(body_seq);
7885 DECL_ANCHOR(cond_seq);
7887 enum node_type type;
7888 const NODE *line_node;
7891 bool single_pattern;
7894 INIT_ANCHOR(body_seq);
7895 INIT_ANCHOR(cond_seq);
7897 branches = decl_branch_base(iseq, PTR2NUM(node), nd_code_loc(node),
"case");
7899 node = RNODE_CASE3(node)->nd_body;
7900 EXPECT_NODE(
"NODE_CASE3", node, NODE_IN, COMPILE_NG);
7901 type = nd_type(node);
7902 line = nd_line(node);
7904 single_pattern = !RNODE_IN(node)->nd_next;
7906 endlabel = NEW_LABEL(line);
7907 elselabel = NEW_LABEL(line);
7909 if (single_pattern) {
7911 ADD_INSN(head, line_node, putnil);
7912 ADD_INSN(head, line_node, putnil);
7913 ADD_INSN1(head, line_node, putobject,
Qfalse);
7914 ADD_INSN(head, line_node, putnil);
7916 ADD_INSN(head, line_node, putnil);
7918 CHECK(COMPILE(head,
"case base", RNODE_CASE3(orig_node)->nd_head));
7922 while (type == NODE_IN) {
7926 ADD_INSN(body_seq, line_node, putnil);
7928 l1 = NEW_LABEL(line);
7929 ADD_LABEL(body_seq, l1);
7930 ADD_INSN1(body_seq, line_node, adjuststack,
INT2FIX(single_pattern ? 6 : 2));
7932 const NODE *
const coverage_node = RNODE_IN(node)->nd_body ? RNODE_IN(node)->nd_body : node;
7933 add_trace_branch_coverage(
7936 nd_code_loc(coverage_node),
7937 nd_node_id(coverage_node),
7942 CHECK(COMPILE_(body_seq,
"in body", RNODE_IN(node)->nd_body, popped));
7943 ADD_INSNL(body_seq, line_node, jump, endlabel);
7945 pattern = RNODE_IN(node)->nd_head;
7947 int pat_line = nd_line(pattern);
7948 LABEL *next_pat = NEW_LABEL(pat_line);
7949 ADD_INSN (cond_seq, pattern, dup);
7951 CHECK(iseq_compile_pattern_each(iseq, cond_seq, pattern, l1, next_pat, single_pattern, false, 2, true));
7952 ADD_LABEL(cond_seq, next_pat);
7953 LABEL_UNREMOVABLE(next_pat);
7956 COMPILE_ERROR(ERROR_ARGS
"unexpected node");
7960 node = RNODE_IN(node)->nd_next;
7964 type = nd_type(node);
7965 line = nd_line(node);
7970 ADD_LABEL(cond_seq, elselabel);
7971 ADD_INSN(cond_seq, line_node, pop);
7972 ADD_INSN(cond_seq, line_node, pop);
7973 add_trace_branch_coverage(iseq, cond_seq, nd_code_loc(node), nd_node_id(node), branch_id,
"else", branches);
7974 CHECK(COMPILE_(cond_seq,
"else", node, popped));
7975 ADD_INSNL(cond_seq, line_node, jump, endlabel);
7976 ADD_INSN(cond_seq, line_node, putnil);
7978 ADD_INSN(cond_seq, line_node, putnil);
7982 debugs(
"== else (implicit)\n");
7983 ADD_LABEL(cond_seq, elselabel);
7984 add_trace_branch_coverage(iseq, cond_seq, nd_code_loc(orig_node), nd_node_id(orig_node), branch_id,
"else", branches);
7985 ADD_INSN1(cond_seq, orig_node, putspecialobject,
INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
7987 if (single_pattern) {
7995 LABEL *key_error, *fin;
7998 key_error = NEW_LABEL(line);
7999 fin = NEW_LABEL(line);
8002 kw_arg->references = 0;
8003 kw_arg->keyword_len = 2;
8007 ADD_INSN1(cond_seq, orig_node, topn,
INT2FIX(CASE3_BI_OFFSET_KEY_ERROR_P + 2));
8008 ADD_INSNL(cond_seq, orig_node, branchif, key_error);
8010 ADD_INSN1(cond_seq, orig_node, putspecialobject,
INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
8011 ADD_INSN1(cond_seq, orig_node, putobject, rb_fstring_lit(
"%p: %s"));
8012 ADD_INSN1(cond_seq, orig_node, topn,
INT2FIX(4));
8013 ADD_INSN1(cond_seq, orig_node, topn,
INT2FIX(CASE3_BI_OFFSET_ERROR_STRING + 6));
8014 ADD_SEND(cond_seq, orig_node, id_core_sprintf,
INT2FIX(3));
8015 ADD_SEND(cond_seq, orig_node, id_core_raise,
INT2FIX(2));
8016 ADD_INSNL(cond_seq, orig_node, jump, fin);
8018 ADD_LABEL(cond_seq, key_error);
8020 ADD_INSN1(cond_seq, orig_node, putspecialobject,
INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
8021 ADD_INSN1(cond_seq, orig_node, putobject, rb_fstring_lit(
"%p: %s"));
8022 ADD_INSN1(cond_seq, orig_node, topn,
INT2FIX(4));
8023 ADD_INSN1(cond_seq, orig_node, topn,
INT2FIX(CASE3_BI_OFFSET_ERROR_STRING + 6));
8024 ADD_SEND(cond_seq, orig_node, id_core_sprintf,
INT2FIX(3));
8025 ADD_INSN1(cond_seq, orig_node, topn,
INT2FIX(CASE3_BI_OFFSET_KEY_ERROR_MATCHEE + 4));
8026 ADD_INSN1(cond_seq, orig_node, topn,
INT2FIX(CASE3_BI_OFFSET_KEY_ERROR_KEY + 5));
8028 ADD_SEND(cond_seq, orig_node, id_core_raise,
INT2FIX(1));
8030 ADD_LABEL(cond_seq, fin);
8034 ADD_INSN1(cond_seq, orig_node, topn,
INT2FIX(2));
8035 ADD_SEND(cond_seq, orig_node, id_core_raise,
INT2FIX(2));
8037 ADD_INSN1(cond_seq, orig_node, adjuststack,
INT2FIX(single_pattern ? 7 : 3));
8039 ADD_INSN(cond_seq, orig_node, putnil);
8041 ADD_INSNL(cond_seq, orig_node, jump, endlabel);
8042 ADD_INSN1(cond_seq, orig_node, dupn,
INT2FIX(single_pattern ? 5 : 1));
8044 ADD_INSN(cond_seq, line_node, putnil);
8048 ADD_SEQ(ret, cond_seq);
8049 ADD_SEQ(ret, body_seq);
8050 ADD_LABEL(ret, endlabel);
8054 #undef CASE3_BI_OFFSET_DECONSTRUCTED_CACHE
8055 #undef CASE3_BI_OFFSET_ERROR_STRING
8056 #undef CASE3_BI_OFFSET_KEY_ERROR_P
8057 #undef CASE3_BI_OFFSET_KEY_ERROR_MATCHEE
8058 #undef CASE3_BI_OFFSET_KEY_ERROR_KEY
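/* while/until: the body sits between redo/next labels, the condition
 * branches back to redo_label (reversed for until), and break/next/redo
 * catch-table entries are registered so those keywords also work when thrown
 * from a nested block. */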
8063 const int line = (int)nd_line(node);
8064 const NODE *line_node = node;
8066 LABEL *prev_start_label = ISEQ_COMPILE_DATA(iseq)->start_label;
8067 LABEL *prev_end_label = ISEQ_COMPILE_DATA(iseq)->end_label;
8068 LABEL *prev_redo_label = ISEQ_COMPILE_DATA(iseq)->redo_label;
8069 int prev_loopval_popped = ISEQ_COMPILE_DATA(iseq)->loopval_popped;
8074 LABEL *next_label = ISEQ_COMPILE_DATA(iseq)->start_label = NEW_LABEL(line);
8075 LABEL *redo_label = ISEQ_COMPILE_DATA(iseq)->redo_label = NEW_LABEL(line);
8076 LABEL *break_label = ISEQ_COMPILE_DATA(iseq)->end_label = NEW_LABEL(line);
8077 LABEL *end_label = NEW_LABEL(line);
8078 LABEL *adjust_label = NEW_LABEL(line);
8080 LABEL *next_catch_label = NEW_LABEL(line);
8081 LABEL *tmp_label = NULL;
8083 ISEQ_COMPILE_DATA(iseq)->loopval_popped = 0;
8084 push_ensure_entry(iseq, &enl, NULL, NULL);
8086 if (RNODE_WHILE(node)->nd_state == 1) {
8087 ADD_INSNL(ret, line_node, jump, next_label);
8090 tmp_label = NEW_LABEL(line);
8091 ADD_INSNL(ret, line_node, jump, tmp_label);
8093 ADD_LABEL(ret, adjust_label);
8094 ADD_INSN(ret, line_node, putnil);
8095 ADD_LABEL(ret, next_catch_label);
8096 ADD_INSN(ret, line_node, pop);
8097 ADD_INSNL(ret, line_node, jump, next_label);
8098 if (tmp_label) ADD_LABEL(ret, tmp_label);
8100 ADD_LABEL(ret, redo_label);
8101 branches = decl_branch_base(iseq, PTR2NUM(node), nd_code_loc(node), type == NODE_WHILE ? "while" : "until");
8103 const NODE *
const coverage_node = RNODE_WHILE(node)->nd_body ? RNODE_WHILE(node)->nd_body : node;
8104 add_trace_branch_coverage(
8107 nd_code_loc(coverage_node),
8108 nd_node_id(coverage_node),
8113 CHECK(COMPILE_POPPED(ret,
"while body", RNODE_WHILE(node)->nd_body));
8114 ADD_LABEL(ret, next_label);
8116 if (type == NODE_WHILE) {
8117 CHECK(compile_branch_condition(iseq, ret, RNODE_WHILE(node)->nd_cond,
8118 redo_label, end_label));
8122 CHECK(compile_branch_condition(iseq, ret, RNODE_WHILE(node)->nd_cond,
8123 end_label, redo_label));
8126 ADD_LABEL(ret, end_label);
8127 ADD_ADJUST_RESTORE(ret, adjust_label);
8129 if (UNDEF_P(RNODE_WHILE(node)->nd_state)) {
8131 COMPILE_ERROR(ERROR_ARGS
"unsupported: putundef");
8135 ADD_INSN(ret, line_node, putnil);
8138 ADD_LABEL(ret, break_label);
8141 ADD_INSN(ret, line_node, pop);
8144 ADD_CATCH_ENTRY(CATCH_TYPE_BREAK, redo_label, break_label, NULL,
8146 ADD_CATCH_ENTRY(CATCH_TYPE_NEXT, redo_label, break_label, NULL,
8148 ADD_CATCH_ENTRY(CATCH_TYPE_REDO, redo_label, break_label, NULL,
8149 ISEQ_COMPILE_DATA(iseq)->redo_label);
8151 ISEQ_COMPILE_DATA(iseq)->start_label = prev_start_label;
8152 ISEQ_COMPILE_DATA(iseq)->end_label = prev_end_label;
8153 ISEQ_COMPILE_DATA(iseq)->redo_label = prev_redo_label;
8154 ISEQ_COMPILE_DATA(iseq)->loopval_popped = prev_loopval_popped;
8155 ISEQ_COMPILE_DATA(iseq)->ensure_node_stack = ISEQ_COMPILE_DATA(iseq)->ensure_node_stack->prev;
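/* Iterator calls (obj.each { ... } and for): the block body becomes a child
 * block iseq attached to the send, and a CATCH_TYPE_BREAK entry spanning the
 * call lets break return a value from the loop. */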
8162 const int line = nd_line(node);
8163 const NODE *line_node = node;
8164 const rb_iseq_t *prevblock = ISEQ_COMPILE_DATA(iseq)->current_block;
8165 LABEL *retry_label = NEW_LABEL(line);
8166 LABEL *retry_end_l = NEW_LABEL(line);
8169 ADD_LABEL(ret, retry_label);
8170 if (nd_type_p(node, NODE_FOR)) {
8171 CHECK(COMPILE(ret,
"iter caller (for)", RNODE_FOR(node)->nd_iter));
8173 ISEQ_COMPILE_DATA(iseq)->current_block = child_iseq =
8174 NEW_CHILD_ISEQ(RNODE_FOR(node)->nd_body, make_name_for_block(iseq),
8175 ISEQ_TYPE_BLOCK, line);
8176 ADD_SEND_WITH_BLOCK(ret, line_node, idEach,
INT2FIX(0), child_iseq);
8179 ISEQ_COMPILE_DATA(iseq)->current_block = child_iseq =
8180 NEW_CHILD_ISEQ(RNODE_ITER(node)->nd_body, make_name_for_block(iseq),
8181 ISEQ_TYPE_BLOCK, line);
8182 CHECK(COMPILE(ret,
"iter caller", RNODE_ITER(node)->nd_iter));
8196 iobj = IS_INSN(last_elem) ? (INSN*) last_elem : (INSN*) get_prev_insn((INSN*) last_elem);
8197 while (!IS_INSN_ID(iobj, send) && !IS_INSN_ID(iobj, invokesuper) && !IS_INSN_ID(iobj, sendforward) && !IS_INSN_ID(iobj, invokesuperforward)) {
8198 iobj = (INSN*) get_prev_insn(iobj);
8200 ELEM_INSERT_NEXT(&iobj->link, (LINK_ELEMENT*) retry_end_l);
8204 if (&iobj->link == LAST_ELEMENT(ret)) {
8210 ADD_INSN(ret, line_node, pop);
8213 ISEQ_COMPILE_DATA(iseq)->current_block = prevblock;
8215 ADD_CATCH_ENTRY(CATCH_TYPE_BREAK, retry_label, retry_end_l, child_iseq, retry_end_l);
8225 const NODE *line_node = node;
8226 const NODE *var = RNODE_FOR_MASGN(node)->nd_var;
8227 LABEL *not_single = NEW_LABEL(nd_line(var));
8228 LABEL *not_ary = NEW_LABEL(nd_line(var));
8229 CHECK(COMPILE(ret,
"for var", var));
8230 ADD_INSN(ret, line_node, dup);
8231 ADD_CALL(ret, line_node, idLength,
INT2FIX(0));
8232 ADD_INSN1(ret, line_node, putobject,
INT2FIX(1));
8233 ADD_CALL(ret, line_node, idEq,
INT2FIX(1));
8234 ADD_INSNL(ret, line_node, branchunless, not_single);
8235 ADD_INSN(ret, line_node, dup);
8236 ADD_INSN1(ret, line_node, putobject,
INT2FIX(0));
8237 ADD_CALL(ret, line_node, idAREF,
INT2FIX(1));
8238 ADD_INSN1(ret, line_node, putobject,
rb_cArray);
8239 ADD_INSN(ret, line_node, swap);
8241 ADD_INSN(ret, line_node, dup);
8242 ADD_INSNL(ret, line_node, branchunless, not_ary);
8243 ADD_INSN(ret, line_node, swap);
8244 ADD_LABEL(ret, not_ary);
8245 ADD_INSN(ret, line_node, pop);
8246 ADD_LABEL(ret, not_single);
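/* break / next / redo: inside a literal while/until loop these compile to
 * plain jumps (with stack adjustment and inlined ensure code); inside a block
 * they become a throw instruction carrying TAG_BREAK / TAG_NEXT / TAG_REDO,
 * and escaping from eval is rejected at compile time. */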
8253 const NODE *line_node = node;
8254 unsigned long throw_flag = 0;
8256 if (ISEQ_COMPILE_DATA(iseq)->redo_label != 0 && can_add_ensure_iseq(iseq)) {
8258 LABEL *splabel = NEW_LABEL(0);
8259 ADD_LABEL(ret, splabel);
8260 ADD_ADJUST(ret, line_node, ISEQ_COMPILE_DATA(iseq)->redo_label);
8261 CHECK(COMPILE_(ret,
"break val (while/until)", RNODE_BREAK(node)->nd_stts,
8262 ISEQ_COMPILE_DATA(iseq)->loopval_popped));
8263 add_ensure_iseq(ret, iseq, 0);
8264 ADD_INSNL(ret, line_node, jump, ISEQ_COMPILE_DATA(iseq)->end_label);
8265 ADD_ADJUST_RESTORE(ret, splabel);
8268 ADD_INSN(ret, line_node, putnil);
8275 if (!ISEQ_COMPILE_DATA(ip)) {
8280 if (ISEQ_COMPILE_DATA(ip)->redo_label != 0) {
8281 throw_flag = VM_THROW_NO_ESCAPE_FLAG;
8283 else if (ISEQ_BODY(ip)->type == ISEQ_TYPE_BLOCK) {
8286 else if (ISEQ_BODY(ip)->type == ISEQ_TYPE_EVAL) {
8287 COMPILE_ERROR(ERROR_ARGS "Can't escape from eval with break");
8291 ip = ISEQ_BODY(ip)->parent_iseq;
8296 CHECK(COMPILE(ret, "break val (block)", RNODE_BREAK(node)->nd_stts));
8297 ADD_INSN1(ret, line_node, throw, INT2FIX(throw_flag | TAG_BREAK));
8299 ADD_INSN(ret, line_node, pop);
8303 COMPILE_ERROR(ERROR_ARGS
"Invalid break");
8312 const NODE *line_node = node;
8313 unsigned long throw_flag = 0;
8315 if (ISEQ_COMPILE_DATA(iseq)->redo_label != 0 && can_add_ensure_iseq(iseq)) {
8316 LABEL *splabel = NEW_LABEL(0);
8317 debugs(
"next in while loop\n");
8318 ADD_LABEL(ret, splabel);
8319 CHECK(COMPILE(ret,
"next val/valid syntax?", RNODE_NEXT(node)->nd_stts));
8320 add_ensure_iseq(ret, iseq, 0);
8321 ADD_ADJUST(ret, line_node, ISEQ_COMPILE_DATA(iseq)->redo_label);
8322 ADD_INSNL(ret, line_node, jump, ISEQ_COMPILE_DATA(iseq)->start_label);
8323 ADD_ADJUST_RESTORE(ret, splabel);
8325 ADD_INSN(ret, line_node, putnil);
8328 else if (ISEQ_COMPILE_DATA(iseq)->end_label && can_add_ensure_iseq(iseq)) {
8329 LABEL *splabel = NEW_LABEL(0);
8330 debugs(
"next in block\n");
8331 ADD_LABEL(ret, splabel);
8332 ADD_ADJUST(ret, line_node, ISEQ_COMPILE_DATA(iseq)->start_label);
8333 CHECK(COMPILE(ret,
"next val", RNODE_NEXT(node)->nd_stts));
8334 add_ensure_iseq(ret, iseq, 0);
8335 ADD_INSNL(ret, line_node, jump, ISEQ_COMPILE_DATA(iseq)->end_label);
8336 ADD_ADJUST_RESTORE(ret, splabel);
8339 ADD_INSN(ret, line_node, putnil);
8346 if (!ISEQ_COMPILE_DATA(ip)) {
8351 throw_flag = VM_THROW_NO_ESCAPE_FLAG;
8352 if (ISEQ_COMPILE_DATA(ip)->redo_label != 0) {
8356 else if (ISEQ_BODY(ip)->type == ISEQ_TYPE_BLOCK) {
8359 else if (ISEQ_BODY(ip)->type == ISEQ_TYPE_EVAL) {
8360 COMPILE_ERROR(ERROR_ARGS "Can't escape from eval with next");
8364 ip = ISEQ_BODY(ip)->parent_iseq;
8367 CHECK(COMPILE(ret, "next val", RNODE_NEXT(node)->nd_stts));
8368 ADD_INSN1(ret, line_node, throw, INT2FIX(throw_flag | TAG_NEXT));
8371 ADD_INSN(ret, line_node, pop);
8375 COMPILE_ERROR(ERROR_ARGS
"Invalid next");
8385 const NODE *line_node = node;
8387 if (ISEQ_COMPILE_DATA(iseq)->redo_label && can_add_ensure_iseq(iseq)) {
8388 LABEL *splabel = NEW_LABEL(0);
8389 debugs(
"redo in while");
8390 ADD_LABEL(ret, splabel);
8391 ADD_ADJUST(ret, line_node, ISEQ_COMPILE_DATA(iseq)->redo_label);
8392 add_ensure_iseq(ret, iseq, 0);
8393 ADD_INSNL(ret, line_node, jump, ISEQ_COMPILE_DATA(iseq)->redo_label);
8394 ADD_ADJUST_RESTORE(ret, splabel);
8396 ADD_INSN(ret, line_node, putnil);
8399 else if (ISEQ_BODY(iseq)->type != ISEQ_TYPE_EVAL && ISEQ_COMPILE_DATA(iseq)->start_label && can_add_ensure_iseq(iseq)) {
8400 LABEL *splabel = NEW_LABEL(0);
8402 debugs(
"redo in block");
8403 ADD_LABEL(ret, splabel);
8404 add_ensure_iseq(ret, iseq, 0);
8405 ADD_ADJUST(ret, line_node, ISEQ_COMPILE_DATA(iseq)->start_label);
8406 ADD_INSNL(ret, line_node, jump, ISEQ_COMPILE_DATA(iseq)->start_label);
8407 ADD_ADJUST_RESTORE(ret, splabel);
8410 ADD_INSN(ret, line_node, putnil);
8417 if (!ISEQ_COMPILE_DATA(ip)) {
8422 if (ISEQ_COMPILE_DATA(ip)->redo_label != 0) {
8425 else if (ISEQ_BODY(ip)->type == ISEQ_TYPE_BLOCK) {
8428 else if (ISEQ_BODY(ip)->type == ISEQ_TYPE_EVAL) {
8429 COMPILE_ERROR(ERROR_ARGS "Can't escape from eval with redo");
8433 ip = ISEQ_BODY(ip)->parent_iseq;
8436 ADD_INSN(ret, line_node, putnil);
8437 ADD_INSN1(ret, line_node, throw, INT2FIX(VM_THROW_NO_ESCAPE_FLAG | TAG_REDO));
8440 ADD_INSN(ret, line_node, pop);
8444 COMPILE_ERROR(ERROR_ARGS
"Invalid redo");
8454 const NODE *line_node = node;
8456 if (ISEQ_BODY(iseq)->type == ISEQ_TYPE_RESCUE) {
8457 ADD_INSN(ret, line_node, putnil);
8458 ADD_INSN1(ret, line_node, throw, INT2FIX(TAG_RETRY));
8461 ADD_INSN(ret, line_node, pop);
8465 COMPILE_ERROR(ERROR_ARGS
"Invalid retry");
8474 const int line = nd_line(node);
8475 const NODE *line_node = node;
8476 LABEL *lstart = NEW_LABEL(line);
8477 LABEL *lend = NEW_LABEL(line);
8478 LABEL *lcont = NEW_LABEL(line);
8479 const rb_iseq_t *rescue = NEW_CHILD_ISEQ(RNODE_RESCUE(node)->nd_resq,
8481 ISEQ_BODY(iseq)->location.label),
8482 ISEQ_TYPE_RESCUE, line);
8484 lstart->rescued = LABEL_RESCUE_BEG;
8485 lend->rescued = LABEL_RESCUE_END;
8486 ADD_LABEL(ret, lstart);
8488 bool prev_in_rescue = ISEQ_COMPILE_DATA(iseq)->in_rescue;
8489 ISEQ_COMPILE_DATA(iseq)->in_rescue = true;
8491 CHECK(COMPILE(ret,
"rescue head", RNODE_RESCUE(node)->nd_head));
8493 ISEQ_COMPILE_DATA(iseq)->in_rescue = prev_in_rescue;
8495 ADD_LABEL(ret, lend);
8496 if (RNODE_RESCUE(node)->nd_else) {
8497 ADD_INSN(ret, line_node, pop);
8498 CHECK(COMPILE(ret,
"rescue else", RNODE_RESCUE(node)->nd_else));
8500 ADD_INSN(ret, line_node, nop);
8501 ADD_LABEL(ret, lcont);
8504 ADD_INSN(ret, line_node, pop);
8508 ADD_CATCH_ENTRY(CATCH_TYPE_RESCUE, lstart, lend, rescue, lcont);
8509 ADD_CATCH_ENTRY(CATCH_TYPE_RETRY, lend, lcont, NULL, lstart);
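/* Each rescue clause: the raised exception (the reserved LVAR_ERRINFO local)
 * is tested against every listed class with
 * checkmatch(VM_CHECKMATCH_TYPE_RESCUE); on a hit the optional "=> e"
 * variable is assigned and the clause body runs, otherwise control falls
 * through to the next clause. */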
8516 const int line = nd_line(node);
8517 const NODE *line_node = node;
8518 const NODE *resq = node;
8520 LABEL *label_miss, *label_hit;
8523 label_miss = NEW_LABEL(line);
8524 label_hit = NEW_LABEL(line);
8526 narg = RNODE_RESBODY(resq)->nd_args;
8528 switch (nd_type(narg)) {
8531 ADD_GETLOCAL(ret, line_node, LVAR_ERRINFO, 0);
8532 CHECK(COMPILE(ret, "rescue arg", RNODE_LIST(narg)->nd_head));
8533 ADD_INSN1(ret, line_node, checkmatch, INT2FIX(VM_CHECKMATCH_TYPE_RESCUE));
8534 ADD_INSNL(ret, line_node, branchif, label_hit);
8535 narg = RNODE_LIST(narg)->nd_next;
8541 ADD_GETLOCAL(ret, line_node, LVAR_ERRINFO, 0);
8542 CHECK(COMPILE(ret, "rescue/cond splat", narg));
8543 ADD_INSN1(ret, line_node, checkmatch, INT2FIX(VM_CHECKMATCH_TYPE_RESCUE | VM_CHECKMATCH_ARRAY));
8544 ADD_INSNL(ret, line_node, branchif, label_hit);
8547 UNKNOWN_NODE(
"NODE_RESBODY", narg, COMPILE_NG);
8551 ADD_GETLOCAL(ret, line_node, LVAR_ERRINFO, 0);
8553 ADD_INSN1(ret, line_node, checkmatch,
INT2FIX(VM_CHECKMATCH_TYPE_RESCUE));
8554 ADD_INSNL(ret, line_node, branchif, label_hit);
8556 ADD_INSNL(ret, line_node, jump, label_miss);
8557 ADD_LABEL(ret, label_hit);
8560 if (RNODE_RESBODY(resq)->nd_exc_var) {
8561 CHECK(COMPILE_POPPED(ret,
"resbody exc_var", RNODE_RESBODY(resq)->nd_exc_var));
8564 if (nd_type(RNODE_RESBODY(resq)->nd_body) == NODE_BEGIN && RNODE_BEGIN(RNODE_RESBODY(resq)->nd_body)->nd_body == NULL && !RNODE_RESBODY(resq)->nd_exc_var) {
8566 ADD_SYNTHETIC_INSN(ret, nd_line(RNODE_RESBODY(resq)->nd_body), -1, putnil);
8569 CHECK(COMPILE(ret,
"resbody body", RNODE_RESBODY(resq)->nd_body));
8572 if (ISEQ_COMPILE_DATA(iseq)->option->tailcall_optimization) {
8573 ADD_INSN(ret, line_node, nop);
8575 ADD_INSN(ret, line_node, leave);
8576 ADD_LABEL(ret, label_miss);
8577 resq = RNODE_RESBODY(resq)->nd_next;
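/* ensure: the ensure body is compiled twice: once inline for the normal exit
 * path, and once into an ISEQ_TYPE_ENSURE child iseq that a CATCH_TYPE_ENSURE
 * entry runs when the protected region is left by an exception or throw. */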
8585 const int line = nd_line(RNODE_ENSURE(node)->nd_ensr);
8586 const NODE *line_node = node;
8588 const rb_iseq_t *ensure = NEW_CHILD_ISEQ(RNODE_ENSURE(node)->nd_ensr,
8590 ISEQ_TYPE_ENSURE, line);
8591 LABEL *lstart = NEW_LABEL(line);
8592 LABEL *lend = NEW_LABEL(line);
8593 LABEL *lcont = NEW_LABEL(line);
8601 CHECK(COMPILE_POPPED(ensr,
"ensure ensr", RNODE_ENSURE(node)->nd_ensr));
8603 last_leave = last && IS_INSN(last) && IS_INSN_ID(last, leave);
8608 push_ensure_entry(iseq, &enl, &er, RNODE_ENSURE(node)->nd_ensr);
8610 ADD_LABEL(ret, lstart);
8611 CHECK(COMPILE_(ret,
"ensure head", RNODE_ENSURE(node)->nd_head, (popped | last_leave)));
8612 ADD_LABEL(ret, lend);
8614 if (!popped && last_leave) ADD_INSN(ret, line_node, putnil);
8615 ADD_LABEL(ret, lcont);
8616 if (last_leave) ADD_INSN(ret, line_node, pop);
8618 erange = ISEQ_COMPILE_DATA(iseq)->ensure_node_stack->erange;
8619 if (lstart->link.next != &lend->link) {
8621 ADD_CATCH_ENTRY(CATCH_TYPE_ENSURE, erange->begin, erange->end,
8623 erange = erange->next;
8627 ISEQ_COMPILE_DATA(iseq)->ensure_node_stack = enl.prev;
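/* return: in a method body this is an ordinary leave (after any ensure code);
 * in a block or inside rescue/ensure it becomes throw TAG_RETURN so the VM
 * unwinds to the enclosing method frame. */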
8634 const NODE *line_node = node;
8637 enum rb_iseq_type type = ISEQ_BODY(iseq)->type;
8639 enum rb_iseq_type t = type;
8640 const NODE *retval = RNODE_RETURN(node)->nd_stts;
8643 while (t == ISEQ_TYPE_RESCUE || t == ISEQ_TYPE_ENSURE) {
8644 if (!(is = ISEQ_BODY(is)->parent_iseq))
break;
8645 t = ISEQ_BODY(is)->type;
8649 case ISEQ_TYPE_MAIN:
8651 rb_warn(
"argument of top-level return is ignored");
8655 type = ISEQ_TYPE_METHOD;
8662 if (type == ISEQ_TYPE_METHOD) {
8663 splabel = NEW_LABEL(0);
8664 ADD_LABEL(ret, splabel);
8665 ADD_ADJUST(ret, line_node, 0);
8668 CHECK(COMPILE(ret,
"return nd_stts (return val)", retval));
8670 if (type == ISEQ_TYPE_METHOD && can_add_ensure_iseq(iseq)) {
8671 add_ensure_iseq(ret, iseq, 1);
8673 ADD_INSN(ret, line_node, leave);
8674 ADD_ADJUST_RESTORE(ret, splabel);
8677 ADD_INSN(ret, line_node, putnil);
8681 ADD_INSN1(ret, line_node, throw, INT2FIX(TAG_RETURN));
8683 ADD_INSN(ret, line_node, pop);
8694 if (!i) return false;
8695 if (IS_TRACE(i)) i = i->prev;
8696 if (!IS_INSN(i) || !IS_INSN_ID(i, putnil)) return false;
8698 if (IS_ADJUST(i)) i = i->prev;
8699 if (!IS_INSN(i)) return false;
8700 switch (INSN_OF(i)) {
8707 (ret->last = last->prev)->next = NULL;
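/* String interpolation (#{...}): unless the expression is already known to
 * produce a String, objtostring/anytostring call #to_s and guarantee a
 * String result before concatenation. */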
8714 CHECK(COMPILE_(ret,
"nd_body", node, popped));
8716 if (!popped && !all_string_result_p(node)) {
8717 const NODE *line_node = node;
8718 const unsigned int flag = VM_CALL_FCALL;
8722 ADD_INSN(ret, line_node, dup);
8723 ADD_INSN1(ret, line_node, objtostring, new_callinfo(iseq, idTo_s, 0, flag, NULL, FALSE));
8724 ADD_INSN(ret, line_node, anytostring);
8732 int idx = ISEQ_BODY(ISEQ_BODY(iseq)->local_iseq)->local_table_size - get_local_var_idx(iseq, id);
8734 debugs("id: %s idx: %d\n", rb_id2name(id), idx);
8735 ADD_GETLOCAL(ret, line_node, idx, get_lvar_level(iseq));
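/* Safe navigation (&.): the receiver is dup'ed and branchnil skips the call
 * when it is nil; both outcomes are reported to branch coverage. */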
8741 LABEL *else_label = NEW_LABEL(nd_line(line_node));
8744 br = decl_branch_base(iseq, PTR2NUM(node), nd_code_loc(node),
"&.");
8746 ADD_INSN(recv, line_node, dup);
8747 ADD_INSNL(recv, line_node, branchnil, else_label);
8748 add_trace_branch_coverage(iseq, recv, nd_code_loc(node), nd_node_id(node), 0,
"then", br);
8756 if (!else_label)
return;
8757 end_label = NEW_LABEL(nd_line(line_node));
8758 ADD_INSNL(ret, line_node, jump, end_label);
8759 ADD_LABEL(ret, else_label);
8760 add_trace_branch_coverage(iseq, ret, nd_code_loc(node), nd_node_id(node), 1,
"else", branches);
8761 ADD_LABEL(ret, end_label);
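/* "literal".freeze and -"literal" with no arguments and no block are
 * specialized into opt_str_freeze / opt_str_uminus, which push a pre-frozen
 * string unless the method has been redefined. */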
8770 if (get_nd_recv(node) &&
8771 (nd_type_p(get_nd_recv(node), NODE_STR) || nd_type_p(get_nd_recv(node), NODE_FILE)) &&
8772 (get_node_call_nd_mid(node) == idFreeze || get_node_call_nd_mid(node) == idUMinus) &&
8773 get_nd_args(node) == NULL &&
8774 ISEQ_COMPILE_DATA(iseq)->current_block == NULL &&
8775 ISEQ_COMPILE_DATA(iseq)->option->specialized_instruction) {
8776 VALUE str = get_string_value(get_nd_recv(node));
8777 if (get_node_call_nd_mid(node) == idUMinus) {
8778 ADD_INSN2(ret, line_node, opt_str_uminus, str,
8779 new_callinfo(iseq, idUMinus, 0, 0, NULL, FALSE));
8782 ADD_INSN2(ret, line_node, opt_str_freeze, str,
8783 new_callinfo(iseq, idFreeze, 0, 0, NULL, FALSE));
8787 ADD_INSN(ret, line_node, pop);
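/* recv["literal"] is specialized into opt_aref_with so the string key does
 * not have to be allocated on every call (per the condition below, this is
 * only applied when the file is not under frozen-string-literal). */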
8794 if (get_node_call_nd_mid(node) == idAREF && !private_recv_p(node) && get_nd_args(node) &&
8795 nd_type_p(get_nd_args(node), NODE_LIST) && RNODE_LIST(get_nd_args(node))->as.nd_alen == 1 &&
8796 (nd_type_p(RNODE_LIST(get_nd_args(node))->nd_head, NODE_STR) || nd_type_p(RNODE_LIST(get_nd_args(node))->nd_head, NODE_FILE)) &&
8797 ISEQ_COMPILE_DATA(iseq)->current_block == NULL &&
8798 !frozen_string_literal_p(iseq) &&
8799 ISEQ_COMPILE_DATA(iseq)->option->specialized_instruction) {
8800 VALUE str = get_string_value(RNODE_LIST(get_nd_args(node))->nd_head);
8801 CHECK(COMPILE(ret,
"recv", get_nd_recv(node)));
8802 ADD_INSN2(ret, line_node, opt_aref_with, str,
8803 new_callinfo(iseq, idAREF, 1, 0, NULL, FALSE));
8806 ADD_INSN(ret, line_node, pop);
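/* __builtin support: calls spelled __builtin_foo(...), __builtin.foo(...) or
 * Primitive.foo(...) in core-library code are looked up in the iseq's builtin
 * function table and compiled into invokebuiltin instructions instead of
 * ordinary method calls. */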
8814 iseq_has_builtin_function_table(const rb_iseq_t *iseq)
8816 return ISEQ_COMPILE_DATA(iseq)->builtin_function_table != NULL;
8820 iseq_builtin_function_lookup(const rb_iseq_t *iseq, const char *name)
8823 const struct rb_builtin_function *table = ISEQ_COMPILE_DATA(iseq)->builtin_function_table;
8824 for (i=0; table[i].index != -1; i++) {
8825 if (strcmp(table[i].name, name) == 0) {
8833 iseq_builtin_function_name(const enum node_type type, const NODE *recv, ID mid)
8836 static const char prefix[] = "__builtin_";
8837 const size_t prefix_len = sizeof(prefix) - 1;
8842 switch (nd_type(recv)) {
8844 if (RNODE_VCALL(recv)->nd_mid == rb_intern("__builtin")) {
8849 if (RNODE_CONST(recv)->nd_vid == rb_intern("Primitive")) {
8859 if (UNLIKELY(strncmp(prefix, name, prefix_len) == 0)) {
8860 return &name[prefix_len];
8869 delegate_call_p(const rb_iseq_t *iseq, unsigned int argc, const LINK_ANCHOR *args, unsigned int *pstart_index)
8876 else if (argc <= ISEQ_BODY(iseq)->local_table_size) {
8877 unsigned int start=0;
8882 argc + start <= ISEQ_BODY(iseq)->local_table_size;
8886 for (unsigned int i=start; i-start<argc; i++) {
8887 if (IS_INSN(elem) &&
8888 INSN_OF(elem) == BIN(getlocal)) {
8889 int local_index = FIX2INT(OPERAND_AT(elem, 0));
8890 int local_level = FIX2INT(OPERAND_AT(elem, 1));
8892 if (local_level == 0) {
8893 unsigned int index = ISEQ_BODY(iseq)->local_table_size - (local_index - VM_ENV_DATA_SIZE + 1);
8895 fprintf(stderr, "lvar:%s (%d), id:%s (%d) local_index:%d, local_size:%d\n",
8896 rb_id2name(ISEQ_BODY(iseq)->local_table[i]), i,
8897 rb_id2name(ISEQ_BODY(iseq)->local_table[index]), index,
8898 local_index, (int)ISEQ_BODY(iseq)->local_table_size);
8922 *pstart_index = start;
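/* compile_builtin_attr: Primitive.attr! takes symbol arguments (e.g. :leaf, :inline_block,
 * :use_block, :c_trace) and sets the matching builtin_attrs flags on the iseq. */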
8936 if (!node) goto no_arg;
8938 if (!nd_type_p(node, NODE_LIST)) goto bad_arg;
8939 const NODE *next = RNODE_LIST(node)->nd_next;
8941 node = RNODE_LIST(node)->nd_head;
8942 if (!node) goto no_arg;
8943 switch (nd_type(node)) {
8945 symbol = rb_node_sym_string_val(node);
8951 if (!SYMBOL_P(symbol)) goto non_symbol_arg;
8955 ISEQ_BODY(iseq)->builtin_attrs |= BUILTIN_ATTR_LEAF;
8957 else if (strcmp(RSTRING_PTR(string), "inline_block") == 0) {
8958 ISEQ_BODY(iseq)->builtin_attrs |= BUILTIN_ATTR_INLINE_BLOCK;
8960 else if (strcmp(RSTRING_PTR(string), "use_block") == 0) {
8961 iseq_set_use_block(iseq);
8963 else if (strcmp(RSTRING_PTR(string), "c_trace") == 0) {
8965 ISEQ_BODY(iseq)->builtin_attrs |= BUILTIN_ATTR_C_TRACE;
8974 COMPILE_ERROR(ERROR_ARGS "attr!: no argument");
8977 COMPILE_ERROR(ERROR_ARGS "non symbol argument to attr!: %s", rb_builtin_class_name(symbol));
8980 COMPILE_ERROR(ERROR_ARGS "unknown argument to attr!: %s", RSTRING_PTR(string));
8983 UNKNOWN_NODE("attr!", node, COMPILE_NG);
8991 if (!node) goto no_arg;
8992 if (!nd_type_p(node, NODE_LIST)) goto bad_arg;
8993 if (RNODE_LIST(node)->nd_next) goto too_many_arg;
8994 node = RNODE_LIST(node)->nd_head;
8995 if (!node) goto no_arg;
8996 switch (nd_type(node)) {
8998 name = rb_node_sym_string_val(node);
9003 if (!SYMBOL_P(name)) goto non_symbol_arg;
9005 compile_lvar(iseq, ret, line_node, SYM2ID(name));
9009 COMPILE_ERROR(ERROR_ARGS "arg!: no argument");
9012 COMPILE_ERROR(ERROR_ARGS "arg!: too many argument");
9015 COMPILE_ERROR(ERROR_ARGS "non symbol argument to arg!: %s",
9016 rb_builtin_class_name(name));
9019 UNKNOWN_NODE("arg!", node, COMPILE_NG);
9023 mandatory_node(const rb_iseq_t *iseq, const NODE *cond_node)
9025 const NODE *node = ISEQ_COMPILE_DATA(iseq)->root_node;
9026 if (nd_type(node) == NODE_IF && RNODE_IF(node)->nd_cond == cond_node) {
9027 return RNODE_IF(node)->nd_body;
9030 rb_bug("mandatory_node: can't find mandatory node");
9035 compile_builtin_mandatory_only_method(rb_iseq_t *iseq, const NODE *node, const NODE *line_node)
9039 .pre_args_num = ISEQ_BODY(iseq)->param.lead_num,
9042 rb_node_init(RNODE(&args_node), NODE_ARGS);
9043 args_node.nd_ainfo = args;
9046 const int skip_local_size = ISEQ_BODY(iseq)->param.size - ISEQ_BODY(iseq)->param.lead_num;
9047 const int table_size = ISEQ_BODY(iseq)->local_table_size - skip_local_size;
9051 tbl->size = table_size;
9056 for (i=0; i<ISEQ_BODY(iseq)->param.lead_num; i++) {
9057 tbl->ids[i] = ISEQ_BODY(iseq)->local_table[i];
9060 for (; i<table_size; i++) {
9061 tbl->ids[i] = ISEQ_BODY(iseq)->local_table[i + skip_local_size];
9065 rb_node_init(RNODE(&scope_node), NODE_SCOPE);
9066 scope_node.nd_tbl = tbl;
9067 scope_node.nd_body = mandatory_node(iseq, node);
9068 scope_node.nd_args = &args_node;
9070 VALUE ast_value = rb_ruby_ast_new(RNODE(&scope_node));
9072 ISEQ_BODY(iseq)->mandatory_only_iseq =
9073 rb_iseq_new_with_opt(ast_value, rb_iseq_base_label(iseq),
9074 rb_iseq_path(iseq), rb_iseq_realpath(iseq),
9075 nd_line(line_node), NULL, 0,
9076 ISEQ_TYPE_METHOD, ISEQ_COMPILE_DATA(iseq)->option,
9077 ISEQ_BODY(iseq)->variable.script_lines);
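/* compile_builtin_function_call: dispatches the recognized builtin pseudo-calls. cstmt!/cexpr!
 * are renamed to a per-line "_bi<N>" inline function, cconst! is evaluated at compile time and
 * emitted as putobject, attr!/arg!/mandatory_only? get dedicated handling, and anything else
 * becomes invokebuiltin (or opt_invokebuiltin_delegate when the args are plain leading locals). */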
9087 NODE *args_node = get_nd_args(node);
9089 if (parent_block != NULL) {
9090 COMPILE_ERROR(ERROR_ARGS_AT(line_node) "should not call builtins here.");
9094 # define BUILTIN_INLINE_PREFIX "_bi"
9095 char inline_func[sizeof(BUILTIN_INLINE_PREFIX) + DECIMAL_SIZE_OF(int)];
9096 bool cconst = false;
9101 if (strcmp("cstmt!", builtin_func) == 0 ||
9102 strcmp("cexpr!", builtin_func) == 0) {
9105 else if (strcmp("cconst!", builtin_func) == 0) {
9108 else if (strcmp("cinit!", builtin_func) == 0) {
9112 else if (strcmp("attr!", builtin_func) == 0) {
9113 return compile_builtin_attr(iseq, args_node);
9115 else if (strcmp("arg!", builtin_func) == 0) {
9116 return compile_builtin_arg(iseq, ret, args_node, line_node, popped);
9118 else if (strcmp("mandatory_only?", builtin_func) == 0) {
9120 rb_bug("mandatory_only? should be in if condition");
9122 else if (!LIST_INSN_SIZE_ZERO(ret)) {
9123 rb_bug("mandatory_only? should be put on top");
9126 ADD_INSN1(ret, line_node, putobject, Qfalse);
9127 return compile_builtin_mandatory_only_method(iseq, node, line_node);
9130 rb_bug("can't find builtin function:%s", builtin_func);
9133 COMPILE_ERROR(ERROR_ARGS "can't find builtin function:%s", builtin_func);
9137 int inline_index = nd_line(node);
9138 snprintf(inline_func, sizeof(inline_func), BUILTIN_INLINE_PREFIX "%d", inline_index);
9139 builtin_func = inline_func;
9145 typedef VALUE(*builtin_func0)(void *, VALUE);
9146 VALUE const_val = (*(builtin_func0)(uintptr_t)bf->func_ptr)(NULL, Qnil);
9147 ADD_INSN1(ret, line_node, putobject, const_val);
9153 unsigned int flag = 0;
9155 VALUE argc = setup_args(iseq, args, args_node, &flag, &keywords);
9157 if (FIX2INT(argc) != bf->argc) {
9158 COMPILE_ERROR(ERROR_ARGS "argc is not match for builtin function:%s (expect %d but %d)",
9159 builtin_func, bf->argc, FIX2INT(argc));
9163 unsigned int start_index;
9164 if (delegate_call_p(iseq, FIX2INT(argc), args, &start_index)) {
9165 ADD_INSN2(ret, line_node, opt_invokebuiltin_delegate, bf, INT2FIX(start_index));
9169 ADD_INSN1(ret, line_node, invokebuiltin, bf);
9172 if (popped) ADD_INSN(ret, line_node, pop);
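/* compile_call: the generic call path. The receiver is compiled first (getblockparamproxy for
 * block.call on a block parameter, putself for FCALL/VCALL), then the arguments; VM_CALL_* flags
 * are accumulated and a single send is emitted, with the &. nil-guard wrapped around it for QCALL. */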
9178 compile_call(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, const enum node_type type, const NODE *const line_node, int popped, bool assume_receiver)
9186 ID mid = get_node_call_nd_mid(node);
9188 unsigned int flag = 0;
9190 const rb_iseq_t *parent_block = ISEQ_COMPILE_DATA(iseq)->current_block;
9191 LABEL *else_label = NULL;
9194 ISEQ_COMPILE_DATA(iseq)->current_block = NULL;
9198 #if OPT_SUPPORT_JOKE
9199 if (nd_type_p(node, NODE_VCALL)) {
9204 CONST_ID(id_answer, "the_answer_to_life_the_universe_and_everything");
9206 if (mid == id_bitblt) {
9207 ADD_INSN(ret, line_node, bitblt);
9210 else if (mid == id_answer) {
9211 ADD_INSN(ret, line_node, answer);
9223 if (nd_type_p(node, NODE_FCALL) &&
9224 (mid == goto_id || mid == label_id)) {
9227 st_table *labels_table = ISEQ_COMPILE_DATA(iseq)->labels_table;
9230 if (!labels_table) {
9231 labels_table = st_init_numtable();
9232 ISEQ_COMPILE_DATA(iseq)->labels_table = labels_table;
9235 COMPILE_ERROR(ERROR_ARGS "invalid goto/label format");
9239 if (mid == goto_id) {
9240 ADD_INSNL(ret, line_node, jump, label);
9243 ADD_LABEL(ret, label);
9250 const char *builtin_func;
9251 if (UNLIKELY(iseq_has_builtin_function_table(iseq)) &&
9252 (builtin_func = iseq_builtin_function_name(type, get_nd_recv(node), mid)) != NULL) {
9253 return compile_builtin_function_call(iseq, ret, node, line_node, popped, parent_block, args, builtin_func);
9257 if (!assume_receiver) {
9258 if (type == NODE_CALL || type == NODE_OPCALL || type == NODE_QCALL) {
9261 if (mid == idCall &&
9262 nd_type_p(get_nd_recv(node), NODE_LVAR) &&
9263 iseq_block_param_id_p(iseq, RNODE_LVAR(get_nd_recv(node))->nd_vid, &idx, &level)) {
9264 ADD_INSN2(recv, get_nd_recv(node), getblockparamproxy, INT2FIX(idx + VM_ENV_DATA_SIZE - 1), INT2FIX(level));
9266 else if (private_recv_p(node)) {
9267 ADD_INSN(recv, node, putself);
9268 flag |= VM_CALL_FCALL;
9271 CHECK(COMPILE(recv, "recv", get_nd_recv(node)));
9274 if (type == NODE_QCALL) {
9275 else_label = qcall_branch_start(iseq, recv, &branches, node, line_node);
9278 else if (type == NODE_FCALL || type == NODE_VCALL) {
9279 ADD_CALL_RECEIVER(recv, line_node);
9284 if (type != NODE_VCALL) {
9285 argc = setup_args(iseq, args, get_nd_args(node), &flag, &keywords);
9286 CHECK(!NIL_P(argc));
9295 debugp_param("call args argc", argc);
9296 debugp_param("call method", ID2SYM(mid));
9298 switch ((int)type) {
9300 flag |= VM_CALL_VCALL;
9303 flag |= VM_CALL_FCALL;
9306 if ((flag & VM_CALL_ARGS_BLOCKARG) && (flag & VM_CALL_KW_SPLAT) && !(flag & VM_CALL_KW_SPLAT_MUT)) {
9307 ADD_INSN(ret, line_node, splatkw);
9309 ADD_SEND_R(ret, line_node, mid, argc, parent_block, INT2FIX(flag), keywords);
9311 qcall_branch_end(iseq, ret, else_label, branches, node, line_node);
9313 ADD_INSN(ret, line_node, pop);
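/* NODE_OP_ASGN1: recv[idx] op= value. The receiver and index are duplicated so recv[idx] can be
 * read, combined with value (or short-circuited for ||= / &&=), and written back via []= while
 * leaving the result on the stack unless popped. */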
9321 const int line = nd_line(node);
9323 unsigned int flag = 0;
9325 ID id = RNODE_OP_ASGN1(node)->nd_mid;
9351 ADD_INSN(ret, node, putnil);
9353 asgnflag = COMPILE_RECV(ret, "NODE_OP_ASGN1 recv", node, RNODE_OP_ASGN1(node)->nd_recv);
9354 CHECK(asgnflag != -1);
9355 switch (nd_type(RNODE_OP_ASGN1(node)->nd_index)) {
9360 argc = setup_args(iseq, ret, RNODE_OP_ASGN1(node)->nd_index, &flag, NULL);
9361 CHECK(!NIL_P(argc));
9363 int dup_argn = FIX2INT(argc) + 1;
9364 ADD_INSN1(ret, node, dupn, INT2FIX(dup_argn));
9366 ADD_SEND_R(ret, node, idAREF, argc, NULL, INT2FIX(flag & ~VM_CALL_ARGS_SPLAT_MUT), NULL);
9368 if (id == idOROP || id == idANDOP) {
9377 LABEL *label = NEW_LABEL(line);
9378 LABEL *lfin = NEW_LABEL(line);
9380 ADD_INSN(ret, node, dup);
9382 ADD_INSNL(ret, node, branchif, label);
9385 ADD_INSNL(ret, node, branchunless, label);
9387 ADD_INSN(ret, node, pop);
9389 CHECK(COMPILE(ret, "NODE_OP_ASGN1 nd_rvalue: ", RNODE_OP_ASGN1(node)->nd_rvalue));
9391 ADD_INSN1(ret, node, setn, INT2FIX(dup_argn+1));
9393 if (flag & VM_CALL_ARGS_SPLAT) {
9394 if (!(flag & VM_CALL_ARGS_SPLAT_MUT)) {
9395 ADD_INSN(ret, node, swap);
9396 ADD_INSN1(ret, node, splatarray, Qtrue);
9397 ADD_INSN(ret, node, swap);
9398 flag |= VM_CALL_ARGS_SPLAT_MUT;
9400 ADD_INSN1(ret, node, pushtoarray, INT2FIX(1));
9401 ADD_SEND_R(ret, node, idASET, argc, NULL, INT2FIX(flag), NULL);
9404 ADD_SEND_R(ret, node, idASET, FIXNUM_INC(argc, 1), NULL, INT2FIX(flag), NULL);
9406 ADD_INSN(ret, node, pop);
9407 ADD_INSNL(ret, node, jump, lfin);
9408 ADD_LABEL(ret, label);
9410 ADD_INSN1(ret, node, setn, INT2FIX(dup_argn+1));
9412 ADD_INSN1(ret, node, adjuststack, INT2FIX(dup_argn+1));
9413 ADD_LABEL(ret, lfin);
9416 CHECK(COMPILE(ret, "NODE_OP_ASGN1 nd_rvalue: ", RNODE_OP_ASGN1(node)->nd_rvalue));
9417 ADD_SEND(ret, node, id, INT2FIX(1));
9419 ADD_INSN1(ret, node, setn, INT2FIX(dup_argn+1));
9421 if (flag & VM_CALL_ARGS_SPLAT) {
9422 if (flag & VM_CALL_KW_SPLAT) {
9423 ADD_INSN1(ret, node, topn, INT2FIX(2));
9424 if (!(flag & VM_CALL_ARGS_SPLAT_MUT)) {
9425 ADD_INSN1(ret, node, splatarray, Qtrue);
9426 flag |= VM_CALL_ARGS_SPLAT_MUT;
9428 ADD_INSN(ret, node, swap);
9429 ADD_INSN1(ret, node, pushtoarray, INT2FIX(1));
9430 ADD_INSN1(ret, node, setn, INT2FIX(2));
9431 ADD_INSN(ret, node, pop);
9434 if (!(flag & VM_CALL_ARGS_SPLAT_MUT)) {
9435 ADD_INSN(ret, node, swap);
9436 ADD_INSN1(ret, node, splatarray, Qtrue);
9437 ADD_INSN(ret, node, swap);
9438 flag |= VM_CALL_ARGS_SPLAT_MUT;
9440 ADD_INSN1(ret, node, pushtoarray, INT2FIX(1));
9442 ADD_SEND_R(ret, node, idASET, argc, NULL, INT2FIX(flag), NULL);
9445 ADD_SEND_R(ret, node, idASET, FIXNUM_INC(argc, 1), NULL, INT2FIX(flag), NULL);
9447 ADD_INSN(ret, node, pop);
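/* NODE_OP_ASGN2: recv.attr op= value, including the recv&.attr form (nd_aid), which skips the
 * whole assignment when the receiver is nil. */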
9455 const int line = nd_line(node);
9456 ID atype = RNODE_OP_ASGN2(node)->nd_mid;
9459 LABEL *lfin = NEW_LABEL(line);
9460 LABEL *lcfin = NEW_LABEL(line);
9515 asgnflag = COMPILE_RECV(ret, "NODE_OP_ASGN2#recv", node, RNODE_OP_ASGN2(node)->nd_recv);
9516 CHECK(asgnflag != -1);
9517 if (RNODE_OP_ASGN2(node)->nd_aid) {
9518 lskip = NEW_LABEL(line);
9519 ADD_INSN(ret, node, dup);
9520 ADD_INSNL(ret, node, branchnil, lskip);
9522 ADD_INSN(ret, node, dup);
9523 ADD_SEND_WITH_FLAG(ret, node, vid, INT2FIX(0), INT2FIX(asgnflag));
9525 if (atype == idOROP || atype == idANDOP) {
9527 ADD_INSN(ret, node, dup);
9529 if (atype == idOROP) {
9530 ADD_INSNL(ret, node, branchif, lcfin);
9533 ADD_INSNL(ret, node, branchunless, lcfin);
9536 ADD_INSN(ret, node, pop);
9538 CHECK(COMPILE(ret, "NODE_OP_ASGN2 val", RNODE_OP_ASGN2(node)->nd_value));
9540 ADD_INSN(ret, node, swap);
9541 ADD_INSN1(ret, node, topn, INT2FIX(1));
9543 ADD_SEND_WITH_FLAG(ret, node, aid, INT2FIX(1), INT2FIX(asgnflag));
9544 ADD_INSNL(ret, node, jump, lfin);
9546 ADD_LABEL(ret, lcfin);
9548 ADD_INSN(ret, node, swap);
9551 ADD_LABEL(ret, lfin);
9554 CHECK(COMPILE(ret, "NODE_OP_ASGN2 val", RNODE_OP_ASGN2(node)->nd_value));
9555 ADD_SEND(ret, node, atype, INT2FIX(1));
9557 ADD_INSN(ret, node, swap);
9558 ADD_INSN1(ret, node, topn, INT2FIX(1));
9560 ADD_SEND_WITH_FLAG(ret, node, aid, INT2FIX(1), INT2FIX(asgnflag));
9562 if (lskip && popped) {
9563 ADD_LABEL(ret, lskip);
9565 ADD_INSN(ret, node, pop);
9566 if (lskip && !popped) {
9567 ADD_LABEL(ret, lskip);
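/* NODE_OP_CDECL: constant op-assignment such as A::B ||= v. For ||= the constant is first tested
 * with defined/getconstant; the right-hand side goes through the shareable-constant handling
 * below before setconstant. */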
9572 static int compile_shareable_constant_value(rb_iseq_t *iseq, LINK_ANCHOR *ret, enum rb_parser_shareability shareable, const NODE *lhs, const NODE *value);
9577 const int line = nd_line(node);
9582 switch (nd_type(RNODE_OP_CDECL(node)->nd_head)) {
9584 ADD_INSN1(ret, node, putobject, rb_cObject);
9587 CHECK(COMPILE(ret, "NODE_OP_CDECL/colon2#nd_head", RNODE_COLON2(RNODE_OP_CDECL(node)->nd_head)->nd_head));
9590 COMPILE_ERROR(ERROR_ARGS "%s: invalid node in NODE_OP_CDECL",
9591 ruby_node_name(nd_type(RNODE_OP_CDECL(node)->nd_head)));
9594 mid = get_node_colon_nd_mid(RNODE_OP_CDECL(node)->nd_head);
9596 if (RNODE_OP_CDECL(node)->nd_aid == idOROP) {
9597 lassign = NEW_LABEL(line);
9598 ADD_INSN(ret, node, dup);
9599 ADD_INSN3(ret, node, defined, INT2FIX(DEFINED_CONST_FROM),
9601 ADD_INSNL(ret, node, branchunless, lassign);
9603 ADD_INSN(ret, node, dup);
9604 ADD_INSN1(ret, node, putobject, Qtrue);
9605 ADD_INSN1(ret, node, getconstant, ID2SYM(mid));
9607 if (RNODE_OP_CDECL(node)->nd_aid == idOROP || RNODE_OP_CDECL(node)->nd_aid == idANDOP) {
9608 lfin = NEW_LABEL(line);
9609 if (!popped) ADD_INSN(ret, node, dup);
9610 if (RNODE_OP_CDECL(node)->nd_aid == idOROP)
9611 ADD_INSNL(ret, node, branchif, lfin);
9613 ADD_INSNL(ret, node, branchunless, lfin);
9615 if (!popped) ADD_INSN(ret, node, pop);
9616 if (lassign) ADD_LABEL(ret, lassign);
9617 CHECK(compile_shareable_constant_value(iseq, ret, RNODE_OP_CDECL(node)->shareability, RNODE_OP_CDECL(node)->nd_head, RNODE_OP_CDECL(node)->nd_value));
9620 ADD_INSN1(ret, node, topn, INT2FIX(1));
9622 ADD_INSN1(ret, node, dupn, INT2FIX(2));
9623 ADD_INSN(ret, node, swap);
9625 ADD_INSN1(ret, node, setconstant, ID2SYM(mid));
9626 ADD_LABEL(ret, lfin);
9627 if (!popped) ADD_INSN(ret, node, swap);
9628 ADD_INSN(ret, node, pop);
9631 CHECK(compile_shareable_constant_value(iseq, ret, RNODE_OP_CDECL(node)->shareability, RNODE_OP_CDECL(node)->nd_head, RNODE_OP_CDECL(node)->nd_value));
9633 ADD_CALL(ret, node, RNODE_OP_CDECL(node)->nd_aid, INT2FIX(1));
9635 ADD_INSN(ret, node, swap);
9637 ADD_INSN1(ret, node, topn, INT2FIX(1));
9638 ADD_INSN(ret, node, swap);
9640 ADD_INSN1(ret, node, setconstant, ID2SYM(mid));
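/* NODE_OP_ASGN_AND / NODE_OP_ASGN_OR: lhs &&= value and lhs ||= value. For ||= on anything but an
 * instance variable, defined? is checked first so an undefined lhs assigns instead of raising. */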
9648 const int line = nd_line(node);
9649 LABEL *lfin = NEW_LABEL(line);
9652 if (type == NODE_OP_ASGN_OR && !nd_type_p(RNODE_OP_ASGN_OR(node)->nd_head, NODE_IVAR)) {
9656 defined_expr(iseq, ret, RNODE_OP_ASGN_OR(node)->nd_head, lfinish, Qfalse, false);
9657 lassign = lfinish[1];
9659 lassign = NEW_LABEL(line);
9661 ADD_INSNL(ret, node, branchunless, lassign);
9664 lassign = NEW_LABEL(line);
9667 CHECK(COMPILE(ret, "NODE_OP_ASGN_AND/OR#nd_head", RNODE_OP_ASGN_OR(node)->nd_head));
9670 ADD_INSN(ret, node, dup);
9673 if (type == NODE_OP_ASGN_AND) {
9674 ADD_INSNL(ret, node, branchunless, lfin);
9677 ADD_INSNL(ret, node, branchif, lfin);
9681 ADD_INSN(ret, node, pop);
9684 ADD_LABEL(ret, lassign);
9685 CHECK(COMPILE_(ret, "NODE_OP_ASGN_AND/OR#nd_value", RNODE_OP_ASGN_OR(node)->nd_value, popped));
9686 ADD_LABEL(ret, lfin);
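/* NODE_SUPER / NODE_ZSUPER: explicit super compiles its argument list; zsuper re-pushes the
 * current method's parameters (lead, opt, rest, post, keywords, kwrest) from the local table so
 * they are forwarded unchanged. */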
9696 unsigned int flag = 0;
9698 const rb_iseq_t *parent_block = ISEQ_COMPILE_DATA(iseq)->current_block;
9702 ISEQ_COMPILE_DATA(iseq)->current_block = NULL;
9704 if (type == NODE_SUPER) {
9705 VALUE vargc = setup_args(iseq, args, RNODE_SUPER(node)->nd_args, &flag, &keywords);
9706 CHECK(!NIL_P(vargc));
9708 if ((flag & VM_CALL_ARGS_BLOCKARG) && (flag & VM_CALL_KW_SPLAT) && !(flag & VM_CALL_KW_SPLAT_MUT)) {
9709 ADD_INSN(args, node, splatkw);
9712 if (flag & VM_CALL_ARGS_BLOCKARG) {
9719 const rb_iseq_t *liseq = body->local_iseq;
9721 const struct rb_iseq_param_keyword *const local_kwd = local_body->param.keyword;
9722 int lvar_level = get_lvar_level(iseq);
9724 argc = local_body->param.lead_num;
9727 for (i = 0; i < local_body->param.lead_num; i++) {
9728 int idx = local_body->local_table_size - i;
9729 ADD_GETLOCAL(args, node, idx, lvar_level);
9733 if (local_body->param.flags.forwardable) {
9734 flag |= VM_CALL_FORWARDING;
9735 int idx = local_body->local_table_size - get_local_var_idx(liseq, idDot3);
9736 ADD_GETLOCAL(args, node, idx, lvar_level);
9739 if (local_body->param.flags.has_opt) {
9742 for (j = 0; j < local_body->param.opt_num; j++) {
9743 int idx = local_body->local_table_size - (i + j);
9744 ADD_GETLOCAL(args, node, idx, lvar_level);
9749 if (local_body->param.flags.has_rest) {
9751 int idx = local_body->local_table_size - local_body->param.rest_start;
9752 ADD_GETLOCAL(args, node, idx, lvar_level);
9753 ADD_INSN1(args, node, splatarray, RBOOL(local_body->param.flags.has_post));
9755 argc = local_body->param.rest_start + 1;
9756 flag |= VM_CALL_ARGS_SPLAT;
9758 if (local_body->param.flags.has_post) {
9760 int post_len = local_body->param.post_num;
9761 int post_start = local_body->param.post_start;
9763 if (local_body->param.flags.has_rest) {
9765 for (j=0; j<post_len; j++) {
9766 int idx = local_body->local_table_size - (post_start + j);
9767 ADD_GETLOCAL(args, node, idx, lvar_level);
9769 ADD_INSN1(args, node, pushtoarray, INT2FIX(j));
9770 flag |= VM_CALL_ARGS_SPLAT_MUT;
9775 for (j=0; j<post_len; j++) {
9776 int idx = local_body->local_table_size - (post_start + j);
9777 ADD_GETLOCAL(args, node, idx, lvar_level);
9779 argc = post_len + post_start;
9783 if (local_body->param.flags.has_kw) {
9784 int local_size = local_body->local_table_size;
9787 ADD_INSN1(args, node, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
9789 if (local_body->param.flags.has_kwrest) {
9790 int idx = local_body->local_table_size - local_kwd->rest_start;
9791 ADD_GETLOCAL(args, node, idx, lvar_level);
9796 ADD_INSN1(args, node, newhash, INT2FIX(0));
9798 for (i = 0; i < local_kwd->num; ++i) {
9799 ID id = local_kwd->table[i];
9800 int idx = local_size - get_local_var_idx(liseq, id);
9801 ADD_INSN1(args, node, putobject, ID2SYM(id));
9802 ADD_GETLOCAL(args, node, idx, lvar_level);
9804 ADD_SEND(args, node, id_core_hash_merge_ptr, INT2FIX(i * 2 + 1));
9805 flag |= VM_CALL_KW_SPLAT| VM_CALL_KW_SPLAT_MUT;
9807 else if (local_body->param.flags.has_kwrest) {
9808 int idx = local_body->local_table_size - local_kwd->rest_start;
9809 ADD_GETLOCAL(args, node, idx, lvar_level);
9811 flag |= VM_CALL_KW_SPLAT;
9815 if (use_block && parent_block == NULL) {
9816 iseq_set_use_block(ISEQ_BODY(iseq)->local_iseq);
9819 flag |= VM_CALL_SUPER | VM_CALL_FCALL;
9820 if (type == NODE_ZSUPER) flag |= VM_CALL_ZSUPER;
9821 ADD_INSN(ret, node, putself);
9824 const struct rb_callinfo * ci = new_callinfo(iseq, 0, argc, flag, keywords, parent_block != NULL);
9826 if (vm_ci_flag(ci) & VM_CALL_FORWARDING) {
9827 ADD_INSN2(ret, node, invokesuperforward, ci, parent_block);
9830 ADD_INSN2(ret, node, invokesuper, ci, parent_block);
9834 ADD_INSN(ret, node, pop);
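/* NODE_YIELD: rejects yield at the top level or in a class body, compiles the arguments, and
 * emits invokeblock; access_outer_variables marks the outer iseqs when yielding from a nested
 * block. */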
9844 unsigned int flag = 0;
9849 switch (ISEQ_BODY(ISEQ_BODY(iseq)->local_iseq)->type) {
9851 case ISEQ_TYPE_MAIN:
9852 case ISEQ_TYPE_CLASS:
9853 COMPILE_ERROR(ERROR_ARGS "Invalid yield");
9858 if (RNODE_YIELD(node)->nd_head) {
9859 argc = setup_args(iseq, args, RNODE_YIELD(node)->nd_head, &flag, &keywords);
9860 CHECK(!NIL_P(argc));
9867 ADD_INSN1(ret, node, invokeblock, new_callinfo(iseq, 0, FIX2INT(argc), flag, keywords, FALSE));
9868 iseq_set_use_block(ISEQ_BODY(iseq)->local_iseq);
9871 ADD_INSN(ret, node, pop);
9876 for (; tmp_iseq != ISEQ_BODY(iseq)->local_iseq; level++ ) {
9877 tmp_iseq = ISEQ_BODY(tmp_iseq)->parent_iseq;
9879 if (level > 0) access_outer_variables(iseq, level, rb_intern("yield"), true);
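/* NODE_MATCH, NODE_MATCH2, NODE_MATCH3: regexp matching via =~, plus named-capture assignment
 * when a MATCH2 has nd_args. */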
9892 switch ((int)type) {
9894 ADD_INSN1(recv, node, putobject, rb_node_regx_string_val(node));
9895 ADD_INSN2(val, node, getspecial, INT2FIX(0),
9899 CHECK(COMPILE(recv, "receiver", RNODE_MATCH2(node)->nd_recv));
9900 CHECK(COMPILE(val, "value", RNODE_MATCH2(node)->nd_value));
9903 CHECK(COMPILE(recv, "receiver", RNODE_MATCH3(node)->nd_value));
9904 CHECK(COMPILE(val, "value", RNODE_MATCH3(node)->nd_recv));
9910 ADD_SEND(ret, node, idEqTilde, INT2FIX(1));
9912 if (nd_type_p(node, NODE_MATCH2) && RNODE_MATCH2(node)->nd_args) {
9913 compile_named_capture_assign(iseq, ret, RNODE_MATCH2(node)->nd_args);
9917 ADD_INSN(ret, node, pop);
9928 if (ISEQ_COMPILE_DATA(iseq)->option->inline_const_cache &&
9929 (segments = collect_const_segments(iseq, node))) {
9930 ISEQ_BODY(iseq)->ic_size++;
9931 ADD_INSN1(ret, node, opt_getconstant_path, segments);
9941 CHECK(compile_const_prefix(iseq, node, pref, body));
9942 if (LIST_INSN_SIZE_ZERO(pref)) {
9943 ADD_INSN(ret, node, putnil);
9954 ADD_CALL_RECEIVER(ret, node);
9955 CHECK(COMPILE(ret, "colon2#nd_head", RNODE_COLON2(node)->nd_head));
9956 ADD_CALL(ret, node, RNODE_COLON2(node)->nd_mid, INT2FIX(1));
9959 ADD_INSN(ret, node, pop);
9967 debugi("colon3#nd_mid", RNODE_COLON3(node)->nd_mid);
9970 if (ISEQ_COMPILE_DATA(iseq)->option->inline_const_cache) {
9971 ISEQ_BODY(iseq)->ic_size++;
9973 ADD_INSN1(ret, node, opt_getconstant_path, segments);
9977 ADD_INSN1(ret, node, putobject, rb_cObject);
9978 ADD_INSN1(ret, node, putobject, Qtrue);
9979 ADD_INSN1(ret, node, getconstant, ID2SYM(RNODE_COLON3(node)->nd_mid));
9983 ADD_INSN(ret, node, pop);
9992 const NODE *b = RNODE_DOT2(node)->nd_beg;
9993 const NODE *e = RNODE_DOT2(node)->nd_end;
9995 if (optimizable_range_item_p(b) && optimizable_range_item_p(e)) {
9997 VALUE bv = optimized_range_item(b);
9998 VALUE ev = optimized_range_item(e);
10000 ADD_INSN1(ret, node, putobject, val);
10005 CHECK(COMPILE_(ret, "min", b, popped));
10006 CHECK(COMPILE_(ret, "max", e, popped));
10008 ADD_INSN1(ret, node, newrange, flag);
10018 if (ISEQ_BODY(iseq)->type == ISEQ_TYPE_RESCUE) {
10019 ADD_GETLOCAL(ret, node, LVAR_ERRINFO, 0);
10025 if (ISEQ_BODY(ip)->type == ISEQ_TYPE_RESCUE) {
10028 ip = ISEQ_BODY(ip)->parent_iseq;
10032 ADD_GETLOCAL(ret, node, LVAR_ERRINFO, level);
10035 ADD_INSN(ret, node, putnil);
10046 LABEL *end_label = NEW_LABEL(nd_line(node));
10047 const NODE *default_value = get_nd_value(RNODE_KW_ARG(node)->nd_body);
10049 if (default_value == NODE_SPECIAL_REQUIRED_KEYWORD) {
10051 COMPILE_ERROR(ERROR_ARGS "unreachable");
10054 else if (nd_type_p(default_value, NODE_SYM) ||
10055 nd_type_p(default_value, NODE_REGX) ||
10056 nd_type_p(default_value, NODE_LINE) ||
10057 nd_type_p(default_value, NODE_INTEGER) ||
10058 nd_type_p(default_value, NODE_FLOAT) ||
10059 nd_type_p(default_value, NODE_RATIONAL) ||
10060 nd_type_p(default_value, NODE_IMAGINARY) ||
10061 nd_type_p(default_value, NODE_NIL) ||
10062 nd_type_p(default_value, NODE_TRUE) ||
10063 nd_type_p(default_value, NODE_FALSE)) {
10064 COMPILE_ERROR(ERROR_ARGS "unreachable");
10072 int kw_bits_idx = body->local_table_size - body->param.keyword->bits_start;
10073 int keyword_idx = body->param.keyword->num;
10075 ADD_INSN2(ret, node, checkkeyword, INT2FIX(kw_bits_idx + VM_ENV_DATA_SIZE - 1), INT2FIX(keyword_idx));
10076 ADD_INSNL(ret, node, branchif, end_label);
10077 CHECK(COMPILE_POPPED(ret, "keyword default argument", RNODE_KW_ARG(node)->nd_body));
10078 ADD_LABEL(ret, end_label);
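/* NODE_ATTRASGN: attribute assignment. recv["str"] = value with a single literal string key
 * specializes to opt_aset_with; otherwise the receiver and arguments are compiled and idASET is
 * sent, keeping the assigned value on the stack unless popped. */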
10088 unsigned int flag = 0;
10089 ID mid = RNODE_ATTRASGN(node)->nd_mid;
10091 LABEL *else_label = NULL;
10097 if (mid == idASET && !private_recv_p(node) && RNODE_ATTRASGN(node)->nd_args &&
10098 nd_type_p(RNODE_ATTRASGN(node)->nd_args, NODE_LIST) && RNODE_LIST(RNODE_ATTRASGN(node)->nd_args)->as.nd_alen == 2 &&
10099 (nd_type_p(RNODE_LIST(RNODE_ATTRASGN(node)->nd_args)->nd_head, NODE_STR) || nd_type_p(RNODE_LIST(RNODE_ATTRASGN(node)->nd_args)->nd_head, NODE_FILE)) &&
10100 ISEQ_COMPILE_DATA(iseq)->current_block == NULL &&
10101 !frozen_string_literal_p(iseq) &&
10102 ISEQ_COMPILE_DATA(iseq)->option->specialized_instruction)
10104 VALUE str = get_string_value(RNODE_LIST(RNODE_ATTRASGN(node)->nd_args)->nd_head);
10105 CHECK(COMPILE(ret, "recv", RNODE_ATTRASGN(node)->nd_recv));
10106 CHECK(COMPILE(ret, "value", RNODE_LIST(RNODE_LIST(RNODE_ATTRASGN(node)->nd_args)->nd_next)->nd_head));
10108 ADD_INSN(ret, node, swap);
10109 ADD_INSN1(ret, node, topn, INT2FIX(1));
10111 ADD_INSN2(ret, node, opt_aset_with, str,
10112 new_callinfo(iseq, idASET, 2, 0, NULL, FALSE));
10114 ADD_INSN(ret, node, pop);
10120 argc = setup_args(iseq, args, RNODE_ATTRASGN(node)->nd_args, &flag, NULL);
10121 CHECK(!NIL_P(argc));
10123 int asgnflag = COMPILE_RECV(recv, "recv", node, RNODE_ATTRASGN(node)->nd_recv);
10124 CHECK(asgnflag != -1);
10125 flag |= (unsigned int)asgnflag;
10127 debugp_param("argc", argc);
10128 debugp_param("nd_mid", ID2SYM(mid));
10133 else_label = qcall_branch_start(iseq, recv, &branches, node, node);
10136 ADD_INSN(ret, node, putnil);
10137 ADD_SEQ(ret, recv);
10138 ADD_SEQ(ret, args);
10140 if (flag & VM_CALL_ARGS_SPLAT) {
10141 ADD_INSN(ret, node, dup);
10142 ADD_INSN1(ret, node, putobject, INT2FIX(-1));
10143 ADD_SEND_WITH_FLAG(ret, node, idAREF, INT2FIX(1), INT2FIX(asgnflag));
10144 ADD_INSN1(ret, node, setn, FIXNUM_INC(argc, 2));
10145 ADD_INSN (ret, node, pop);
10148 ADD_INSN1(ret, node, setn, FIXNUM_INC(argc, 1));
10152 ADD_SEQ(ret, recv);
10153 ADD_SEQ(ret, args);
10155 ADD_SEND_WITH_FLAG(ret, node, mid, argc, INT2FIX(flag));
10156 qcall_branch_end(iseq, ret, else_label, branches, node, node);
10157 ADD_INSN(ret, node, pop);
10164 ADD_INSN1(ret, value, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
10186 node_const_decl_val(const NODE *node)
10189 switch (nd_type(node)) {
10191 if (RNODE_CDECL(node)->nd_vid) {
10192 path = rb_id2str(RNODE_CDECL(node)->nd_vid);
10196 node = RNODE_CDECL(node)->nd_else;
10207 rb_bug("unexpected node: %s", ruby_node_name(nd_type(node)));
10213 for (; node && nd_type_p(node, NODE_COLON2); node = RNODE_COLON2(node)->nd_head) {
10216 if (node && nd_type_p(node, NODE_CONST)) {
10220 else if (node && nd_type_p(node, NODE_COLON3)) {
10232 path = rb_fstring(path);
10237 const_decl_path(NODE *dest)
10240 if (!nd_type_p(dest, NODE_CALL)) {
10241 path = node_const_decl_val(dest);
10252 VALUE path = const_decl_path(dest);
10253 ADD_INSN1(ret, value, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
10254 CHECK(COMPILE(ret, "compile_ensure_shareable_node", value));
10255 ADD_INSN1(ret, value, putobject, path);
10262 #ifndef SHAREABLE_BARE_EXPRESSION
10263 #define SHAREABLE_BARE_EXPRESSION 1
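/* Shareable constant values (# shareable_constant_value): literal right-hand sides of constant
 * assignments are compiled to frozen/shareable objects, recursing through array and hash
 * literals; non-literal parts are wrapped by compile_make_shareable_node /
 * compile_ensure_shareable_node. */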
10267 compile_shareable_literal_constant(rb_iseq_t *iseq, LINK_ANCHOR *ret, enum rb_parser_shareability shareable, NODE *dest, const NODE *node, size_t level, VALUE *value_p, int *shareable_literal_p)
10269 # define compile_shareable_literal_constant_next(node, anchor, value_p, shareable_literal_p) \
10270 compile_shareable_literal_constant(iseq, anchor, shareable, dest, node, level+1, value_p, shareable_literal_p)
10272 DECL_ANCHOR(anchor);
10274 enum node_type type = nd_type(node);
10286 *value_p = rb_node_sym_string_val(node);
10289 *value_p = rb_node_regx_string_val(node);
10292 *value_p = rb_node_line_lineno_val(node);
10295 *value_p = rb_node_integer_literal_val(node);
10298 *value_p = rb_node_float_literal_val(node);
10300 case NODE_RATIONAL:
10301 *value_p = rb_node_rational_literal_val(node);
10303 case NODE_IMAGINARY:
10304 *value_p = rb_node_imaginary_literal_val(node);
10306 case NODE_ENCODING:
10307 *value_p = rb_node_encoding_val(node);
10310 CHECK(COMPILE(ret, "shareable_literal_constant", node));
10311 *shareable_literal_p = 1;
10315 CHECK(COMPILE(ret, "shareable_literal_constant", node));
10316 if (shareable == rb_parser_shareable_literal) {
10322 ADD_SEND_WITH_FLAG(ret, node, idUMinus, INT2FIX(0), INT2FIX(VM_CALL_ARGS_SIMPLE));
10325 *shareable_literal_p = 1;
10329 VALUE lit = rb_node_str_string_val(node);
10330 ADD_INSN1(ret, node, putobject, lit);
10333 *shareable_literal_p = 1;
10339 VALUE lit = rb_node_file_path_val(node);
10340 ADD_INSN1(ret, node, putobject, lit);
10343 *shareable_literal_p = 1;
10351 ADD_INSN1(ret, node, putobject, lit);
10354 *shareable_literal_p = 1;
10360 INIT_ANCHOR(anchor);
10362 for (NODE *n = (NODE *)node; n; n = RNODE_LIST(n)->nd_next) {
10364 int shareable_literal_p2;
10365 NODE *elt = RNODE_LIST(n)->nd_head;
10367 CHECK(compile_shareable_literal_constant_next(elt, anchor, &val, &shareable_literal_p2));
10368 if (shareable_literal_p2) {
10371 else if (
RTEST(lit)) {
10377 if (!UNDEF_P(val)) {
10389 if (!RNODE_HASH(node)->nd_brace) {
10391 *shareable_literal_p = 0;
10395 INIT_ANCHOR(anchor);
10397 for (
NODE *n = RNODE_HASH(node)->nd_head; n; n = RNODE_LIST(RNODE_LIST(n)->nd_next)->nd_next) {
10400 int shareable_literal_p2;
10401 NODE *key = RNODE_LIST(n)->nd_head;
10402 NODE *val = RNODE_LIST(RNODE_LIST(n)->nd_next)->nd_head;
10404 CHECK(compile_shareable_literal_constant_next(key, anchor, &key_val, &shareable_literal_p2));
10405 if (shareable_literal_p2) {
10408 else if (
RTEST(lit)) {
10414 CHECK(compile_shareable_literal_constant_next(val, anchor, &value_val, &shareable_literal_p2));
10415 if (shareable_literal_p2) {
10418 else if (
RTEST(lit)) {
10424 if (!UNDEF_P(key_val) && !UNDEF_P(value_val)) {
10437 if (shareable == rb_parser_shareable_literal &&
10438 (SHAREABLE_BARE_EXPRESSION || level > 0)) {
10439 CHECK(compile_ensure_shareable_node(iseq, ret, dest, node));
10441 *shareable_literal_p = 1;
10444 CHECK(COMPILE(ret,
"shareable_literal_constant", node));
10446 *shareable_literal_p = 0;
10452 if (nd_type(node) == NODE_LIST) {
10453 ADD_INSN1(anchor, node, newarray,
INT2FIX(RNODE_LIST(node)->as.nd_alen));
10455 else if (nd_type(node) == NODE_HASH) {
10456 int len = (int)RNODE_LIST(RNODE_HASH(node)->nd_head)->as.nd_alen;
10457 ADD_INSN1(anchor, node, newhash,
INT2FIX(
len));
10460 *shareable_literal_p = 0;
10461 ADD_SEQ(ret, anchor);
10467 if (nd_type(node) == NODE_LIST) {
10468 ADD_INSN1(anchor, node, newarray,
INT2FIX(RNODE_LIST(node)->as.nd_alen));
10470 else if (nd_type(node) == NODE_HASH) {
10471 int len = (int)RNODE_LIST(RNODE_HASH(node)->nd_head)->as.nd_alen;
10472 ADD_INSN1(anchor, node, newhash,
INT2FIX(
len));
10474 CHECK(compile_make_shareable_node(iseq, ret, anchor, node,
false));
10476 *shareable_literal_p = 1;
10480 ADD_INSN1(ret, node, putobject, val);
10483 *shareable_literal_p = 1;
10490 compile_shareable_constant_value(rb_iseq_t *iseq, LINK_ANCHOR *ret, enum rb_parser_shareability shareable, const NODE *lhs, const NODE *value)
10494 DECL_ANCHOR(anchor);
10495 INIT_ANCHOR(anchor);
10497 switch (shareable) {
10498 case rb_parser_shareable_none:
10499 CHECK(COMPILE(ret, "compile_shareable_constant_value", value));
10502 case rb_parser_shareable_literal:
10503 CHECK(compile_shareable_literal_constant(iseq, anchor, shareable, (NODE *)lhs, value, 0, &val, &literal_p));
10504 ADD_SEQ(ret, anchor);
10507 case rb_parser_shareable_copy:
10508 case rb_parser_shareable_everything:
10509 CHECK(compile_shareable_literal_constant(iseq, anchor, shareable, (NODE *)lhs, value, 0, &val, &literal_p));
10511 CHECK(compile_make_shareable_node(iseq, ret, anchor, value, shareable == rb_parser_shareable_copy));
10514 ADD_SEQ(ret, anchor);
10518 rb_bug("unexpected rb_parser_shareability: %d", shareable);
10535 int lineno = ISEQ_COMPILE_DATA(iseq)->last_line;
10536 if (lineno == 0) lineno = FIX2INT(rb_iseq_first_lineno(iseq));
10537 debugs("node: NODE_NIL(implicit)\n");
10538 ADD_SYNTHETIC_INSN(ret, lineno, -1, putnil);
10542 return iseq_compile_each0(iseq, ret, node, popped);
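/* iseq_compile_each0: the main per-node dispatch. Each NODE_* case either calls a dedicated
 * compile_* helper or emits instructions inline; popped tells whether the node's value is
 * discarded. */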
10548 const int line = (int)nd_line(node);
10549 const enum node_type type = nd_type(node);
10552 if (ISEQ_COMPILE_DATA(iseq)->last_line == line) {
10556 if (nd_fl_newline(node)) {
10558 ISEQ_COMPILE_DATA(iseq)->last_line = line;
10559 if (ISEQ_COVERAGE(iseq) && ISEQ_LINE_COVERAGE(iseq)) {
10560 event |= RUBY_EVENT_COVERAGE_LINE;
10562 ADD_TRACE(ret, event);
10566 debug_node_start(node);
10567 #undef BEFORE_RETURN
10568 #define BEFORE_RETURN debug_node_end()
10572 CHECK(compile_block(iseq, ret, node, popped));
10576 CHECK(compile_if(iseq, ret, node, popped,
type));
10579 CHECK(compile_case(iseq, ret, node, popped));
10582 CHECK(compile_case2(iseq, ret, node, popped));
10585 CHECK(compile_case3(iseq, ret, node, popped));
10589 CHECK(compile_loop(iseq, ret, node, popped,
type));
10593 CHECK(compile_iter(iseq, ret, node, popped));
10595 case NODE_FOR_MASGN:
10596 CHECK(compile_for_masgn(iseq, ret, node, popped));
10599 CHECK(compile_break(iseq, ret, node, popped));
10602 CHECK(compile_next(iseq, ret, node, popped));
10605 CHECK(compile_redo(iseq, ret, node, popped));
10608 CHECK(compile_retry(iseq, ret, node, popped));
10611 CHECK(COMPILE_(ret,
"NODE_BEGIN", RNODE_BEGIN(node)->nd_body, popped));
10615 CHECK(compile_rescue(iseq, ret, node, popped));
10618 CHECK(compile_resbody(iseq, ret, node, popped));
10621 CHECK(compile_ensure(iseq, ret, node, popped));
10626 LABEL *end_label = NEW_LABEL(line);
10627 CHECK(COMPILE(ret,
"nd_1st", RNODE_OR(node)->nd_1st));
10629 ADD_INSN(ret, node, dup);
10631 if (
type == NODE_AND) {
10632 ADD_INSNL(ret, node, branchunless, end_label);
10635 ADD_INSNL(ret, node, branchif, end_label);
10638 ADD_INSN(ret, node, pop);
10640 CHECK(COMPILE_(ret,
"nd_2nd", RNODE_OR(node)->nd_2nd, popped));
10641 ADD_LABEL(ret, end_label);
10646 compile_massign(iseq, ret, node, popped);
10651 ID id = RNODE_LASGN(node)->nd_vid;
10652 int idx = ISEQ_BODY(body->local_iseq)->local_table_size - get_local_var_idx(iseq, id);
10654 debugs("lvar: %s idx: %d\n", rb_id2name(id), idx);
10655 CHECK(COMPILE(ret, "rvalue", RNODE_LASGN(node)->nd_value));
10658 ADD_INSN(ret, node, dup);
10660 ADD_SETLOCAL(ret, node, idx, get_lvar_level(iseq));
10665 ID id = RNODE_DASGN(node)->nd_vid;
10666 CHECK(COMPILE(ret, "dvalue", RNODE_DASGN(node)->nd_value));
10667 debugi("dassn id", rb_id2str(id) ? id : '*');
10670 ADD_INSN(ret, node, dup);
10673 idx = get_dyna_var_idx(iseq, id, &lv, &ls);
10676 COMPILE_ERROR(ERROR_ARGS "NODE_DASGN: unknown id (%"PRIsVALUE")",
10680 ADD_SETLOCAL(ret, node, ls - idx, lv);
10684 CHECK(COMPILE(ret,
"lvalue", RNODE_GASGN(node)->nd_value));
10687 ADD_INSN(ret, node, dup);
10689 ADD_INSN1(ret, node, setglobal,
ID2SYM(RNODE_GASGN(node)->nd_vid));
10693 CHECK(COMPILE(ret,
"lvalue", RNODE_IASGN(node)->nd_value));
10695 ADD_INSN(ret, node, dup);
10697 ADD_INSN2(ret, node, setinstancevariable,
10698 ID2SYM(RNODE_IASGN(node)->nd_vid),
10699 get_ivar_ic_value(iseq,RNODE_IASGN(node)->nd_vid));
10703 if (RNODE_CDECL(node)->nd_vid) {
10704 CHECK(compile_shareable_constant_value(iseq, ret, RNODE_CDECL(node)->shareability, node, RNODE_CDECL(node)->nd_value));
10707 ADD_INSN(ret, node, dup);
10710 ADD_INSN1(ret, node, putspecialobject,
10711 INT2FIX(VM_SPECIAL_OBJECT_CONST_BASE));
10712 ADD_INSN1(ret, node, setconstant,
ID2SYM(RNODE_CDECL(node)->nd_vid));
10715 compile_cpath(ret, iseq, RNODE_CDECL(node)->nd_else);
10716 CHECK(compile_shareable_constant_value(iseq, ret, RNODE_CDECL(node)->shareability, node, RNODE_CDECL(node)->nd_value));
10717 ADD_INSN(ret, node, swap);
10720 ADD_INSN1(ret, node, topn,
INT2FIX(1));
10721 ADD_INSN(ret, node, swap);
10724 ADD_INSN1(ret, node, setconstant,
ID2SYM(get_node_colon_nd_mid(RNODE_CDECL(node)->nd_else)));
10729 CHECK(COMPILE(ret,
"cvasgn val", RNODE_CVASGN(node)->nd_value));
10731 ADD_INSN(ret, node, dup);
10733 ADD_INSN2(ret, node, setclassvariable,
10734 ID2SYM(RNODE_CVASGN(node)->nd_vid),
10735 get_cvar_ic_value(iseq, RNODE_CVASGN(node)->nd_vid));
10738 case NODE_OP_ASGN1:
10739 CHECK(compile_op_asgn1(iseq, ret, node, popped));
10741 case NODE_OP_ASGN2:
10742 CHECK(compile_op_asgn2(iseq, ret, node, popped));
10744 case NODE_OP_CDECL:
10745 CHECK(compile_op_cdecl(iseq, ret, node, popped));
10747 case NODE_OP_ASGN_AND:
10748 case NODE_OP_ASGN_OR:
10749 CHECK(compile_op_log(iseq, ret, node, popped, type));
10753 if (compile_call_precheck_freeze(iseq, ret, node, node, popped) == TRUE) {
10759 if (compile_call(iseq, ret, node, type, node, popped, false) == COMPILE_NG) {
10765 CHECK(compile_super(iseq, ret, node, popped, type));
10768 CHECK(compile_array(iseq, ret, node, popped, TRUE) >= 0);
10773 ADD_INSN1(ret, node, newarray,
INT2FIX(0));
10778 CHECK(compile_hash(iseq, ret, node, FALSE, popped) >= 0);
10781 CHECK(compile_return(iseq, ret, node, popped));
10784 CHECK(compile_yield(iseq, ret, node, popped));
10788 compile_lvar(iseq, ret, node, RNODE_LVAR(node)->nd_vid);
10794 debugi(
"nd_vid", RNODE_DVAR(node)->nd_vid);
10796 idx = get_dyna_var_idx(iseq, RNODE_DVAR(node)->nd_vid, &lv, &ls);
10798 COMPILE_ERROR(ERROR_ARGS
"unknown dvar (%"PRIsVALUE
")",
10802 ADD_GETLOCAL(ret, node, ls - idx, lv);
10807 ADD_INSN1(ret, node, getglobal,
ID2SYM(RNODE_GVAR(node)->nd_vid));
10809 ADD_INSN(ret, node, pop);
10814 debugi(
"nd_vid", RNODE_IVAR(node)->nd_vid);
10816 ADD_INSN2(ret, node, getinstancevariable,
10817 ID2SYM(RNODE_IVAR(node)->nd_vid),
10818 get_ivar_ic_value(iseq, RNODE_IVAR(node)->nd_vid));
10823 debugi(
"nd_vid", RNODE_CONST(node)->nd_vid);
10825 if (ISEQ_COMPILE_DATA(iseq)->option->inline_const_cache) {
10828 ADD_INSN1(ret, node, opt_getconstant_path, segments);
10832 ADD_INSN(ret, node, putnil);
10833 ADD_INSN1(ret, node, putobject,
Qtrue);
10834 ADD_INSN1(ret, node, getconstant,
ID2SYM(RNODE_CONST(node)->nd_vid));
10838 ADD_INSN(ret, node, pop);
10844 ADD_INSN2(ret, node, getclassvariable,
10845 ID2SYM(RNODE_CVAR(node)->nd_vid),
10846 get_cvar_ic_value(iseq, RNODE_CVAR(node)->nd_vid));
10850 case NODE_NTH_REF:{
10852 if (!RNODE_NTH_REF(node)->nd_nth) {
10853 ADD_INSN(ret, node, putnil);
10856 ADD_INSN2(ret, node, getspecial,
INT2FIX(1) ,
10857 INT2FIX(RNODE_NTH_REF(node)->nd_nth << 1));
10861 case NODE_BACK_REF:{
10863 ADD_INSN2(ret, node, getspecial,
INT2FIX(1) ,
10864 INT2FIX(0x01 | (RNODE_BACK_REF(node)->nd_nth << 1)));
10871 CHECK(compile_match(iseq, ret, node, popped,
type));
10875 ADD_INSN1(ret, node, putobject, rb_node_sym_string_val(node));
10881 ADD_INSN1(ret, node, putobject, rb_node_line_lineno_val(node));
10885 case NODE_ENCODING:{
10887 ADD_INSN1(ret, node, putobject, rb_node_encoding_val(node));
10891 case NODE_INTEGER:{
10892 VALUE lit = rb_node_integer_literal_val(node);
10893 debugp_param(
"integer", lit);
10895 ADD_INSN1(ret, node, putobject, lit);
10901 VALUE lit = rb_node_float_literal_val(node);
10902 debugp_param(
"float", lit);
10904 ADD_INSN1(ret, node, putobject, lit);
10909 case NODE_RATIONAL:{
10910 VALUE lit = rb_node_rational_literal_val(node);
10911 debugp_param(
"rational", lit);
10913 ADD_INSN1(ret, node, putobject, lit);
10918 case NODE_IMAGINARY:{
10919 VALUE lit = rb_node_imaginary_literal_val(node);
10920 debugp_param(
"imaginary", lit);
10922 ADD_INSN1(ret, node, putobject, lit);
10929 debugp_param(
"nd_lit", get_string_value(node));
10931 VALUE lit = get_string_value(node);
10934 option->frozen_string_literal != ISEQ_FROZEN_STRING_LITERAL_DISABLED) {
10935 lit = rb_str_with_debug_created_info(lit, rb_iseq_path(iseq), line);
10937 switch (option->frozen_string_literal) {
10938 case ISEQ_FROZEN_STRING_LITERAL_UNSET:
10939 ADD_INSN1(ret, node, putchilledstring, lit);
10941 case ISEQ_FROZEN_STRING_LITERAL_DISABLED:
10942 ADD_INSN1(ret, node, putstring, lit);
10944 case ISEQ_FROZEN_STRING_LITERAL_ENABLED:
10945 ADD_INSN1(ret, node, putobject, lit);
10948 rb_bug(
"invalid frozen_string_literal");
10955 compile_dstr(iseq, ret, node);
10958 ADD_INSN(ret, node, pop);
10963 ADD_CALL_RECEIVER(ret, node);
10964 VALUE str = rb_node_str_string_val(node);
10965 ADD_INSN1(ret, node, putobject, str);
10967 ADD_CALL(ret, node, idBackquote,
INT2FIX(1));
10970 ADD_INSN(ret, node, pop);
10975 ADD_CALL_RECEIVER(ret, node);
10976 compile_dstr(iseq, ret, node);
10977 ADD_CALL(ret, node, idBackquote,
INT2FIX(1));
10980 ADD_INSN(ret, node, pop);
10985 CHECK(compile_evstr(iseq, ret, RNODE_EVSTR(node)->nd_body, popped));
10989 VALUE lit = rb_node_regx_string_val(node);
10990 ADD_INSN1(ret, node, putobject, lit);
10996 compile_dregx(iseq, ret, node, popped);
10999 int ic_index = body->ise_size++;
11001 block_iseq = NEW_CHILD_ISEQ(RNODE_ONCE(node)->nd_body, make_name_for_block(iseq), ISEQ_TYPE_PLAIN, line);
11003 ADD_INSN2(ret, node, once, block_iseq,
INT2FIX(ic_index));
11007 ADD_INSN(ret, node, pop);
11011 case NODE_ARGSCAT:{
11013 CHECK(COMPILE(ret,
"argscat head", RNODE_ARGSCAT(node)->nd_head));
11014 ADD_INSN1(ret, node, splatarray,
Qfalse);
11015 ADD_INSN(ret, node, pop);
11016 CHECK(COMPILE(ret,
"argscat body", RNODE_ARGSCAT(node)->nd_body));
11017 ADD_INSN1(ret, node, splatarray,
Qfalse);
11018 ADD_INSN(ret, node, pop);
11021 CHECK(COMPILE(ret,
"argscat head", RNODE_ARGSCAT(node)->nd_head));
11022 const NODE *body_node = RNODE_ARGSCAT(node)->nd_body;
11023 if (nd_type_p(body_node, NODE_LIST)) {
11024 CHECK(compile_array(iseq, ret, body_node, popped, FALSE) >= 0);
11027 CHECK(COMPILE(ret,
"argscat body", body_node));
11028 ADD_INSN(ret, node, concattoarray);
11033 case NODE_ARGSPUSH:{
11035 CHECK(COMPILE(ret,
"argspush head", RNODE_ARGSPUSH(node)->nd_head));
11036 ADD_INSN1(ret, node, splatarray,
Qfalse);
11037 ADD_INSN(ret, node, pop);
11038 CHECK(COMPILE_(ret,
"argspush body", RNODE_ARGSPUSH(node)->nd_body, popped));
11041 CHECK(COMPILE(ret,
"argspush head", RNODE_ARGSPUSH(node)->nd_head));
11042 const NODE *body_node = RNODE_ARGSPUSH(node)->nd_body;
11043 if (keyword_node_p(body_node)) {
11044 CHECK(COMPILE_(ret,
"array element", body_node, FALSE));
11045 ADD_INSN(ret, node, pushtoarraykwsplat);
11047 else if (static_literal_node_p(body_node, iseq,
false)) {
11048 ADD_INSN1(ret, body_node, putobject, static_literal_value(body_node, iseq));
11049 ADD_INSN1(ret, node, pushtoarray,
INT2FIX(1));
11052 CHECK(COMPILE_(ret,
"array element", body_node, FALSE));
11053 ADD_INSN1(ret, node, pushtoarray,
INT2FIX(1));
11059 CHECK(COMPILE(ret,
"splat", RNODE_SPLAT(node)->nd_head));
11060 ADD_INSN1(ret, node, splatarray,
Qtrue);
11063 ADD_INSN(ret, node, pop);
11068 ID mid = RNODE_DEFN(node)->nd_mid;
11069 const rb_iseq_t *method_iseq = NEW_ISEQ(RNODE_DEFN(node)->nd_defn,
11071 ISEQ_TYPE_METHOD, line);
11073 debugp_param("defn/iseq", rb_iseqw_new(method_iseq));
11074 ADD_INSN2(ret, node, definemethod, ID2SYM(mid), method_iseq);
11078 ADD_INSN1(ret, node, putobject, ID2SYM(mid));
11084 ID mid = RNODE_DEFS(node)->nd_mid;
11085 const rb_iseq_t * singleton_method_iseq = NEW_ISEQ(RNODE_DEFS(node)->nd_defn,
11087 ISEQ_TYPE_METHOD, line);
11089 debugp_param(
"defs/iseq", rb_iseqw_new(singleton_method_iseq));
11090 CHECK(COMPILE(ret,
"defs: recv", RNODE_DEFS(node)->nd_recv));
11091 ADD_INSN2(ret, node, definesmethod,
ID2SYM(mid), singleton_method_iseq);
11095 ADD_INSN1(ret, node, putobject,
ID2SYM(mid));
11100 ADD_INSN1(ret, node, putspecialobject,
INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
11101 ADD_INSN1(ret, node, putspecialobject,
INT2FIX(VM_SPECIAL_OBJECT_CBASE));
11102 CHECK(COMPILE(ret,
"alias arg1", RNODE_ALIAS(node)->nd_1st));
11103 CHECK(COMPILE(ret,
"alias arg2", RNODE_ALIAS(node)->nd_2nd));
11104 ADD_SEND(ret, node, id_core_set_method_alias,
INT2FIX(3));
11107 ADD_INSN(ret, node, pop);
11112 ADD_INSN1(ret, node, putspecialobject,
INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
11113 ADD_INSN1(ret, node, putobject,
ID2SYM(RNODE_VALIAS(node)->nd_alias));
11114 ADD_INSN1(ret, node, putobject,
ID2SYM(RNODE_VALIAS(node)->nd_orig));
11115 ADD_SEND(ret, node, id_core_set_variable_alias,
INT2FIX(2));
11118 ADD_INSN(ret, node, pop);
11125 for (
long i = 0; i < ary->len; i++) {
11126 ADD_INSN1(ret, node, putspecialobject,
INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
11127 ADD_INSN1(ret, node, putspecialobject,
INT2FIX(VM_SPECIAL_OBJECT_CBASE));
11128 CHECK(COMPILE(ret,
"undef arg", ary->data[i]));
11129 ADD_SEND(ret, node, id_core_undef_method,
INT2FIX(2));
11131 if (i < ary->
len - 1) {
11132 ADD_INSN(ret, node, pop);
11137 ADD_INSN(ret, node, pop);
11142 const rb_iseq_t *class_iseq = NEW_CHILD_ISEQ(RNODE_CLASS(node)->nd_body,
11144 ISEQ_TYPE_CLASS, line);
11145 const int flags = VM_DEFINECLASS_TYPE_CLASS |
11146 (RNODE_CLASS(node)->nd_super ? VM_DEFINECLASS_FLAG_HAS_SUPERCLASS : 0) |
11147 compile_cpath(ret, iseq, RNODE_CLASS(node)->nd_cpath);
11149 CHECK(COMPILE(ret,
"super", RNODE_CLASS(node)->nd_super));
11150 ADD_INSN3(ret, node, defineclass,
ID2SYM(get_node_colon_nd_mid(RNODE_CLASS(node)->nd_cpath)), class_iseq,
INT2FIX(flags));
11154 ADD_INSN(ret, node, pop);
11159 const rb_iseq_t *module_iseq = NEW_CHILD_ISEQ(RNODE_MODULE(node)->nd_body,
11161 ISEQ_TYPE_CLASS, line);
11162 const int flags = VM_DEFINECLASS_TYPE_MODULE |
11163 compile_cpath(ret, iseq, RNODE_MODULE(node)->nd_cpath);
11165 ADD_INSN (ret, node, putnil);
11166 ADD_INSN3(ret, node, defineclass,
ID2SYM(get_node_colon_nd_mid(RNODE_MODULE(node)->nd_cpath)), module_iseq,
INT2FIX(flags));
11170 ADD_INSN(ret, node, pop);
11176 const rb_iseq_t *singleton_class = NEW_ISEQ(RNODE_SCLASS(node)->nd_body, rb_fstring_lit(
"singleton class"),
11177 ISEQ_TYPE_CLASS, line);
11179 CHECK(COMPILE(ret,
"sclass#recv", RNODE_SCLASS(node)->nd_recv));
11180 ADD_INSN (ret, node, putnil);
11181 CONST_ID(singletonclass,
"singletonclass");
11182 ADD_INSN3(ret, node, defineclass,
11183 ID2SYM(singletonclass), singleton_class,
11184 INT2FIX(VM_DEFINECLASS_TYPE_SINGLETON_CLASS));
11188 ADD_INSN(ret, node, pop);
11193 CHECK(compile_colon2(iseq, ret, node, popped));
11196 CHECK(compile_colon3(iseq, ret, node, popped));
11199 CHECK(compile_dots(iseq, ret, node, popped, FALSE));
11202 CHECK(compile_dots(iseq, ret, node, popped, TRUE));
11206 LABEL *lend = NEW_LABEL(line);
11207 LABEL *ltrue = NEW_LABEL(line);
11208 LABEL *lfalse = NEW_LABEL(line);
11209 CHECK(compile_flip_flop(iseq, ret, node,
type == NODE_FLIP2,
11211 ADD_LABEL(ret, ltrue);
11212 ADD_INSN1(ret, node, putobject,
Qtrue);
11213 ADD_INSNL(ret, node, jump, lend);
11214 ADD_LABEL(ret, lfalse);
11215 ADD_INSN1(ret, node, putobject,
Qfalse);
11216 ADD_LABEL(ret, lend);
11221 ADD_INSN(ret, node, putself);
11227 ADD_INSN(ret, node, putnil);
11233 ADD_INSN1(ret, node, putobject,
Qtrue);
11239 ADD_INSN1(ret, node, putobject,
Qfalse);
11244 CHECK(compile_errinfo(iseq, ret, node, popped));
11248 CHECK(compile_defined_expr(iseq, ret, node,
Qtrue,
false));
11251 case NODE_POSTEXE:{
11255 int is_index = body->ise_size++;
11257 rb_iseq_new_with_callback_new_callback(build_postexe_iseq, RNODE_POSTEXE(node)->nd_body);
11259 new_child_iseq_with_callback(iseq, ifunc,
11260 rb_fstring(make_name_for_block(iseq)), iseq, ISEQ_TYPE_BLOCK, line);
11262 ADD_INSN2(ret, node, once, once_iseq,
INT2FIX(is_index));
11266 ADD_INSN(ret, node, pop);
11271 CHECK(compile_kw_arg(iseq, ret, node, popped));
11274 compile_dstr(iseq, ret, node);
11276 ADD_INSN(ret, node, intern);
11279 ADD_INSN(ret, node, pop);
11283 case NODE_ATTRASGN:
11284 CHECK(compile_attrasgn(iseq, ret, node, popped));
11288 const rb_iseq_t *block = NEW_CHILD_ISEQ(RNODE_LAMBDA(node)->nd_body, make_name_for_block(iseq), ISEQ_TYPE_BLOCK, line);
11291 ADD_INSN1(ret, node, putspecialobject,
INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
11292 ADD_CALL_WITH_BLOCK(ret, node, idLambda, argc, block);
11296 ADD_INSN(ret, node, pop);
11301 UNKNOWN_NODE("iseq_compile_each", node, COMPILE_NG);
11316 insn_data_length(INSN *iobj)
11318 return insn_len(iobj->insn_id);
11322 calc_sp_depth(
int depth,
INSN *insn)
11324 return comptime_insn_stack_increase(depth, insn->insn_id, insn->operands);
11328 opobj_inspect(
VALUE obj)
11348 insn_data_to_s_detail(
INSN *iobj)
11352 if (iobj->operands) {
11353 const char *types = insn_op_types(iobj->insn_id);
11356 for (j = 0; types[j]; j++) {
11357 char type = types[j];
11381 VALUE v = OPERAND_AT(iobj, j);
11417 void *func = (
void *)OPERAND_AT(iobj, j);
11420 if (dladdr(func, &info) && info.dli_sname) {
11435 if (types[j + 1]) {
11446 dump_disasm_list_with_cursor(link, NULL, NULL);
11457 printf(
"-- raw disasm--------\n");
11460 if (curr) printf(curr == link ?
"*" :
" ");
11461 switch (link->type) {
11462 case ISEQ_ELEMENT_INSN:
11464 iobj = (INSN *)link;
11465 str = insn_data_to_s_detail(iobj);
11466 printf(" %04d %-65s(%4u)\n", pos, StringValueCStr(str), iobj->insn_info.line_no);
11467 pos += insn_data_length(iobj);
11470 case ISEQ_ELEMENT_LABEL:
11472 lobj = (LABEL *)link;
11473 printf(LABEL_FORMAT " [sp: %d, unremovable: %d, refcnt: %d]%s\n", lobj->label_no, lobj->sp, lobj->unremovable, lobj->refcnt,
11474 dest == lobj ? " <---" : "");
11477 case ISEQ_ELEMENT_TRACE:
11480 printf(
" trace: %0x\n", trace->event);
11483 case ISEQ_ELEMENT_ADJUST:
11486 printf(
" adjust: [label: %d]\n", adjust->label ? adjust->label->label_no : -1);
11495 printf(
"---------------------\n");
11500 rb_insn_len(
VALUE insn)
11502 return insn_len(insn);
11506 rb_insns_name(
int i)
11508 return insn_name(i);
11512 rb_insns_name_array(
void)
11516 for (i = 0; i < VM_INSTRUCTION_SIZE; i++) {
11527 obj = rb_to_symbol_type(obj);
11529 if (st_lookup(labels_table, obj, &tmp) == 0) {
11530 label = NEW_LABEL(0);
11531 st_insert(labels_table, obj, (st_data_t)label);
11534 label = (
LABEL *)tmp;
11541 get_exception_sym2type(
VALUE sym)
11543 static VALUE symRescue, symEnsure, symRetry;
11544 static VALUE symBreak, symRedo, symNext;
11546 if (symRescue == 0) {
11555 if (sym == symRescue) return CATCH_TYPE_RESCUE;
11556 if (sym == symEnsure) return CATCH_TYPE_ENSURE;
11557 if (sym == symRetry) return CATCH_TYPE_RETRY;
11558 if (sym == symBreak) return CATCH_TYPE_BREAK;
11559 if (sym == symRedo) return CATCH_TYPE_REDO;
11560 if (sym == symNext) return CATCH_TYPE_NEXT;
11574 LABEL *lstart, *lend, *lcont;
11589 lstart = register_label(iseq, labels_table, RARRAY_AREF(v, 2));
11590 lend = register_label(iseq, labels_table, RARRAY_AREF(v, 3));
11591 lcont = register_label(iseq, labels_table, RARRAY_AREF(v, 4));
11595 if (
type == CATCH_TYPE_RESCUE ||
11596 type == CATCH_TYPE_BREAK ||
11597 type == CATCH_TYPE_NEXT) {
11603 ADD_CATCH_ENTRY(
type, lstart, lend, eiseq, lcont);
11611 insn_make_insn_table(
void)
11615 table = st_init_numtable_with_size(VM_INSTRUCTION_SIZE);
11617 for (i=0; i<VM_INSTRUCTION_SIZE; i++) {
11631 iseqw = rb_iseq_load(op, (VALUE)iseq, Qnil);
11633 else if (CLASS_OF(op) == rb_cISeq) {
11640 loaded_iseq = rb_iseqw_to_iseq(iseqw);
11641 return loaded_iseq;
11649 unsigned int flag = 0;
11660 if (!NIL_P(vorig_argc)) orig_argc = FIX2INT(vorig_argc);
11662 if (!NIL_P(vkw_arg)) {
11665 size_t n = rb_callinfo_kwarg_bytes(len);
11668 kw_arg->references = 0;
11669 kw_arg->keyword_len = len;
11670 for (i = 0; i < len; i++) {
11673 kw_arg->keywords[i] = kw;
11678 const struct rb_callinfo *ci = new_callinfo(iseq, mid, orig_argc, flag, kw_arg, (flag & VM_CALL_ARGS_SIMPLE) == 0);
11684 event_name_to_flag(
VALUE sym)
11686 #define CHECK_EVENT(ev) if (sym == ID2SYM(rb_intern_const(#ev))) return ev;
11707 int line_no = 0, node_id = -1, insn_idx = 0;
11708 int ret = COMPILE_OK;
11713 static struct st_table *insn_table;
11715 if (insn_table == 0) {
11716 insn_table = insn_make_insn_table();
11719 for (i=0; i<
len; i++) {
11725 ADD_TRACE(anchor, event);
11728 LABEL *label = register_label(iseq, labels_table, obj);
11729 ADD_LABEL(anchor, label);
11746 if (st_lookup(insn_table, (st_data_t)insn, &insn_id) == 0) {
11748 COMPILE_ERROR(iseq, line_no,
11749 "unknown instruction: %+"PRIsVALUE, insn);
11754 if (argc != insn_len((
VALUE)insn_id)-1) {
11755 COMPILE_ERROR(iseq, line_no,
11756 "operand size mismatch");
11762 argv = compile_data_calloc2(iseq,
sizeof(
VALUE), argc);
11767 (
enum ruby_vminsn_type)insn_id, argc, argv));
11769 for (j=0; j<argc; j++) {
11771 switch (insn_op_type((
VALUE)insn_id, j)) {
11773 LABEL *label = register_label(iseq, labels_table, op);
11774 argv[j] = (
VALUE)label;
11789 VALUE v = (
VALUE)iseq_build_load_iseq(iseq, op);
11800 if (
NUM2UINT(op) >= ISEQ_BODY(iseq)->ise_size) {
11801 ISEQ_BODY(iseq)->ise_size =
NUM2INT(op) + 1;
11807 op = rb_to_array_type(op);
11811 sym = rb_to_symbol_type(sym);
11816 argv[j] = segments;
11818 ISEQ_BODY(iseq)->ic_size++;
11823 if (
NUM2UINT(op) >= ISEQ_BODY(iseq)->ivc_size) {
11824 ISEQ_BODY(iseq)->ivc_size =
NUM2INT(op) + 1;
11829 if (
NUM2UINT(op) >= ISEQ_BODY(iseq)->icvarc_size) {
11830 ISEQ_BODY(iseq)->icvarc_size =
NUM2INT(op) + 1;
11834 argv[j] = iseq_build_callinfo_from_hash(iseq, op);
11837 argv[j] = rb_to_symbol_type(op);
11844 RHASH_TBL_RAW(map)->type = &cdhash_type;
11845 op = rb_to_array_type(op);
11850 register_label(iseq, labels_table, sym);
11860 #if SIZEOF_VALUE <= SIZEOF_LONG
11865 argv[j] = (
VALUE)funcptr;
11876 (
enum ruby_vminsn_type)insn_id, argc, NULL));
11885 validate_labels(iseq, labels_table);
11886 if (!ret)
return ret;
11887 return iseq_setup(iseq, anchor);
11890 #define CHECK_ARRAY(v) rb_to_array_type(v)
11891 #define CHECK_SYMBOL(v) rb_to_symbol_type(v)
11901 else if (!
NIL_P(val)) {
11908 static const struct rb_iseq_param_keyword *
11914 VALUE key, sym, default_val;
11917 struct rb_iseq_param_keyword *keyword =
ZALLOC(
struct rb_iseq_param_keyword);
11919 ISEQ_BODY(iseq)->param.flags.has_kw = TRUE;
11921 keyword->num =
len;
11922 #define SYM(s) ID2SYM(rb_intern_const(#s))
11923 (void)int_param(&keyword->bits_start, params, SYM(kwbits));
11924 i = keyword->bits_start - keyword->num;
11925 ids = (
ID *)&ISEQ_BODY(iseq)->local_table[i];
11929 for (i = 0; i <
len; i++) {
11933 goto default_values;
11936 keyword->required_num++;
11940 default_len =
len - i;
11941 if (default_len == 0) {
11942 keyword->table = ids;
11945 else if (default_len < 0) {
11951 for (j = 0; i <
len; i++, j++) {
11971 keyword->table = ids;
11972 keyword->default_values = dvs;
11978 iseq_insn_each_object_mark_and_pin(
VALUE obj,
VALUE _)
11987 size_t size =
sizeof(
INSN);
11988 unsigned int pos = 0;
11991 #ifdef STRICT_ALIGNMENT
11992 size_t padding = calc_padding((
void *)&storage->buff[pos], size);
11994 const size_t padding = 0;
11996 size_t offset = pos + size + padding;
11997 if (offset > storage->size || offset > storage->pos) {
11999 storage = storage->next;
12002 #ifdef STRICT_ALIGNMENT
12003 pos += (int)padding;
12006 iobj = (
INSN *)&storage->buff[pos];
12008 if (iobj->operands) {
12009 iseq_insn_each_markable_object(iobj, iseq_insn_each_object_mark_and_pin, (
VALUE)0);
12022 .flags = RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED,
12029 #define SYM(s) ID2SYM(rb_intern_const(#s))
12031 unsigned int arg_size, local_size, stack_max;
12033 struct st_table *labels_table = st_init_numtable();
12038 DECL_ANCHOR(anchor);
12039 INIT_ANCHOR(anchor);
12042 ISEQ_BODY(iseq)->local_table_size = len;
12043 ISEQ_BODY(iseq)->local_table = tbl = len > 0 ? (ID *)ALLOC_N(ID, ISEQ_BODY(iseq)->local_table_size) : NULL;
12045 for (i = 0; i <
len; i++) {
12048 if (sym_arg_rest == lv) {
12056 #define INT_PARAM(F) int_param(&ISEQ_BODY(iseq)->param.F, params, SYM(F))
12057 if (INT_PARAM(lead_num)) {
12058 ISEQ_BODY(iseq)->param.flags.has_lead = TRUE;
12060 if (INT_PARAM(post_num)) ISEQ_BODY(iseq)->param.flags.has_post = TRUE;
12061 if (INT_PARAM(post_start)) ISEQ_BODY(iseq)->param.flags.has_post = TRUE;
12062 if (INT_PARAM(rest_start)) ISEQ_BODY(iseq)->param.flags.has_rest = TRUE;
12063 if (INT_PARAM(block_start)) ISEQ_BODY(iseq)->param.flags.has_block = TRUE;
12066 #define INT_PARAM(F) F = (int_param(&x, misc, SYM(F)) ? (unsigned int)x : 0)
12068 INT_PARAM(arg_size);
12069 INT_PARAM(local_size);
12070 INT_PARAM(stack_max);
12075 #ifdef USE_ISEQ_NODE_ID
12084 ISEQ_BODY(iseq)->param.flags.has_opt = !!(
len - 1 >= 0);
12086 if (ISEQ_BODY(iseq)->param.flags.has_opt) {
12089 for (i = 0; i <
len; i++) {
12091 LABEL *label = register_label(iseq, labels_table, ent);
12092 opt_table[i] = (
VALUE)label;
12095 ISEQ_BODY(iseq)->param.opt_num =
len - 1;
12096 ISEQ_BODY(iseq)->param.opt_table = opt_table;
12099 else if (!
NIL_P(arg_opt_labels)) {
12105 ISEQ_BODY(iseq)->param.keyword = iseq_build_kw(iseq, params, keywords);
12107 else if (!
NIL_P(keywords)) {
12113 ISEQ_BODY(iseq)->param.flags.ambiguous_param0 = TRUE;
12117 ISEQ_BODY(iseq)->param.flags.use_block = TRUE;
12120 if (int_param(&i, params, SYM(kwrest))) {
12121 struct rb_iseq_param_keyword *keyword = (
struct rb_iseq_param_keyword *)ISEQ_BODY(iseq)->param.keyword;
12122 if (keyword == NULL) {
12123 ISEQ_BODY(iseq)->param.keyword = keyword =
ZALLOC(
struct rb_iseq_param_keyword);
12125 keyword->rest_start = i;
12126 ISEQ_BODY(iseq)->param.flags.has_kwrest = TRUE;
12129 iseq_calc_param_size(iseq);
12132 iseq_build_from_ary_exception(iseq, labels_table, exception);
12135 iseq_build_from_ary_body(iseq, anchor, body, node_ids, labels_wrapper);
12137 ISEQ_BODY(iseq)->param.size = arg_size;
12138 ISEQ_BODY(iseq)->local_table_size = local_size;
12139 ISEQ_BODY(iseq)->stack_max = stack_max;
while (body->type == ISEQ_TYPE_BLOCK ||
       body->type == ISEQ_TYPE_RESCUE ||
       body->type == ISEQ_TYPE_ENSURE ||
       body->type == ISEQ_TYPE_EVAL ||
       body->type == ISEQ_TYPE_MAIN
for (i = 0; i < body->local_table_size; i++) {
if (body->local_table[i] == id) {
iseq = body->parent_iseq;
body = ISEQ_BODY(iseq);
for (i=0; i<body->local_table_size; i++) {
if (body->local_table[i] == id) {
#ifndef IBF_ISEQ_DEBUG
#define IBF_ISEQ_DEBUG 0
#ifndef IBF_ISEQ_ENABLE_LOCAL_BUFFER
#define IBF_ISEQ_ENABLE_LOCAL_BUFFER 0
typedef uint32_t ibf_offset_t;
#define IBF_OFFSET(ptr) ((ibf_offset_t)(VALUE)(ptr))
#define IBF_MAJOR_VERSION ISEQ_MAJOR_VERSION
#define IBF_DEVEL_VERSION 4
#define IBF_MINOR_VERSION (ISEQ_MINOR_VERSION * 10000 + IBF_DEVEL_VERSION)
#define IBF_MINOR_VERSION ISEQ_MINOR_VERSION
static const char IBF_ENDIAN_MARK =
#ifdef WORDS_BIGENDIAN
uint32_t major_version;
uint32_t minor_version;
uint32_t extra_size;
uint32_t iseq_list_size;
uint32_t global_object_list_size;
ibf_offset_t iseq_list_offset;
ibf_offset_t global_object_list_offset;
unsigned int obj_list_size;
ibf_offset_t obj_list_offset;
pinned_list_mark(void *ptr)
for (i = 0; i < list->size; i++) {
if (list->buffer[i]) {
0, 0, RUBY_TYPED_WB_PROTECTED | RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_EMBEDDABLE
pinned_list_fetch(VALUE list, long offset)
if (offset >= ptr->size) {
return ptr->buffer[offset];
pinned_list_store(VALUE list, long offset, VALUE object)
if (offset >= ptr->size) {
pinned_list_new(long size)
size_t memsize = offsetof(struct pinned_list, buffer) + size * sizeof(VALUE);
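/*
 * pinned_list is a GC-visible array wrapper used by the IBF loader: objects
 * deserialized from the binary are stored here by index so that each one is
 * materialized only once and stays reachable (and unmoved) while raw offsets
 * in the dump still refer to it.
 */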
static ibf_offset_t
ibf_dump_pos(struct ibf_dump *dump)
long pos = RSTRING_LEN(dump->current_buffer->str);
#if SIZEOF_LONG > SIZEOF_INT
if (pos >= UINT_MAX) {
return (unsigned int)pos;
ibf_dump_align(struct ibf_dump *dump, size_t align)
ibf_offset_t pos = ibf_dump_pos(dump);
static const char padding[sizeof(VALUE)];
size_t size = align - ((size_t)pos % align);
#if SIZEOF_LONG > SIZEOF_INT
if (pos + size >= UINT_MAX) {
for (; size > sizeof(padding); size -= sizeof(padding)) {
rb_str_cat(dump->current_buffer->str, padding, sizeof(padding));
rb_str_cat(dump->current_buffer->str, padding, size);
static ibf_offset_t
ibf_dump_write(struct ibf_dump *dump, const void *buff, unsigned long size)
ibf_offset_t pos = ibf_dump_pos(dump);
rb_str_cat(dump->current_buffer->str, (const char *)buff, size);
static ibf_offset_t
ibf_dump_write_byte(struct ibf_dump *dump, unsigned char byte)
return ibf_dump_write(dump, &byte, sizeof(unsigned char));
ibf_dump_overwrite(struct ibf_dump *dump, void *buff, unsigned int size, long offset)
VALUE str = dump->current_buffer->str;
if ((unsigned long)(size + offset) > (unsigned long)RSTRING_LEN(str))
rb_bug("ibf_dump_overwrite: overflow");
memcpy(ptr + offset, buff, size);
static const void *
ibf_load_ptr(const struct ibf_load *load, ibf_offset_t *offset, int size)
ibf_offset_t beg = *offset;
return load->current_buffer->buff + beg;
ibf_load_alloc(const struct ibf_load *load, ibf_offset_t offset, size_t x, size_t y)
size_t size = x * y;
memcpy(buff, load->current_buffer->buff + offset, size);
#define IBF_W_ALIGN(type) (RUBY_ALIGNOF(type) > 1 ? ibf_dump_align(dump, RUBY_ALIGNOF(type)) : (void)0)
#define IBF_W(b, type, n) (IBF_W_ALIGN(type), (type *)(VALUE)IBF_WP(b, type, n))
#define IBF_WV(variable) ibf_dump_write(dump, &(variable), sizeof(variable))
#define IBF_WP(b, type, n) ibf_dump_write(dump, (b), sizeof(type) * (n))
#define IBF_R(val, type, n) (type *)ibf_load_alloc(load, IBF_OFFSET(val), sizeof(type), (n))
#define IBF_ZERO(variable) memset(&(variable), 0, sizeof(variable))
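/*
 * The IBF_* macros wrap the raw dump/load primitives above. Roughly:
 * IBF_W(buf, type, n) aligns the dump position for `type` and appends n
 * elements, returning the write offset disguised as a pointer, while
 * IBF_R(val, type, n) copies n elements back out at that recorded offset.
 * A minimal usage sketch (names stand in for whatever the caller has):
 *
 *     ID ids[n];                              // filled in by the caller
 *     dump_kw.table = IBF_W(ids, ID, n);      // dump side: records the offset
 *     ID *loaded = IBF_R(kw->table, ID, n);   // load side: copies them back
 */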
ibf_table_lookup(struct st_table *table, st_data_t key)
if (st_lookup(table, key, &val)) {
ibf_table_find_or_insert(struct st_table *table, st_data_t key)
int index = ibf_table_lookup(table, key);
index = (int)table->num_entries;
st_insert(table, key, (st_data_t)index);
static void ibf_dump_object_list(struct ibf_dump *dump, ibf_offset_t *obj_list_offset, unsigned int *obj_list_size);
ibf_dump_object_table_new(void)
st_table *obj_table = st_init_numtable();
st_insert(obj_table, (st_data_t)Qnil, (st_data_t)0);
return ibf_table_find_or_insert(dump->current_buffer->obj_table, (st_data_t)obj);
return ibf_dump_object(dump, rb_id2sym(id));
ibf_load_id(const struct ibf_load *load, const ID id_index)
if (id_index == 0) {
VALUE sym = ibf_load_object(load, id_index);
static ibf_offset_t ibf_dump_iseq_each(struct ibf_dump *dump, const rb_iseq_t *iseq);
if (iseq == NULL) {
return ibf_table_find_or_insert(dump->iseq_table, (st_data_t)iseq);
static unsigned char
ibf_load_byte(const struct ibf_load *load, ibf_offset_t *offset)
return (unsigned char)load->current_buffer->buff[(*offset)++];
if (sizeof(VALUE) > 8 || CHAR_BIT != 8) {
ibf_dump_write(dump, &x, sizeof(VALUE));
enum { max_byte_length = sizeof(VALUE) + 1 };
unsigned char bytes[max_byte_length];
for (n = 0; n < sizeof(VALUE) && (x >> (7 - n)); n++, x >>= 8) {
bytes[max_byte_length - 1 - n] = (unsigned char)x;
bytes[max_byte_length - 1 - n] = (unsigned char)x;
ibf_dump_write(dump, bytes + max_byte_length - n, n);
ibf_load_small_value(const struct ibf_load *load, ibf_offset_t *offset)
if (sizeof(VALUE) > 8 || CHAR_BIT != 8) {
union { char s[sizeof(VALUE)]; VALUE v; } x;
memcpy(x.s, load->current_buffer->buff + *offset, sizeof(VALUE));
*offset += sizeof(VALUE);
enum { max_byte_length = sizeof(VALUE) + 1 };
const unsigned char *buffer = (const unsigned char *)load->current_buffer->buff;
const unsigned char c = buffer[*offset];
c == 0 ? 9 : ntz_int32(c) + 1;
if (*offset + n > load->current_buffer->size) {
for (i = 1; i < n; i++) {
x |= (VALUE)buffer[*offset + i];
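/*
 * "Small values" are the workhorse of the IBF format: most integers in this
 * file are written with the variable-length encoding above (used when
 * sizeof(VALUE) == 8 and CHAR_BIT == 8). Roughly: the count of trailing zero
 * bits in the first byte says how many bytes the value occupies in total (a
 * first byte of 0 means a full 8-byte payload follows), and the remaining
 * bits hold the most significant part of the value. For example, 5 is
 * written as the single byte 0x0B ((5 << 1) | 1); decoding shifts that byte
 * right by one to recover 5.
 */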
ibf_dump_write_small_value(dump, (VALUE)bf->index);
size_t len = strlen(bf->name);
ibf_dump_write_small_value(dump, (VALUE)len);
ibf_dump_write(dump, bf->name, len);
ibf_load_builtin(const struct ibf_load *load, ibf_offset_t *offset)
int i = (int)ibf_load_small_value(load, offset);
int len = (int)ibf_load_small_value(load, offset);
const char *name = (char *)ibf_load_ptr(load, offset, len);
fprintf(stderr, "%.*s!!\n", len, name);
if (strncmp(table[i].name, name, len) != 0) {
rb_raise(rb_eArgError, "builtin function index (%d) mismatch (expect %s but %s)", i, name, table[i].name);
static ibf_offset_t
const int iseq_size = body->iseq_size;
const VALUE *orig_code = rb_iseq_original_iseq(iseq);
ibf_offset_t offset = ibf_dump_pos(dump);
for (code_index=0; code_index<iseq_size;) {
const VALUE insn = orig_code[code_index++];
const char *types = insn_op_types(insn);
ibf_dump_write_small_value(dump, insn);
for (op_index=0; types[op_index]; op_index++, code_index++) {
VALUE op = orig_code[code_index];
switch (types[op_index]) {
wv = ibf_dump_object(dump, op);
wv = ibf_dump_object(dump, arr);
wv = is - ISEQ_IS_ENTRY_START(body, types[op_index]);
wv = ibf_dump_id(dump, (ID)op);
ibf_dump_write_small_value(dump, wv);
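/*
 * ibf_dump_code() walks the original (non-threaded) instruction sequence and
 * writes each insn followed by its operands. How an operand is encoded
 * depends on its operand type: objects and IDs become indexes into the
 * dump's object/ID tables, inline-cache operands become their index within
 * the per-iseq IS entry area, and everything is emitted as small values.
 */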
ibf_load_code(const struct ibf_load *load, rb_iseq_t *iseq, ibf_offset_t bytecode_offset, ibf_offset_t bytecode_size, unsigned int iseq_size)
unsigned int code_index;
ibf_offset_t reading_pos = bytecode_offset;
struct rb_call_data *cd_entries = load_body->call_data;
iseq_bits_t * mark_offset_bits;
iseq_bits_t tmp[1] = {0};
if (ISEQ_MBITS_BUFLEN(iseq_size) == 1) {
mark_offset_bits = tmp;
mark_offset_bits = ZALLOC_N(iseq_bits_t, ISEQ_MBITS_BUFLEN(iseq_size));
bool needs_bitmap = false;
for (code_index=0; code_index<iseq_size;) {
const VALUE insn = code[code_index] = ibf_load_small_value(load, &reading_pos);
const char *types = insn_op_types(insn);
for (op_index=0; types[op_index]; op_index++, code_index++) {
const char operand_type = types[op_index];
switch (operand_type) {
VALUE op = ibf_load_small_value(load, &reading_pos);
VALUE v = ibf_load_object(load, op);
code[code_index] = v;
ISEQ_MBITS_SET(mark_offset_bits, code_index);
needs_bitmap = true;
VALUE op = ibf_load_small_value(load, &reading_pos);
VALUE v = ibf_load_object(load, op);
RHASH_TBL_RAW(v)->type = &cdhash_type;
freeze_hide_obj(v);
pinned_list_store(load->current_buffer->obj_list, (long)op, v);
code[code_index] = v;
ISEQ_MBITS_SET(mark_offset_bits, code_index);
needs_bitmap = true;
VALUE op = (VALUE)ibf_load_small_value(load, &reading_pos);
code[code_index] = v;
ISEQ_MBITS_SET(mark_offset_bits, code_index);
needs_bitmap = true;
VALUE op = ibf_load_small_value(load, &reading_pos);
VALUE arr = ibf_load_object(load, op);
IC ic = &ISEQ_IS_IC_ENTRY(load_body, ic_index++);
ic->segments = array_to_idlist(arr);
code[code_index] = (VALUE)ic;
unsigned int op = (unsigned int)ibf_load_small_value(load, &reading_pos);
ISE ic = ISEQ_IS_ENTRY_START(load_body, operand_type) + op;
code[code_index] = (VALUE)ic;
if (operand_type == TS_IVC) {
if (insn == BIN(setinstancevariable)) {
ID iv_name = (ID)code[code_index - 1];
cache->iv_set_name = iv_name;
cache->iv_set_name = 0;
vm_ic_attr_index_initialize(cache, INVALID_SHAPE_ID);
code[code_index] = (VALUE)cd_entries++;
VALUE op = ibf_load_small_value(load, &reading_pos);
code[code_index] = ibf_load_id(load, (ID)(VALUE)op);
code[code_index] = (VALUE)ibf_load_builtin(load, &reading_pos);
code[code_index] = ibf_load_small_value(load, &reading_pos);
if (insn_len(insn) != op_index+1) {
load_body->iseq_encoded = code;
load_body->iseq_size = code_index;
if (ISEQ_MBITS_BUFLEN(load_body->iseq_size) == 1) {
load_body->mark_bits.single = mark_offset_bits[0];
if (needs_bitmap) {
load_body->mark_bits.list = mark_offset_bits;
load_body->mark_bits.list = 0;
RUBY_ASSERT(reading_pos == bytecode_offset + bytecode_size);
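/*
 * ibf_load_code() rebuilds iseq_encoded in place: each operand is decoded
 * according to its operand type (objects and IDs are resolved through the
 * object list; inline caches and call data are re-pointed at freshly
 * allocated per-iseq entries), and every slot that ends up holding a
 * markable VALUE is recorded in mark_offset_bits so the GC mark function
 * can find it later. The full bitmap is kept only when it is actually
 * needed; otherwise the single-word form suffices.
 */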
static ibf_offset_t
int opt_num = ISEQ_BODY(iseq)->param.opt_num;
IBF_W_ALIGN(VALUE);
return ibf_dump_write(dump, ISEQ_BODY(iseq)->param.opt_table, sizeof(VALUE) * (opt_num + 1));
return ibf_dump_pos(dump);
ibf_load_param_opt_table(const struct ibf_load *load, ibf_offset_t opt_table_offset, int opt_num)
MEMCPY(table, load->current_buffer->buff + opt_table_offset, VALUE, opt_num+1);
static ibf_offset_t
const struct rb_iseq_param_keyword *kw = ISEQ_BODY(iseq)->param.keyword;
struct rb_iseq_param_keyword dump_kw = *kw;
int dv_num = kw->num - kw->required_num;
for (i=0; i<kw->num; i++) ids[i] = (ID)ibf_dump_id(dump, kw->table[i]);
for (i=0; i<dv_num; i++) dvs[i] = (VALUE)ibf_dump_object(dump, kw->default_values[i]);
dump_kw.table = IBF_W(ids, ID, kw->num);
dump_kw.default_values = IBF_W(dvs, VALUE, dv_num);
IBF_W_ALIGN(struct rb_iseq_param_keyword);
return ibf_dump_write(dump, &dump_kw, sizeof(struct rb_iseq_param_keyword) * 1);
static const struct rb_iseq_param_keyword *
ibf_load_param_keyword(const struct ibf_load *load, ibf_offset_t param_keyword_offset)
if (param_keyword_offset) {
struct rb_iseq_param_keyword *kw = IBF_R(param_keyword_offset, struct rb_iseq_param_keyword, 1);
int dv_num = kw->num - kw->required_num;
VALUE *dvs = dv_num ? IBF_R(kw->default_values, VALUE, dv_num) : NULL;
for (i=0; i<dv_num; i++) {
dvs[i] = ibf_load_object(load, dvs[i]);
kw->default_values = dvs;
static ibf_offset_t
ibf_offset_t offset = ibf_dump_pos(dump);
for (i = 0; i < ISEQ_BODY(iseq)->insns_info.size; i++) {
ibf_dump_write_small_value(dump, entries[i].line_no);
#ifdef USE_ISEQ_NODE_ID
ibf_dump_write_small_value(dump, entries[i].node_id);
ibf_dump_write_small_value(dump, entries[i].events);
ibf_load_insns_info_body(const struct ibf_load *load, ibf_offset_t body_offset, unsigned int size)
ibf_offset_t reading_pos = body_offset;
for (i = 0; i < size; i++) {
entries[i].line_no = (int)ibf_load_small_value(load, &reading_pos);
#ifdef USE_ISEQ_NODE_ID
entries[i].node_id = (int)ibf_load_small_value(load, &reading_pos);
entries[i].events = (rb_event_flag_t)ibf_load_small_value(load, &reading_pos);
static ibf_offset_t
ibf_dump_insns_info_positions(struct ibf_dump *dump, const unsigned int *positions, unsigned int size)
ibf_offset_t offset = ibf_dump_pos(dump);
unsigned int last = 0;
for (i = 0; i < size; i++) {
ibf_dump_write_small_value(dump, positions[i] - last);
last = positions[i];
static unsigned int *
ibf_load_insns_info_positions(const struct ibf_load *load, ibf_offset_t positions_offset, unsigned int size)
ibf_offset_t reading_pos = positions_offset;
unsigned int *positions = ALLOC_N(unsigned int, size);
unsigned int last = 0;
for (i = 0; i < size; i++) {
positions[i] = last + (unsigned int)ibf_load_small_value(load, &reading_pos);
last = positions[i];
static ibf_offset_t
const int size = body->local_table_size;
for (i=0; i<size; i++) {
VALUE v = ibf_dump_id(dump, body->local_table[i]);
v = ibf_dump_object(dump, ULONG2NUM(body->local_table[i]));
return ibf_dump_write(dump, table, sizeof(ID) * size);
ibf_load_local_table(const struct ibf_load *load, ibf_offset_t local_table_offset, int size)
ID *table = IBF_R(local_table_offset, ID, size);
for (i=0; i<size; i++) {
table[i] = ibf_load_id(load, table[i]);
static ibf_offset_t
int *iseq_indices = ALLOCA_N(int, table->size);
for (i=0; i<table->size; i++) {
iseq_indices[i] = ibf_dump_iseq(dump, table->entries[i].iseq);
const ibf_offset_t offset = ibf_dump_pos(dump);
for (i=0; i<table->size; i++) {
ibf_dump_write_small_value(dump, iseq_indices[i]);
ibf_dump_write_small_value(dump, table->entries[i].type);
ibf_dump_write_small_value(dump, table->entries[i].start);
ibf_dump_write_small_value(dump, table->entries[i].end);
ibf_dump_write_small_value(dump, table->entries[i].cont);
ibf_dump_write_small_value(dump, table->entries[i].sp);
return ibf_dump_pos(dump);
ibf_load_catch_table(const struct ibf_load *load, ibf_offset_t catch_table_offset, unsigned int size)
table->size = size;
ibf_offset_t reading_pos = catch_table_offset;
for (i=0; i<table->size; i++) {
int iseq_index = (int)ibf_load_small_value(load, &reading_pos);
table->entries[i].type = (enum rb_catch_type)ibf_load_small_value(load, &reading_pos);
table->entries[i].start = (unsigned int)ibf_load_small_value(load, &reading_pos);
table->entries[i].end = (unsigned int)ibf_load_small_value(load, &reading_pos);
table->entries[i].cont = (unsigned int)ibf_load_small_value(load, &reading_pos);
table->entries[i].sp = (unsigned int)ibf_load_small_value(load, &reading_pos);
table->entries[i].iseq = ibf_load_iseq(load, (const rb_iseq_t *)(VALUE)iseq_index);
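/*
 * Catch-table entries are serialized as five small values (type, start, end,
 * cont, sp) plus an iseq index; the index refers to the dump-wide iseq
 * table, so on load the referenced child iseq is resolved, possibly lazily,
 * via ibf_load_iseq().
 */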
static ibf_offset_t
const unsigned int ci_size = body->ci_size;
ibf_offset_t offset = ibf_dump_pos(dump);
for (i = 0; i < ci_size; i++) {
ibf_dump_write_small_value(dump, ibf_dump_id(dump, vm_ci_mid(ci)));
ibf_dump_write_small_value(dump, vm_ci_flag(ci));
ibf_dump_write_small_value(dump, vm_ci_argc(ci));
int len = kwarg->keyword_len;
ibf_dump_write_small_value(dump, len);
for (int j=0; j<len; j++) {
VALUE keyword = ibf_dump_object(dump, kwarg->keywords[j]);
ibf_dump_write_small_value(dump, keyword);
ibf_dump_write_small_value(dump, 0);
ibf_dump_write_small_value(dump, (VALUE)-1);
static enum rb_id_table_iterator_result
store_outer_variable(ID id, VALUE val, void *dump)
return ID_TABLE_CONTINUE;
outer_variable_cmp(const void *a, const void *b, void *arg)
static ibf_offset_t
struct rb_id_table * ovs = ISEQ_BODY(iseq)->outer_variables;
ibf_offset_t offset = ibf_dump_pos(dump);
size_t size = ovs ? rb_id_table_size(ovs) : 0;
ibf_dump_write_small_value(dump, (VALUE)size);
rb_id_table_foreach(ovs, store_outer_variable, ovlist);
for (size_t i = 0; i < size; ++i) {
ID id = ovlist->pairs[i].id;
ID val = ovlist->pairs[i].val;
ibf_dump_write_small_value(dump, ibf_dump_id(dump, id));
ibf_dump_write_small_value(dump, val);
ibf_load_ci_entries(const struct ibf_load *load,
                    ibf_offset_t ci_entries_offset,
                    unsigned int ci_size,
ibf_offset_t reading_pos = ci_entries_offset;
for (i = 0; i < ci_size; i++) {
VALUE mid_index = ibf_load_small_value(load, &reading_pos);
if (mid_index != (VALUE)-1) {
ID mid = ibf_load_id(load, mid_index);
unsigned int flag = (unsigned int)ibf_load_small_value(load, &reading_pos);
unsigned int argc = (unsigned int)ibf_load_small_value(load, &reading_pos);
int kwlen = (int)ibf_load_small_value(load, &reading_pos);
kwarg->references = 0;
kwarg->keyword_len = kwlen;
for (int j=0; j<kwlen; j++) {
VALUE keyword = ibf_load_small_value(load, &reading_pos);
kwarg->keywords[j] = ibf_load_object(load, keyword);
cds[i].ci = vm_ci_new(mid, flag, argc, kwarg);
cds[i].cc = vm_cc_empty();
ibf_load_outer_variables(const struct ibf_load * load, ibf_offset_t outer_variables_offset)
ibf_offset_t reading_pos = outer_variables_offset;
size_t table_size = (size_t)ibf_load_small_value(load, &reading_pos);
if (table_size > 0) {
tbl = rb_id_table_create(table_size);
for (size_t i = 0; i < table_size; i++) {
ID key = ibf_load_id(load, (ID)ibf_load_small_value(load, &reading_pos));
VALUE value = ibf_load_small_value(load, &reading_pos);
if (!key) key = rb_make_temporary_id(i);
rb_id_table_insert(tbl, key, value);
static ibf_offset_t
RUBY_ASSERT(dump->current_buffer == &dump->global_buffer);
unsigned int *positions;
const VALUE location_pathobj_index = ibf_dump_object(dump, body->location.pathobj);
const VALUE location_base_label_index = ibf_dump_object(dump, body->location.base_label);
const VALUE location_label_index = ibf_dump_object(dump, body->location.label);
#if IBF_ISEQ_ENABLE_LOCAL_BUFFER
ibf_offset_t iseq_start = ibf_dump_pos(dump);
buffer.obj_table = ibf_dump_object_table_new();
dump->current_buffer = &buffer;
const ibf_offset_t bytecode_offset = ibf_dump_code(dump, iseq);
const ibf_offset_t bytecode_size = ibf_dump_pos(dump) - bytecode_offset;
const ibf_offset_t param_opt_table_offset = ibf_dump_param_opt_table(dump, iseq);
const ibf_offset_t param_keyword_offset = ibf_dump_param_keyword(dump, iseq);
const ibf_offset_t insns_info_body_offset = ibf_dump_insns_info_body(dump, iseq);
positions = rb_iseq_insns_info_decode_positions(ISEQ_BODY(iseq));
const ibf_offset_t insns_info_positions_offset = ibf_dump_insns_info_positions(dump, positions, body->insns_info.size);
const ibf_offset_t local_table_offset = ibf_dump_local_table(dump, iseq);
const unsigned int catch_table_size = body->catch_table ? body->catch_table->size : 0;
const ibf_offset_t catch_table_offset = ibf_dump_catch_table(dump, iseq);
const int parent_iseq_index = ibf_dump_iseq(dump, ISEQ_BODY(iseq)->parent_iseq);
const int local_iseq_index = ibf_dump_iseq(dump, ISEQ_BODY(iseq)->local_iseq);
const int mandatory_only_iseq_index = ibf_dump_iseq(dump, ISEQ_BODY(iseq)->mandatory_only_iseq);
const ibf_offset_t ci_entries_offset = ibf_dump_ci_entries(dump, iseq);
const ibf_offset_t outer_variables_offset = ibf_dump_outer_variables(dump, iseq);
#if IBF_ISEQ_ENABLE_LOCAL_BUFFER
ibf_offset_t local_obj_list_offset;
unsigned int local_obj_list_size;
ibf_dump_object_list(dump, &local_obj_list_offset, &local_obj_list_size);
ibf_offset_t body_offset = ibf_dump_pos(dump);
unsigned int param_flags =
    (body->param.flags.has_lead << 0) |
    (body->param.flags.has_opt << 1) |
    (body->param.flags.has_rest << 2) |
    (body->param.flags.has_post << 3) |
    (body->param.flags.has_kw << 4) |
    (body->param.flags.has_kwrest << 5) |
    (body->param.flags.has_block << 6) |
    (body->param.flags.ambiguous_param0 << 7) |
    (body->param.flags.accepts_no_kwarg << 8) |
    (body->param.flags.ruby2_keywords << 9) |
    (body->param.flags.anon_rest << 10) |
    (body->param.flags.anon_kwrest << 11) |
    (body->param.flags.use_block << 12) |
    (body->param.flags.forwardable << 13);
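/*
 * The boolean fields of body->param.flags are packed into a single small
 * value here, one bit per flag in the order listed above (has_lead at bit 0
 * through forwardable at bit 13); ibf_load_iseq_each() unpacks the same bits
 * with matching shifts when the body is rebuilt.
 */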
#if IBF_ISEQ_ENABLE_LOCAL_BUFFER
# define IBF_BODY_OFFSET(x) (x)
# define IBF_BODY_OFFSET(x) (body_offset - (x))
ibf_dump_write_small_value(dump, body->type);
ibf_dump_write_small_value(dump, body->iseq_size);
ibf_dump_write_small_value(dump, IBF_BODY_OFFSET(bytecode_offset));
ibf_dump_write_small_value(dump, bytecode_size);
ibf_dump_write_small_value(dump, param_flags);
ibf_dump_write_small_value(dump, body->param.size);
ibf_dump_write_small_value(dump, body->param.lead_num);
ibf_dump_write_small_value(dump, body->param.opt_num);
ibf_dump_write_small_value(dump, body->param.rest_start);
ibf_dump_write_small_value(dump, body->param.post_start);
ibf_dump_write_small_value(dump, body->param.post_num);
ibf_dump_write_small_value(dump, body->param.block_start);
13334 ibf_dump_write_small_value(dump, IBF_BODY_OFFSET(param_opt_table_offset));
13335 ibf_dump_write_small_value(dump, param_keyword_offset);
13336 ibf_dump_write_small_value(dump, location_pathobj_index);
13337 ibf_dump_write_small_value(dump, location_base_label_index);
13338 ibf_dump_write_small_value(dump, location_label_index);
13339 ibf_dump_write_small_value(dump, body->location.first_lineno);
13340 ibf_dump_write_small_value(dump, body->location.node_id);
13341 ibf_dump_write_small_value(dump, body->location.code_location.beg_pos.lineno);
13342 ibf_dump_write_small_value(dump, body->location.code_location.beg_pos.column);
13343 ibf_dump_write_small_value(dump, body->location.code_location.end_pos.lineno);
13344 ibf_dump_write_small_value(dump, body->location.code_location.end_pos.column);
13345 ibf_dump_write_small_value(dump, IBF_BODY_OFFSET(insns_info_body_offset));
13346 ibf_dump_write_small_value(dump, IBF_BODY_OFFSET(insns_info_positions_offset));
13347 ibf_dump_write_small_value(dump, body->insns_info.size);
13348 ibf_dump_write_small_value(dump, IBF_BODY_OFFSET(local_table_offset));
13349 ibf_dump_write_small_value(dump, catch_table_size);
13350 ibf_dump_write_small_value(dump, IBF_BODY_OFFSET(catch_table_offset));
13351 ibf_dump_write_small_value(dump, parent_iseq_index);
13352 ibf_dump_write_small_value(dump, local_iseq_index);
13353 ibf_dump_write_small_value(dump, mandatory_only_iseq_index);
13354 ibf_dump_write_small_value(dump, IBF_BODY_OFFSET(ci_entries_offset));
13355 ibf_dump_write_small_value(dump, IBF_BODY_OFFSET(outer_variables_offset));
13356 ibf_dump_write_small_value(dump, body->variable.flip_count);
13357 ibf_dump_write_small_value(dump, body->local_table_size);
13358 ibf_dump_write_small_value(dump, body->ivc_size);
13359 ibf_dump_write_small_value(dump, body->icvarc_size);
13360 ibf_dump_write_small_value(dump, body->ise_size);
13361 ibf_dump_write_small_value(dump, body->ic_size);
13362 ibf_dump_write_small_value(dump, body->ci_size);
13363 ibf_dump_write_small_value(dump, body->stack_max);
13364 ibf_dump_write_small_value(dump, body->builtin_attrs);
13365 ibf_dump_write_small_value(dump, body->prism ? 1 : 0);
13367 #undef IBF_BODY_OFFSET
13369 #if IBF_ISEQ_ENABLE_LOCAL_BUFFER
13370 ibf_offset_t iseq_length_bytes = ibf_dump_pos(dump);
13372 dump->current_buffer = saved_buffer;
13373 ibf_dump_write(dump,
RSTRING_PTR(buffer.str), iseq_length_bytes);
13375 ibf_offset_t offset = ibf_dump_pos(dump);
13376 ibf_dump_write_small_value(dump, iseq_start);
13377 ibf_dump_write_small_value(dump, iseq_length_bytes);
13378 ibf_dump_write_small_value(dump, body_offset);
13380 ibf_dump_write_small_value(dump, local_obj_list_offset);
13381 ibf_dump_write_small_value(dump, local_obj_list_size);
13383 st_free_table(buffer.obj_table);
13387 return body_offset;
13392 ibf_load_location_str(
const struct ibf_load *load,
VALUE str_index)
13394 VALUE str = ibf_load_object(load, str_index);
13396 str = rb_fstring(str);
13402 ibf_load_iseq_each(
struct ibf_load *load,
rb_iseq_t *iseq, ibf_offset_t offset)
13406 ibf_offset_t reading_pos = offset;
13408 #if IBF_ISEQ_ENABLE_LOCAL_BUFFER
13410 load->current_buffer = &load->global_buffer;
13412 const ibf_offset_t iseq_start = (ibf_offset_t)ibf_load_small_value(load, &reading_pos);
13413 const ibf_offset_t iseq_length_bytes = (ibf_offset_t)ibf_load_small_value(load, &reading_pos);
13414 const ibf_offset_t body_offset = (ibf_offset_t)ibf_load_small_value(load, &reading_pos);
13417 buffer.buff = load->global_buffer.buff + iseq_start;
13418 buffer.size = iseq_length_bytes;
13419 buffer.obj_list_offset = (ibf_offset_t)ibf_load_small_value(load, &reading_pos);
13420 buffer.obj_list_size = (ibf_offset_t)ibf_load_small_value(load, &reading_pos);
13421 buffer.obj_list = pinned_list_new(buffer.obj_list_size);
13423 load->current_buffer = &buffer;
13424 reading_pos = body_offset;
13427 #if IBF_ISEQ_ENABLE_LOCAL_BUFFER
13428 # define IBF_BODY_OFFSET(x) (x)
13430 # define IBF_BODY_OFFSET(x) (offset - (x))
13433 const unsigned int type = (
unsigned int)ibf_load_small_value(load, &reading_pos);
13434 const unsigned int iseq_size = (
unsigned int)ibf_load_small_value(load, &reading_pos);
13435 const ibf_offset_t bytecode_offset = (ibf_offset_t)IBF_BODY_OFFSET(ibf_load_small_value(load, &reading_pos));
13436 const ibf_offset_t bytecode_size = (ibf_offset_t)ibf_load_small_value(load, &reading_pos);
13437 const unsigned int param_flags = (
unsigned int)ibf_load_small_value(load, &reading_pos);
13438 const unsigned int param_size = (
unsigned int)ibf_load_small_value(load, &reading_pos);
13439 const int param_lead_num = (int)ibf_load_small_value(load, &reading_pos);
13440 const int param_opt_num = (int)ibf_load_small_value(load, &reading_pos);
13441 const int param_rest_start = (int)ibf_load_small_value(load, &reading_pos);
13442 const int param_post_start = (int)ibf_load_small_value(load, &reading_pos);
13443 const int param_post_num = (int)ibf_load_small_value(load, &reading_pos);
13444 const int param_block_start = (int)ibf_load_small_value(load, &reading_pos);
13445 const ibf_offset_t param_opt_table_offset = (ibf_offset_t)IBF_BODY_OFFSET(ibf_load_small_value(load, &reading_pos));
13446 const ibf_offset_t param_keyword_offset = (ibf_offset_t)ibf_load_small_value(load, &reading_pos);
13447 const VALUE location_pathobj_index = ibf_load_small_value(load, &reading_pos);
13448 const VALUE location_base_label_index = ibf_load_small_value(load, &reading_pos);
13449 const VALUE location_label_index = ibf_load_small_value(load, &reading_pos);
13450 const int location_first_lineno = (int)ibf_load_small_value(load, &reading_pos);
13451 const int location_node_id = (int)ibf_load_small_value(load, &reading_pos);
13452 const int location_code_location_beg_pos_lineno = (int)ibf_load_small_value(load, &reading_pos);
13453 const int location_code_location_beg_pos_column = (int)ibf_load_small_value(load, &reading_pos);
13454 const int location_code_location_end_pos_lineno = (int)ibf_load_small_value(load, &reading_pos);
13455 const int location_code_location_end_pos_column = (int)ibf_load_small_value(load, &reading_pos);
13456 const ibf_offset_t insns_info_body_offset = (ibf_offset_t)IBF_BODY_OFFSET(ibf_load_small_value(load, &reading_pos));
13457 const ibf_offset_t insns_info_positions_offset = (ibf_offset_t)IBF_BODY_OFFSET(ibf_load_small_value(load, &reading_pos));
13458 const unsigned int insns_info_size = (
unsigned int)ibf_load_small_value(load, &reading_pos);
13459 const ibf_offset_t local_table_offset = (ibf_offset_t)IBF_BODY_OFFSET(ibf_load_small_value(load, &reading_pos));
13460 const unsigned int catch_table_size = (
unsigned int)ibf_load_small_value(load, &reading_pos);
13461 const ibf_offset_t catch_table_offset = (ibf_offset_t)IBF_BODY_OFFSET(ibf_load_small_value(load, &reading_pos));
13462 const int parent_iseq_index = (int)ibf_load_small_value(load, &reading_pos);
13463 const int local_iseq_index = (int)ibf_load_small_value(load, &reading_pos);
13464 const int mandatory_only_iseq_index = (int)ibf_load_small_value(load, &reading_pos);
13465 const ibf_offset_t ci_entries_offset = (ibf_offset_t)IBF_BODY_OFFSET(ibf_load_small_value(load, &reading_pos));
13466 const ibf_offset_t outer_variables_offset = (ibf_offset_t)IBF_BODY_OFFSET(ibf_load_small_value(load, &reading_pos));
13467 const rb_snum_t variable_flip_count = (rb_snum_t)ibf_load_small_value(load, &reading_pos);
13468 const unsigned int local_table_size = (
unsigned int)ibf_load_small_value(load, &reading_pos);
13470 const unsigned int ivc_size = (
unsigned int)ibf_load_small_value(load, &reading_pos);
13471 const unsigned int icvarc_size = (
unsigned int)ibf_load_small_value(load, &reading_pos);
13472 const unsigned int ise_size = (
unsigned int)ibf_load_small_value(load, &reading_pos);
13473 const unsigned int ic_size = (
unsigned int)ibf_load_small_value(load, &reading_pos);
13475 const unsigned int ci_size = (
unsigned int)ibf_load_small_value(load, &reading_pos);
13476 const unsigned int stack_max = (
unsigned int)ibf_load_small_value(load, &reading_pos);
13477 const unsigned int builtin_attrs = (
unsigned int)ibf_load_small_value(load, &reading_pos);
13478 const bool prism = (bool)ibf_load_small_value(load, &reading_pos);
13481 VALUE path = ibf_load_object(load, location_pathobj_index);
13486 realpath = path = rb_fstring(path);
13489 VALUE pathobj = path;
13495 if (!
NIL_P(realpath)) {
13498 "(%x), path=%+"PRIsVALUE,
13499 realpath,
TYPE(realpath), path);
13501 realpath = rb_fstring(realpath);
13507 rb_iseq_pathobj_set(iseq, path, realpath);
13512 VALUE dummy_frame = rb_vm_push_frame_fname(ec, path);
13514 #undef IBF_BODY_OFFSET
load_body->type = type;
load_body->stack_max = stack_max;
load_body->param.flags.has_lead = (param_flags >> 0) & 1;
load_body->param.flags.has_opt = (param_flags >> 1) & 1;
load_body->param.flags.has_rest = (param_flags >> 2) & 1;
load_body->param.flags.has_post = (param_flags >> 3) & 1;
load_body->param.flags.has_kw = FALSE;
load_body->param.flags.has_kwrest = (param_flags >> 5) & 1;
load_body->param.flags.has_block = (param_flags >> 6) & 1;
load_body->param.flags.ambiguous_param0 = (param_flags >> 7) & 1;
load_body->param.flags.accepts_no_kwarg = (param_flags >> 8) & 1;
load_body->param.flags.ruby2_keywords = (param_flags >> 9) & 1;
load_body->param.flags.anon_rest = (param_flags >> 10) & 1;
load_body->param.flags.anon_kwrest = (param_flags >> 11) & 1;
load_body->param.flags.use_block = (param_flags >> 12) & 1;
load_body->param.flags.forwardable = (param_flags >> 13) & 1;
load_body->param.size = param_size;
load_body->param.lead_num = param_lead_num;
load_body->param.opt_num = param_opt_num;
load_body->param.rest_start = param_rest_start;
load_body->param.post_start = param_post_start;
load_body->param.post_num = param_post_num;
load_body->param.block_start = param_block_start;
load_body->local_table_size = local_table_size;
load_body->ci_size = ci_size;
load_body->insns_info.size = insns_info_size;
ISEQ_COVERAGE_SET(iseq, Qnil);
ISEQ_ORIGINAL_ISEQ_CLEAR(iseq);
load_body->variable.flip_count = variable_flip_count;
load_body->variable.script_lines = Qnil;
load_body->location.first_lineno = location_first_lineno;
load_body->location.node_id = location_node_id;
load_body->location.code_location.beg_pos.lineno = location_code_location_beg_pos_lineno;
load_body->location.code_location.beg_pos.column = location_code_location_beg_pos_column;
load_body->location.code_location.end_pos.lineno = location_code_location_end_pos_lineno;
load_body->location.code_location.end_pos.column = location_code_location_end_pos_column;
load_body->builtin_attrs = builtin_attrs;
load_body->prism = prism;
load_body->ivc_size = ivc_size;
load_body->icvarc_size = icvarc_size;
load_body->ise_size = ise_size;
load_body->ic_size = ic_size;
if (ISEQ_IS_SIZE(load_body)) {
load_body->is_entries = NULL;
ibf_load_ci_entries(load, ci_entries_offset, ci_size, &load_body->call_data);
load_body->outer_variables = ibf_load_outer_variables(load, outer_variables_offset);
load_body->param.opt_table = ibf_load_param_opt_table(load, param_opt_table_offset, param_opt_num);
load_body->param.keyword = ibf_load_param_keyword(load, param_keyword_offset);
load_body->param.flags.has_kw = (param_flags >> 4) & 1;
load_body->insns_info.body = ibf_load_insns_info_body(load, insns_info_body_offset, insns_info_size);
load_body->insns_info.positions = ibf_load_insns_info_positions(load, insns_info_positions_offset, insns_info_size);
load_body->local_table = ibf_load_local_table(load, local_table_offset, local_table_size);
load_body->catch_table = ibf_load_catch_table(load, catch_table_offset, catch_table_size);
load_body->parent_iseq = ibf_load_iseq(load, (const rb_iseq_t *)(VALUE)parent_iseq_index);
load_body->local_iseq = ibf_load_iseq(load, (const rb_iseq_t *)(VALUE)local_iseq_index);
load_body->mandatory_only_iseq = ibf_load_iseq(load, (const rb_iseq_t *)(VALUE)mandatory_only_iseq_index);
if (load_body->param.keyword != NULL) {
struct rb_iseq_param_keyword *keyword = (struct rb_iseq_param_keyword *)load_body->param.keyword;
keyword->table = &load_body->local_table[keyword->bits_start - keyword->num];
13588 ibf_load_code(load, iseq, bytecode_offset, bytecode_size, iseq_size);
13589 #if VM_INSN_INFO_TABLE_IMPL == 2
13590 rb_iseq_insns_info_encode_positions(iseq);
13593 rb_iseq_translate_threaded_code(iseq);
13595 #if IBF_ISEQ_ENABLE_LOCAL_BUFFER
13596 load->current_buffer = &load->global_buffer;
13599 RB_OBJ_WRITE(iseq, &load_body->location.base_label, ibf_load_location_str(load, location_base_label_index));
13600 RB_OBJ_WRITE(iseq, &load_body->location.label, ibf_load_location_str(load, location_label_index));
13602 #if IBF_ISEQ_ENABLE_LOCAL_BUFFER
13603 load->current_buffer = saved_buffer;
13605 verify_call_cache(iseq);
13608 rb_vm_pop_frame_no_int(ec);
13618 ibf_dump_iseq_list_i(st_data_t key, st_data_t val, st_data_t
ptr)
13623 ibf_offset_t offset = ibf_dump_iseq_each(args->dump, iseq);
13626 return ST_CONTINUE;
13636 args.offset_list = offset_list;
13638 st_foreach(dump->iseq_table, ibf_dump_iseq_list_i, (st_data_t)&args);
13641 st_index_t size = dump->iseq_table->num_entries;
13642 ibf_offset_t *offsets =
ALLOCA_N(ibf_offset_t, size);
13644 for (i = 0; i < size; i++) {
13648 ibf_dump_align(dump,
sizeof(ibf_offset_t));
13649 header->iseq_list_offset = ibf_dump_write(dump, offsets,
sizeof(ibf_offset_t) * size);
13650 header->iseq_list_size = (
unsigned int)size;
13653 #define IBF_OBJECT_INTERNAL FL_PROMOTED0
13662 unsigned int type: 5;
13663 unsigned int special_const: 1;
13664 unsigned int frozen: 1;
13665 unsigned int internal: 1;
13668 enum ibf_object_class_index {
13669 IBF_OBJECT_CLASS_OBJECT,
13670 IBF_OBJECT_CLASS_ARRAY,
13671 IBF_OBJECT_CLASS_STANDARD_ERROR,
13672 IBF_OBJECT_CLASS_NO_MATCHING_PATTERN_ERROR,
13673 IBF_OBJECT_CLASS_TYPE_ERROR,
13674 IBF_OBJECT_CLASS_NO_MATCHING_PATTERN_KEY_ERROR,
13684 long keyval[FLEX_ARY_LEN];
13697 BDIGIT digits[FLEX_ARY_LEN];
13700 enum ibf_object_data_type {
13701 IBF_OBJECT_DATA_ENCODING,
13712 #define IBF_ALIGNED_OFFSET(align, offset) \
13713 ((((offset) - 1) / (align) + 1) * (align))
13714 #define IBF_OBJBODY(type, offset) (const type *)\
13715 ibf_load_check_offset(load, IBF_ALIGNED_OFFSET(RUBY_ALIGNOF(type), offset))
13717 static const void *
13718 ibf_load_check_offset(
const struct ibf_load *load,
size_t offset)
13720 if (offset >= load->current_buffer->size) {
13723 return load->current_buffer->buff + offset;
13726 NORETURN(
static void ibf_dump_object_unsupported(
struct ibf_dump *dump,
VALUE obj));
13729 ibf_dump_object_unsupported(
struct ibf_dump *dump,
VALUE obj)
13732 rb_raw_obj_info(buff,
sizeof(buff), obj);
13748 enum ibf_object_class_index cindex;
13749 if (obj == rb_cObject) {
13750 cindex = IBF_OBJECT_CLASS_OBJECT;
13753 cindex = IBF_OBJECT_CLASS_ARRAY;
13756 cindex = IBF_OBJECT_CLASS_STANDARD_ERROR;
13759 cindex = IBF_OBJECT_CLASS_NO_MATCHING_PATTERN_ERROR;
13762 cindex = IBF_OBJECT_CLASS_TYPE_ERROR;
13765 cindex = IBF_OBJECT_CLASS_NO_MATCHING_PATTERN_KEY_ERROR;
13768 rb_obj_info_dump(obj);
13770 rb_bug(
"unsupported class");
13772 ibf_dump_write_small_value(dump, (
VALUE)cindex);
13778 enum ibf_object_class_index cindex = (
enum ibf_object_class_index)ibf_load_small_value(load, &offset);
13781 case IBF_OBJECT_CLASS_OBJECT:
13783 case IBF_OBJECT_CLASS_ARRAY:
13785 case IBF_OBJECT_CLASS_STANDARD_ERROR:
13787 case IBF_OBJECT_CLASS_NO_MATCHING_PATTERN_ERROR:
13789 case IBF_OBJECT_CLASS_TYPE_ERROR:
13791 case IBF_OBJECT_CLASS_NO_MATCHING_PATTERN_KEY_ERROR:
13803 (void)IBF_W(&dbl,
double, 1);
13809 const double *dblp = IBF_OBJBODY(
double, offset);
13820 if (encindex > RUBY_ENCINDEX_BUILTIN_MAX) {
13823 encindex = RUBY_ENCINDEX_BUILTIN_MAX + ibf_dump_object(dump,
rb_str_new2(enc_name));
13826 ibf_dump_write_small_value(dump, encindex);
13827 ibf_dump_write_small_value(dump,
len);
13834 ibf_offset_t reading_pos = offset;
13836 int encindex = (int)ibf_load_small_value(load, &reading_pos);
13837 const long len = (long)ibf_load_small_value(load, &reading_pos);
13838 const char *
ptr = load->current_buffer->buff + reading_pos;
13840 if (encindex > RUBY_ENCINDEX_BUILTIN_MAX) {
13841 VALUE enc_name_str = ibf_load_object(load, encindex - RUBY_ENCINDEX_BUILTIN_MAX);
13846 if (header->frozen && !header->internal) {
13853 if (header->frozen) str = rb_fstring(str);
13864 regexp.srcstr = (long)ibf_dump_object(dump, srcstr);
13866 ibf_dump_write_byte(dump, (
unsigned char)regexp.option);
13867 ibf_dump_write_small_value(dump, regexp.srcstr);
13874 regexp.option = ibf_load_byte(load, &offset);
13875 regexp.srcstr = ibf_load_small_value(load, &offset);
13877 VALUE srcstr = ibf_load_object(load, regexp.srcstr);
13878 VALUE reg = rb_reg_compile(srcstr, (
int)regexp.option, NULL, 0);
13890 ibf_dump_write_small_value(dump,
len);
13891 for (i=0; i<
len; i++) {
13892 long index = (long)ibf_dump_object(dump,
RARRAY_AREF(obj, i));
13893 ibf_dump_write_small_value(dump, index);
13900 ibf_offset_t reading_pos = offset;
13902 const long len = (long)ibf_load_small_value(load, &reading_pos);
13907 for (i=0; i<
len; i++) {
13908 const VALUE index = ibf_load_small_value(load, &reading_pos);
13918 ibf_dump_object_hash_i(st_data_t key, st_data_t val, st_data_t
ptr)
13922 VALUE key_index = ibf_dump_object(dump, (
VALUE)key);
13923 VALUE val_index = ibf_dump_object(dump, (
VALUE)val);
13925 ibf_dump_write_small_value(dump, key_index);
13926 ibf_dump_write_small_value(dump, val_index);
13927 return ST_CONTINUE;
13934 ibf_dump_write_small_value(dump, (
VALUE)
len);
13942 long len = (long)ibf_load_small_value(load, &offset);
13943 VALUE obj = rb_hash_new_with_size(
len);
13946 for (i = 0; i <
len; i++) {
13947 VALUE key_index = ibf_load_small_value(load, &offset);
13948 VALUE val_index = ibf_load_small_value(load, &offset);
13950 VALUE key = ibf_load_object(load, key_index);
13951 VALUE val = ibf_load_object(load, val_index);
13954 rb_hash_rehash(obj);
13970 range.class_index = 0;
13973 range.beg = (long)ibf_dump_object(dump, beg);
13974 range.end = (long)ibf_dump_object(dump, end);
13989 VALUE beg = ibf_load_object(load, range->beg);
13990 VALUE end = ibf_load_object(load, range->end);
14000 ssize_t
len = BIGNUM_LEN(obj);
14001 ssize_t slen = BIGNUM_SIGN(obj) > 0 ?
len :
len * -1;
14002 BDIGIT *d = BIGNUM_DIGITS(obj);
14004 (void)IBF_W(&slen, ssize_t, 1);
14005 IBF_WP(d, BDIGIT,
len);
14012 int sign = bignum->slen > 0;
14013 ssize_t
len = sign > 0 ? bignum->slen : -1 * bignum->slen;
14014 const int big_unpack_flags =
14028 if (rb_data_is_encoding(obj)) {
14031 long len = strlen(name) + 1;
14033 data[0] = IBF_OBJECT_DATA_ENCODING;
14035 (void)IBF_W(data,
long, 2);
14036 IBF_WP(name,
char,
len);
14039 ibf_dump_object_unsupported(dump, obj);
14046 const long *body = IBF_OBJBODY(
long, offset);
14047 const enum ibf_object_data_type
type = (
enum ibf_object_data_type)body[0];
14049 const char *data = (
const char *)&body[2];
14052 case IBF_OBJECT_DATA_ENCODING:
14059 return ibf_load_object_unsupported(load, header, offset);
14063 ibf_dump_object_complex_rational(
struct ibf_dump *dump,
VALUE obj)
14066 data[0] = (long)ibf_dump_object(dump, RCOMPLEX(obj)->real);
14067 data[1] = (long)ibf_dump_object(dump, RCOMPLEX(obj)->imag);
14069 (void)IBF_W(data,
long, 2);
14073 ibf_load_object_complex_rational(
const struct ibf_load *load,
const struct ibf_object_header *header, ibf_offset_t offset)
14076 VALUE a = ibf_load_object(load, nums->a);
14077 VALUE b = ibf_load_object(load, nums->b);
14089 ibf_dump_object_string(dump,
rb_sym2str(obj));
14095 ibf_offset_t reading_pos = offset;
14097 int encindex = (int)ibf_load_small_value(load, &reading_pos);
14098 const long len = (long)ibf_load_small_value(load, &reading_pos);
14099 const char *
ptr = load->current_buffer->buff + reading_pos;
14101 if (encindex > RUBY_ENCINDEX_BUILTIN_MAX) {
14102 VALUE enc_name_str = ibf_load_object(load, encindex - RUBY_ENCINDEX_BUILTIN_MAX);
14110 typedef void (*ibf_dump_object_function)(
struct ibf_dump *dump,
VALUE obj);
14111 static const ibf_dump_object_function dump_object_functions[
RUBY_T_MASK+1] = {
14112 ibf_dump_object_unsupported,
14113 ibf_dump_object_unsupported,
14114 ibf_dump_object_class,
14115 ibf_dump_object_unsupported,
14116 ibf_dump_object_float,
14117 ibf_dump_object_string,
14118 ibf_dump_object_regexp,
14119 ibf_dump_object_array,
14120 ibf_dump_object_hash,
14121 ibf_dump_object_struct,
14122 ibf_dump_object_bignum,
14123 ibf_dump_object_unsupported,
14124 ibf_dump_object_data,
14125 ibf_dump_object_unsupported,
14126 ibf_dump_object_complex_rational,
14127 ibf_dump_object_complex_rational,
14128 ibf_dump_object_unsupported,
14129 ibf_dump_object_unsupported,
14130 ibf_dump_object_unsupported,
14131 ibf_dump_object_unsupported,
14132 ibf_dump_object_symbol,
14133 ibf_dump_object_unsupported,
14134 ibf_dump_object_unsupported,
14135 ibf_dump_object_unsupported,
14136 ibf_dump_object_unsupported,
14137 ibf_dump_object_unsupported,
14138 ibf_dump_object_unsupported,
14139 ibf_dump_object_unsupported,
14140 ibf_dump_object_unsupported,
14141 ibf_dump_object_unsupported,
14142 ibf_dump_object_unsupported,
14143 ibf_dump_object_unsupported,
unsigned char byte =
    (header.type << 0) |
    (header.special_const << 5) |
    (header.frozen << 6) |
    (header.internal << 7);
ibf_load_object_object_header(const struct ibf_load *load, ibf_offset_t *offset)
unsigned char byte = ibf_load_byte(load, offset);
header.type = (byte >> 0) & 0x1f;
header.special_const = (byte >> 5) & 0x01;
header.frozen = (byte >> 6) & 0x01;
header.internal = (byte >> 7) & 0x01;
static ibf_offset_t
ibf_offset_t current_offset;
IBF_ZERO(obj_header);
obj_header.type = TYPE(obj);
IBF_W_ALIGN(ibf_offset_t);
current_offset = ibf_dump_pos(dump);
obj_header.special_const = TRUE;
obj_header.frozen = TRUE;
obj_header.internal = TRUE;
ibf_dump_object_object_header(dump, obj_header);
ibf_dump_write_small_value(dump, obj);
obj_header.special_const = FALSE;
ibf_dump_object_object_header(dump, obj_header);
(*dump_object_functions[obj_header.type])(dump, obj);
return current_offset;
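/*
 * Each dumped object starts with a one-byte header: bits 0..4 hold the
 * ruby_value_type, bit 5 marks special constants (which are then stored
 * inline as a small value instead of going through a type-specific dumper),
 * bit 6 records frozen-ness, and bit 7 the hidden/internal flag.
 */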
14204 static const ibf_load_object_function load_object_functions[
RUBY_T_MASK+1] = {
14205 ibf_load_object_unsupported,
14206 ibf_load_object_unsupported,
14207 ibf_load_object_class,
14208 ibf_load_object_unsupported,
14209 ibf_load_object_float,
14210 ibf_load_object_string,
14211 ibf_load_object_regexp,
14212 ibf_load_object_array,
14213 ibf_load_object_hash,
14214 ibf_load_object_struct,
14215 ibf_load_object_bignum,
14216 ibf_load_object_unsupported,
14217 ibf_load_object_data,
14218 ibf_load_object_unsupported,
14219 ibf_load_object_complex_rational,
14220 ibf_load_object_complex_rational,
14221 ibf_load_object_unsupported,
14222 ibf_load_object_unsupported,
14223 ibf_load_object_unsupported,
14224 ibf_load_object_unsupported,
14225 ibf_load_object_symbol,
14226 ibf_load_object_unsupported,
14227 ibf_load_object_unsupported,
14228 ibf_load_object_unsupported,
14229 ibf_load_object_unsupported,
14230 ibf_load_object_unsupported,
14231 ibf_load_object_unsupported,
14232 ibf_load_object_unsupported,
14233 ibf_load_object_unsupported,
14234 ibf_load_object_unsupported,
14235 ibf_load_object_unsupported,
14236 ibf_load_object_unsupported,
14240 ibf_load_object(
const struct ibf_load *load,
VALUE object_index)
14242 if (object_index == 0) {
14246 VALUE obj = pinned_list_fetch(load->current_buffer->obj_list, (
long)object_index);
14248 ibf_offset_t *offsets = (ibf_offset_t *)(load->current_buffer->obj_list_offset + load->current_buffer->buff);
14249 ibf_offset_t offset = offsets[object_index];
14250 const struct ibf_object_header header = ibf_load_object_object_header(load, &offset);
14253 fprintf(stderr,
"ibf_load_object: list=%#x offsets=%p offset=%#x\n",
14254 load->current_buffer->obj_list_offset, (
void *)offsets, offset);
14255 fprintf(stderr,
"ibf_load_object: type=%#x special=%d frozen=%d internal=%d\n",
14256 header.type, header.special_const, header.frozen, header.internal);
14258 if (offset >= load->current_buffer->size) {
14262 if (header.special_const) {
14263 ibf_offset_t reading_pos = offset;
14265 obj = ibf_load_small_value(load, &reading_pos);
14268 obj = (*load_object_functions[header.type])(load, &header, offset);
14271 pinned_list_store(load->current_buffer->obj_list, (
long)object_index, obj);
14274 fprintf(stderr,
"ibf_load_object: index=%#"PRIxVALUE
" obj=%#"PRIxVALUE
"\n",
14275 object_index, obj);
14288 ibf_dump_object_list_i(st_data_t key, st_data_t val, st_data_t
ptr)
14293 ibf_offset_t offset = ibf_dump_object_object(args->dump, obj);
14296 return ST_CONTINUE;
14300 ibf_dump_object_list(
struct ibf_dump *dump, ibf_offset_t *obj_list_offset,
unsigned int *obj_list_size)
14302 st_table *obj_table = dump->current_buffer->obj_table;
14307 args.offset_list = offset_list;
14309 st_foreach(obj_table, ibf_dump_object_list_i, (st_data_t)&args);
14311 IBF_W_ALIGN(ibf_offset_t);
14312 *obj_list_offset = ibf_dump_pos(dump);
14314 st_index_t size = obj_table->num_entries;
14317 for (i=0; i<size; i++) {
14322 *obj_list_size = (
unsigned int)size;
14326 ibf_dump_mark(
void *
ptr)
14336 ibf_dump_free(
void *
ptr)
14339 if (dump->global_buffer.obj_table) {
14340 st_free_table(dump->global_buffer.obj_table);
14341 dump->global_buffer.obj_table = 0;
14343 if (dump->iseq_table) {
14344 st_free_table(dump->iseq_table);
14345 dump->iseq_table = 0;
14350 ibf_dump_memsize(
const void *
ptr)
14354 if (dump->iseq_table) size += st_memsize(dump->iseq_table);
14355 if (dump->global_buffer.obj_table) size += st_memsize(dump->global_buffer.obj_table);
14361 {ibf_dump_mark, ibf_dump_free, ibf_dump_memsize,},
14362 0, 0, RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_EMBEDDABLE
14368 dump->global_buffer.obj_table = NULL;
14369 dump->iseq_table = NULL;
14372 dump->global_buffer.obj_table = ibf_dump_object_table_new();
14373 dump->iseq_table = st_init_numtable();
14375 dump->current_buffer = &dump->global_buffer;
if (ISEQ_BODY(iseq)->parent_iseq != NULL ||
    ISEQ_BODY(iseq)->local_iseq != iseq) {
if (RTEST(ISEQ_COVERAGE(iseq))) {
ibf_dump_setup(dump, dump_obj);
ibf_dump_write(dump, &header, sizeof(header));
ibf_dump_iseq(dump, iseq);
header.magic[0] = 'Y';
header.magic[1] = 'A';
header.magic[2] = 'R';
header.magic[3] = 'B';
header.major_version = IBF_MAJOR_VERSION;
header.minor_version = IBF_MINOR_VERSION;
header.endian = IBF_ENDIAN_MARK;
ibf_dump_iseq_list(dump, &header);
ibf_dump_object_list(dump, &header.global_object_list_offset, &header.global_object_list_size);
header.size = ibf_dump_pos(dump);
VALUE opt_str = opt;
ibf_dump_write(dump, ptr, header.extra_size);
header.extra_size = 0;
ibf_dump_overwrite(dump, &header, sizeof(header), 0);
str = dump->global_buffer.str;
14429 static const ibf_offset_t *
14430 ibf_iseq_list(
const struct ibf_load *load)
14432 return (
const ibf_offset_t *)(load->global_buffer.buff + load->header->iseq_list_offset);
14436 rb_ibf_load_iseq_complete(
rb_iseq_t *iseq)
14440 ibf_offset_t offset = ibf_iseq_list(load)[iseq->aux.loader.index];
14443 fprintf(stderr,
"rb_ibf_load_iseq_complete: index=%#x offset=%#x size=%#x\n",
14444 iseq->aux.loader.index, offset,
14445 load->header->size);
14447 ibf_load_iseq_each(load, iseq, offset);
14448 ISEQ_COMPILE_DATA_CLEAR(iseq);
14450 rb_iseq_init_trace(iseq);
14451 load->iseq = prev_src_iseq;
14456 rb_iseq_complete(
const rb_iseq_t *iseq)
14458 rb_ibf_load_iseq_complete((
rb_iseq_t *)iseq);
14466 int iseq_index = (int)(
VALUE)index_iseq;
14469 fprintf(stderr,
"ibf_load_iseq: index_iseq=%p iseq_list=%p\n",
14470 (
void *)index_iseq, (
void *)load->iseq_list);
14472 if (iseq_index == -1) {
14476 VALUE iseqv = pinned_list_fetch(load->iseq_list, iseq_index);
14479 fprintf(stderr,
"ibf_load_iseq: iseqv=%p\n", (
void *)iseqv);
14487 fprintf(stderr,
"ibf_load_iseq: new iseq=%p\n", (
void *)iseq);
14490 iseq->aux.loader.obj = load->loader_obj;
14491 iseq->aux.loader.index = iseq_index;
14493 fprintf(stderr,
"ibf_load_iseq: iseq=%p loader_obj=%p index=%d\n",
14494 (
void *)iseq, (
void *)load->loader_obj, iseq_index);
14496 pinned_list_store(load->iseq_list, iseq_index, (
VALUE)iseq);
14498 if (!USE_LAZY_LOAD || GET_VM()->builtin_function_table) {
14500 fprintf(stderr,
"ibf_load_iseq: loading iseq=%p\n", (
void *)iseq);
14502 rb_ibf_load_iseq_complete(iseq);
14506 fprintf(stderr,
"ibf_load_iseq: iseq=%p loaded %p\n",
14507 (
void *)iseq, (
void *)load->iseq);
ibf_load_setup_bytes(struct ibf_load *load, VALUE loader_obj, const char *bytes, size_t size)
load->loader_obj = loader_obj;
load->global_buffer.buff = bytes;
load->header = header;
load->global_buffer.size = header->size;
load->global_buffer.obj_list_offset = header->global_object_list_offset;
load->global_buffer.obj_list_size = header->global_object_list_size;
RB_OBJ_WRITE(loader_obj, &load->iseq_list, pinned_list_new(header->iseq_list_size));
RB_OBJ_WRITE(loader_obj, &load->global_buffer.obj_list, pinned_list_new(load->global_buffer.obj_list_size));
load->current_buffer = &load->global_buffer;
if (size < header->size) {
if (strncmp(header->magic, "YARB", 4) != 0) {
if (header->major_version != IBF_MAJOR_VERSION ||
    header->minor_version != IBF_MINOR_VERSION) {
header->major_version, header->minor_version, IBF_MAJOR_VERSION, IBF_MINOR_VERSION);
if (header->endian != IBF_ENDIAN_MARK) {
if (header->iseq_list_offset % RUBY_ALIGNOF(ibf_offset_t)) {
header->iseq_list_offset);
if (load->global_buffer.obj_list_offset % RUBY_ALIGNOF(ibf_offset_t)) {
load->global_buffer.obj_list_offset);
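/*
 * ibf_load_setup_bytes() wires the loader to the raw byte buffer and then
 * validates the header before anything is interpreted: the declared size
 * must fit within the input, the magic must be "YARB", the major/minor
 * versions and the endian mark must match the running VM, and the iseq/
 * object list offsets must be properly aligned for ibf_offset_t.
 */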
14566 if (USE_LAZY_LOAD) {
14575 ibf_loader_mark(
void *
ptr)
14584 ibf_loader_free(
void *
ptr)
14591 ibf_loader_memsize(
const void *
ptr)
14598 {ibf_loader_mark, ibf_loader_free, ibf_loader_memsize,},
14599 0, 0, RUBY_TYPED_WB_PROTECTED | RUBY_TYPED_FREE_IMMEDIATELY
14603 rb_iseq_ibf_load(
VALUE str)
14609 ibf_load_setup(load, loader_obj, str);
14610 iseq = ibf_load_iseq(load, 0);
14617 rb_iseq_ibf_load_bytes(
const char *bytes,
size_t size)
14623 ibf_load_setup_bytes(load, loader_obj, bytes, size);
14624 iseq = ibf_load_iseq(load, 0);
14631 rb_iseq_ibf_load_extra_data(
VALUE str)
14637 ibf_load_setup(load, loader_obj, str);
14638 extra_str =
rb_str_new(load->global_buffer.buff + load->header->size, load->header->extra_size);
14643 #include "prism_compile.c"
#define RUBY_ASSERT(...)
Asserts that the given expression is truthy if and only if RUBY_DEBUG is truthy.
#define RUBY_ALIGNOF
Wraps (or simulates) alignof.
#define RUBY_EVENT_END
Encountered an end of a class clause.
#define RUBY_EVENT_C_CALL
A method, written in C, is called.
#define RUBY_EVENT_B_RETURN
Encountered a next statement.
#define RUBY_EVENT_CLASS
Encountered a new class.
#define RUBY_EVENT_NONE
No events.
#define RUBY_EVENT_LINE
Encountered a new line.
#define RUBY_EVENT_RETURN
Encountered a return statement.
#define RUBY_EVENT_C_RETURN
Return from a method, written in C.
#define RUBY_EVENT_B_CALL
Encountered an yield statement.
uint32_t rb_event_flag_t
Represents event(s).
#define RUBY_EVENT_CALL
A method, written in Ruby, is called.
#define RUBY_EVENT_RESCUE
Encountered a rescue statement.
#define rb_str_new2
Old name of rb_str_new_cstr.
#define T_COMPLEX
Old name of RUBY_T_COMPLEX.
#define TYPE(_)
Old name of rb_type.
#define NUM2ULONG
Old name of RB_NUM2ULONG.
#define NUM2LL
Old name of RB_NUM2LL.
#define REALLOC_N
Old name of RB_REALLOC_N.
#define ALLOCV
Old name of RB_ALLOCV.
#define RFLOAT_VALUE
Old name of rb_float_value.
#define T_STRING
Old name of RUBY_T_STRING.
#define xfree
Old name of ruby_xfree.
#define Qundef
Old name of RUBY_Qundef.
#define INT2FIX
Old name of RB_INT2FIX.
#define T_NIL
Old name of RUBY_T_NIL.
#define UNREACHABLE
Old name of RBIMPL_UNREACHABLE.
#define T_FLOAT
Old name of RUBY_T_FLOAT.
#define ID2SYM
Old name of RB_ID2SYM.
#define T_BIGNUM
Old name of RUBY_T_BIGNUM.
#define SPECIAL_CONST_P
Old name of RB_SPECIAL_CONST_P.
#define OBJ_FREEZE
Old name of RB_OBJ_FREEZE.
#define ULONG2NUM
Old name of RB_ULONG2NUM.
#define UNREACHABLE_RETURN
Old name of RBIMPL_UNREACHABLE_RETURN.
#define SYM2ID
Old name of RB_SYM2ID.
#define FIX2UINT
Old name of RB_FIX2UINT.
#define ZALLOC
Old name of RB_ZALLOC.
#define CLASS_OF
Old name of rb_class_of.
#define FIXABLE
Old name of RB_FIXABLE.
#define xmalloc
Old name of ruby_xmalloc.
#define LONG2FIX
Old name of RB_INT2FIX.
#define FIX2INT
Old name of RB_FIX2INT.
#define NUM2UINT
Old name of RB_NUM2UINT.
#define ZALLOC_N
Old name of RB_ZALLOC_N.
#define ASSUME
Old name of RBIMPL_ASSUME.
#define T_RATIONAL
Old name of RUBY_T_RATIONAL.
#define T_HASH
Old name of RUBY_T_HASH.
#define ALLOC_N
Old name of RB_ALLOC_N.
#define FL_SET
Old name of RB_FL_SET.
#define Qtrue
Old name of RUBY_Qtrue.
#define NUM2INT
Old name of RB_NUM2INT.
#define Qnil
Old name of RUBY_Qnil.
#define Qfalse
Old name of RUBY_Qfalse.
#define FIX2LONG
Old name of RB_FIX2LONG.
#define T_ARRAY
Old name of RUBY_T_ARRAY.
#define NIL_P
Old name of RB_NIL_P.
#define T_SYMBOL
Old name of RUBY_T_SYMBOL.
#define DBL2NUM
Old name of rb_float_new.
#define BUILTIN_TYPE
Old name of RB_BUILTIN_TYPE.
#define FL_TEST
Old name of RB_FL_TEST.
#define FL_FREEZE
Old name of RUBY_FL_FREEZE.
#define NUM2LONG
Old name of RB_NUM2LONG.
#define FL_UNSET
Old name of RB_FL_UNSET.
#define UINT2NUM
Old name of RB_UINT2NUM.
#define FIXNUM_P
Old name of RB_FIXNUM_P.
#define CONST_ID
Old name of RUBY_CONST_ID.
#define ALLOCV_END
Old name of RB_ALLOCV_END.
#define SYMBOL_P
Old name of RB_SYMBOL_P.
#define T_REGEXP
Old name of RUBY_T_REGEXP.
#define ruby_debug
This variable controls whether the interpreter is in debug mode.
void rb_raise(VALUE exc, const char *fmt,...)
Exception entry point.
VALUE rb_eNotImpError
NotImplementedError exception.
void rb_bug(const char *fmt,...)
Interpreter panic switch.
VALUE rb_eStandardError
StandardError exception.
void rb_set_errinfo(VALUE err)
Sets the current exception ($!) to the given value.
VALUE rb_eTypeError
TypeError exception.
VALUE rb_eNoMatchingPatternError
NoMatchingPatternError exception.
void rb_exc_fatal(VALUE mesg)
Raises a fatal error in the current thread.
VALUE rb_eRuntimeError
RuntimeError exception.
void rb_warn(const char *fmt,...)
Identical to rb_warning(), except it reports unless $VERBOSE is nil.
VALUE rb_eNoMatchingPatternKeyError
NoMatchingPatternKeyError exception.
VALUE rb_eArgError
ArgumentError exception.
VALUE rb_eIndexError
IndexError exception.
VALUE rb_eSyntaxError
SyntaxError exception.
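For context, the exception entry points above are typically used as follows. A minimal sketch (not from compile.c); checked_index is a hypothetical helper:

#include <ruby.h>

/* Validates an index argument, warning on negative values and raising
 * ArgumentError when the result is out of range. */
static long
checked_index(VALUE idx, long len)
{
    long i = NUM2LONG(idx);

    if (i < 0) {
        rb_warn("negative index %ld will be counted from the end", i);
        i += len;
    }
    if (i < 0 || i >= len) {
        rb_raise(rb_eArgError, "index %ld out of range (0...%ld)", i, len);
    }
    return i;
}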
VALUE rb_obj_reveal(VALUE obj, VALUE klass)
Make a hidden object visible again.
VALUE rb_cArray
Array class.
VALUE rb_obj_hide(VALUE obj)
Make the object invisible from Ruby code.
VALUE rb_cHash
Hash class.
VALUE rb_inspect(VALUE obj)
Generates a human-readable textual representation of the given object.
VALUE rb_cRange
Range class.
VALUE rb_obj_is_kind_of(VALUE obj, VALUE klass)
Queries if the given object is an instance (of possibly descendants) of the given class.
VALUE rb_obj_freeze(VALUE obj)
Just calls rb_obj_freeze_inline() inside.
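A rough illustration of the object helpers above, assuming ordinary extension code; frozen_copy_of_hash is a hypothetical name:

#include <ruby.h>

/* Accepts only Hash arguments, freezes a defensive copy, and dumps its
 * inspect output for debugging. */
static VALUE
frozen_copy_of_hash(VALUE arg)
{
    if (!rb_obj_is_kind_of(arg, rb_cHash)) {
        rb_raise(rb_eTypeError, "expected a Hash, got %"PRIsVALUE, rb_inspect(arg));
    }
    VALUE copy = rb_hash_dup(arg);
    rb_p(copy);                  /* like Kernel#p, prints the inspect form */
    return rb_obj_freeze(copy);
}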
#define RB_OBJ_WRITTEN(old, oldv, young)
Identical to RB_OBJ_WRITE(), except it doesn't write any values, but only a WB declaration.
#define RB_OBJ_WRITE(old, slot, young)
Declaration of a "back" pointer.
int rb_enc_get_index(VALUE obj)
Queries the index of the encoding of the passed object, if any.
rb_encoding * rb_enc_find(const char *name)
Identical to rb_find_encoding(), except it takes a C's string instead of Ruby's.
rb_encoding * rb_to_encoding(VALUE obj)
Identical to rb_find_encoding(), except it raises an exception instead of returning NULL.
VALUE rb_enc_from_encoding(rb_encoding *enc)
Queries the Ruby-level counterpart instance of rb_cEncoding that corresponds to the passed encoding.
rb_encoding * rb_enc_from_index(int idx)
Identical to rb_find_encoding(), except it takes an encoding index instead of a Ruby object.
static const char * rb_enc_name(rb_encoding *enc)
Queries the (canonical) name of the passed encoding.
int rb_enc_find_index(const char *name)
Queries the index of the encoding.
VALUE rb_enc_str_new(const char *ptr, long len, rb_encoding *enc)
Identical to rb_str_new(), except it additionally takes an encoding.
ID rb_intern3(const char *name, long len, rb_encoding *enc)
Identical to rb_intern2(), except it additionally takes an encoding.
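A small sketch of how the encoding lookups above fit together (illustrative only; utf8_string_from_buffer is hypothetical):

#include <stdio.h>
#include <ruby.h>
#include <ruby/encoding.h>

/* Builds a UTF-8 string from a raw byte buffer and reports its encoding. */
static VALUE
utf8_string_from_buffer(const char *buf, long len)
{
    rb_encoding *utf8 = rb_enc_find("UTF-8");      /* look up by canonical name */
    VALUE str = rb_enc_str_new(buf, len, utf8);

    /* rb_enc_name() gives the canonical name back, e.g. "UTF-8". */
    fprintf(stderr, "encoding: %s (index %d)\n",
            rb_enc_name(utf8), rb_enc_get_index(str));
    return str;
}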
void rb_gc_mark(VALUE obj)
Marks an object.
void rb_memerror(void)
Triggers out-of-memory error.
void rb_mark_set(struct st_table *tbl)
Identical to rb_mark_hash(), except it marks only keys of the table and leave their associated values...
VALUE rb_ary_reverse(VALUE ary)
Destructively reverses the passed array in-place.
VALUE rb_ary_dup(VALUE ary)
Duplicates an array.
VALUE rb_ary_unshift(VALUE ary, VALUE elem)
Destructively prepends the passed item at the beginning of the passed array.
VALUE rb_ary_cat(VALUE ary, const VALUE *train, long len)
Destructively appends multiple elements at the end of the array.
VALUE rb_ary_new(void)
Allocates a new, empty array.
VALUE rb_ary_new_capa(long capa)
Identical to rb_ary_new(), except it additionally specifies how many rooms of objects it should alloc...
VALUE rb_ary_hidden_new(long capa)
Allocates a hidden (no class) empty array.
VALUE rb_ary_clear(VALUE ary)
Destructively removes everything from an array.
VALUE rb_ary_push(VALUE ary, VALUE elem)
Special case of rb_ary_cat() that adds only one element.
VALUE rb_ary_freeze(VALUE obj)
Freeze an array, preventing further modifications.
VALUE rb_ary_new_from_args(long n,...)
Constructs an array from the passed objects.
VALUE rb_ary_entry(VALUE ary, long off)
Queries an element of an array.
VALUE rb_ary_join(VALUE ary, VALUE sep)
Recursively stringises the elements of the passed array, flattens that result, then joins the sequenc...
void rb_ary_store(VALUE ary, long key, VALUE val)
Destructively stores the passed value to the passed array's passed index.
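The array constructors and accessors above combine roughly as follows; an illustrative sketch in which joined_sample is a hypothetical helper:

#include <ruby.h>

/* Builds a small array, rewrites one slot, and joins it into a String. */
static VALUE
joined_sample(VALUE head)
{
    VALUE ary = rb_ary_new_capa(4);           /* pre-size the backing store */
    rb_ary_push(ary, head);
    for (int i = 1; i <= 3; i++) rb_ary_push(ary, INT2FIX(i));

    VALUE first = rb_ary_entry(ary, 0);       /* read index 0 */
    rb_ary_store(ary, 3, first);              /* overwrite index 3 with it */
    rb_ary_unshift(ary, ID2SYM(rb_intern("head")));

    return rb_ary_join(ary, rb_str_new_cstr(", "));
}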
#define INTEGER_PACK_NATIVE_BYTE_ORDER
Means either INTEGER_PACK_MSBYTE_FIRST or INTEGER_PACK_LSBYTE_FIRST, depending on the host processor'...
VALUE rb_integer_unpack(const void *words, size_t numwords, size_t wordsize, size_t nails, int flags)
Import an integer from a buffer.
VALUE rb_big_cmp(VALUE lhs, VALUE rhs)
Compares the passed two bignums.
VALUE rb_dbl2big(double d)
Converts a C's double into a bignum.
#define INTEGER_PACK_NEGATIVE
Interprets the input as a signed negative number (unpack only).
#define INTEGER_PACK_LSWORD_FIRST
Stores/interprets the least significant word as the first word.
VALUE rb_complex_new(VALUE real, VALUE imag)
Constructs a Complex, by first multiplying the imaginary part with 1i and then adding it to the real part.
void rb_hash_bulk_insert(long argc, const VALUE *argv, VALUE hash)
Inserts a list of key-value pairs into a hash table at once.
void rb_hash_foreach(VALUE hash, int(*func)(VALUE key, VALUE val, VALUE arg), VALUE arg)
Iterates over a hash.
VALUE rb_hash_freeze(VALUE obj)
Just another name of rb_obj_freeze.
VALUE rb_hash_aref(VALUE hash, VALUE key)
Queries the given key in the given hash table.
VALUE rb_hash_aset(VALUE hash, VALUE key, VALUE val)
Inserts or replaces ("upsert"s) the objects into the given hash table.
VALUE rb_hash_lookup(VALUE hash, VALUE key)
Identical to rb_hash_aref(), except it always returns RUBY_Qnil for misshits.
VALUE rb_hash_dup(VALUE hash)
Duplicates a hash.
VALUE rb_hash_clear(VALUE hash)
Swipes everything out of the passed hash table.
VALUE rb_hash_new(void)
Creates a new, empty hash object.
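rb_hash_foreach() drives an int-returning callback once per entry; returning ST_CONTINUE keeps the iteration going. A hedged sketch with hypothetical names (count_nil_values):

#include <ruby.h>

/* Counts entries whose value is nil, via the rb_hash_foreach callback. */
static int
count_nil_values_i(VALUE key, VALUE val, VALUE arg)
{
    if (NIL_P(val)) (*(long *)arg)++;
    return ST_CONTINUE;            /* keep iterating */
}

static long
count_nil_values(VALUE hash)
{
    long n = 0;
    rb_hash_foreach(hash, count_nil_values_i, (VALUE)&n);
    return n;
}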
int rb_is_const_id(ID id)
Classifies the given ID, then sees if it is a constant.
ID rb_id_attrset(ID id)
Calculates an ID of attribute writer.
int rb_is_attrset_id(ID id)
Classifies the given ID, then sees if it is an attribute writer.
int rb_range_values(VALUE range, VALUE *begp, VALUE *endp, int *exclp)
Deconstructs a range into its components.
VALUE rb_range_new(VALUE beg, VALUE end, int excl)
Creates a new Range.
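rb_range_values() deconstructs a Range, returning zero for non-Ranges and possibly nil begin/end components for beginless or endless ranges. An illustrative sketch; normalized_range is hypothetical:

#include <ruby.h>

/* Clamps a Range argument against 0...len, raising TypeError otherwise. */
static VALUE
normalized_range(VALUE range, long len)
{
    VALUE beg, end;
    int excl;

    if (!rb_range_values(range, &beg, &end, &excl)) {
        rb_raise(rb_eTypeError, "expected a Range");
    }
    long b = NIL_P(beg) ? 0 : NUM2LONG(beg);
    long e = NIL_P(end) ? len : NUM2LONG(end);
    return rb_range_new(LONG2NUM(b), LONG2NUM(e), excl);
}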
VALUE rb_rational_new(VALUE num, VALUE den)
Constructs a Rational, with reduction.
int rb_reg_options(VALUE re)
Queries the options of the passed regular expression.
VALUE rb_str_append(VALUE dst, VALUE src)
Identical to rb_str_buf_append(), except it converts the right hand side before concatenating.
VALUE rb_sym_to_s(VALUE sym)
This is an rb_sym2str() + rb_str_dup() combo.
VALUE rb_str_tmp_new(long len)
Allocates a "temporary" string.
int rb_str_hash_cmp(VALUE str1, VALUE str2)
Compares two strings.
VALUE rb_str_cat2(VALUE, const char *)
Just another name of rb_str_cat_cstr.
st_index_t rb_str_hash(VALUE str)
Calculates a hash value of a string.
VALUE rb_str_cat(VALUE dst, const char *src, long srclen)
Destructively appends the passed contents to the string.
int rb_str_cmp(VALUE lhs, VALUE rhs)
Compares two strings, as in strcmp(3).
VALUE rb_str_concat(VALUE dst, VALUE src)
Identical to rb_str_append(), except it also accepts an integer as a codepoint.
VALUE rb_str_freeze(VALUE str)
This is the implementation of String#freeze.
VALUE rb_str_new(const char *ptr, long len)
Allocates an instance of rb_cString.
VALUE rb_str_new_cstr(const char *ptr)
Identical to rb_str_new(), except it assumes the passed pointer is a pointer to a C string.
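The string builders above chain naturally; a minimal, hypothetical sketch (key_value_pair):

#include <ruby.h>

/* Assembles "key=value" by appending C strings and Ruby strings. */
static VALUE
key_value_pair(VALUE key, VALUE value)
{
    VALUE out = rb_str_new_cstr("");         /* empty rb_cString instance */
    rb_str_append(out, key);                 /* coerces key to String first */
    rb_str_cat(out, "=", 1);                 /* raw bytes + explicit length */
    rb_str_concat(out, value);               /* also accepts codepoints */
    return rb_str_freeze(out);
}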
VALUE rb_class_name(VALUE obj)
Queries the name of the given object's class.
static ID rb_intern_const(const char *str)
This is a "tiny optimisation" over rb_intern().
VALUE rb_id2sym(ID id)
Allocates an instance of rb_cSymbol that has the given id.
const char * rb_id2name(ID id)
Retrieves the name mapped to the given id.
ID rb_intern(const char *name)
Finds or creates a symbol of the given name.
VALUE rb_sym2str(VALUE id)
Identical to rb_id2str(), except it takes an instance of rb_cSymbol rather than an ID.
ID rb_sym2id(VALUE obj)
Converts an instance of rb_cSymbol into an ID.
ID rb_intern_str(VALUE str)
Identical to rb_intern(), except it takes an instance of rb_cString.
VALUE rb_id2str(ID id)
Identical to rb_id2name(), except it returns a Ruby's String instead of C's.
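IDs and Symbols convert back and forth losslessly. An illustrative round trip (id_symbol_roundtrip is a hypothetical name):

#include <stdio.h>
#include <ruby.h>

/* Round-trips between C strings, IDs and Symbols. */
static void
id_symbol_roundtrip(void)
{
    ID id = rb_intern("compile");            /* find-or-create the ID */
    VALUE sym = rb_id2sym(id);               /* ID -> Symbol (:compile) */
    VALUE name = rb_sym2str(sym);            /* Symbol -> frozen String */
    ID back = rb_sym2id(sym);                /* Symbol -> ID again */

    fprintf(stderr, "%s (same id: %d)\n", rb_id2name(back), back == id);
    (void)name;
}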
RBIMPL_ATTR_NORETURN() void rb_eof_error(void)
Utility function to raise rb_eEOFError.
char * ptr
Pointer to the underlying memory region, of at least capa bytes.
int len
Length of the buffer.
VALUE rb_ractor_make_shareable(VALUE obj)
Destructively transforms the passed object so that multiple Ractors can share it.
#define DECIMAL_SIZE_OF(expr)
An approximation of decimal representation size.
void ruby_qsort(void *, const size_t, const size_t, int(*)(const void *, const void *, void *), void *)
Reentrant implementation of quick sort.
VALUE rb_sprintf(const char *fmt,...)
Ruby's extended sprintf(3).
VALUE rb_str_catf(VALUE dst, const char *fmt,...)
Identical to rb_sprintf(), except it renders the output to the specified object rather than creating ...
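rb_sprintf()/rb_str_catf() understand the PRIsVALUE conversion, which interpolates a Ruby object via its to_s (or inspect, with the + flag). A hedged sketch; position_report is hypothetical:

#include <ruby.h>

/* Formats a "file:line in <node>" report using Ruby's own sprintf engine. */
static VALUE
position_report(VALUE file, int line, VALUE node)
{
    VALUE msg = rb_sprintf("%"PRIsVALUE":%d", file, line);
    rb_str_catf(msg, " in %+"PRIsVALUE, node);   /* %+ renders via inspect */
    return msg;
}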
#define rb_long2int
Just another name of rb_long2int_inline.
#define MEMCPY(p1, p2, type, n)
Handy macro to call memcpy.
#define ALLOCA_N(type, n)
Allocates an array of n elements of type on the machine stack, using alloca.
#define MEMZERO(p, type, n)
Handy macro to erase a region of memory.
#define RB_GC_GUARD(v)
Prevents premature destruction of local objects.
#define RB_ALLOCV(v, n)
Identical to RB_ALLOCV_N(), except that it allocates a number of bytes and returns a void*.
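RB_ALLOCV pairs with RB_ALLOCV_END (the buffer may be backed by a hidden String), and RB_GC_GUARD keeps an object alive while only its raw pointer is in use. An illustrative sketch (reversed_copy is hypothetical):

#include <string.h>
#include <ruby.h>

/* Copies string contents into a Ruby-managed scratch buffer, reverses the
 * bytes there, and returns the result as a new String. */
static VALUE
reversed_copy(VALUE str)
{
    long len = RSTRING_LEN(str);
    VALUE holder;
    char *buf = RB_ALLOCV(holder, len);

    memcpy(buf, RSTRING_PTR(str), len);
    for (long i = 0; i < len / 2; i++) {
        char tmp = buf[i]; buf[i] = buf[len - 1 - i]; buf[len - 1 - i] = tmp;
    }
    VALUE out = rb_str_new(buf, len);
    RB_ALLOCV_END(holder);           /* release the scratch buffer */
    RB_GC_GUARD(str);                /* keep str alive across raw-pointer use */
    return out;
}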
VALUE type(ANYARGS)
ANYARGS-ed function type.
int st_foreach(st_table *q, int_type *w, st_data_t e)
Iteration over the given table.
#define RARRAY_LEN
Just another name of rb_array_len.
static int RARRAY_LENINT(VALUE ary)
Identical to rb_array_len(), except it differs for the return type.
static void RARRAY_ASET(VALUE ary, long i, VALUE v)
Assigns an object in an array.
#define RARRAY_AREF(a, i)
Queries the i-th element of the array.
#define RARRAY_CONST_PTR
Just another name of rb_array_const_ptr.
static VALUE RBASIC_CLASS(VALUE obj)
Queries the class of an object.
#define RUBY_DEFAULT_FREE
This is a value you can set to RData::dfree.
void(* RUBY_DATA_FUNC)(void *)
This is the type of callbacks registered to RData.
#define RHASH_SIZE(h)
Queries the size of the hash.
static VALUE RREGEXP_SRC(VALUE rexp)
Convenient getter function.
#define StringValue(v)
Ensures that the parameter object is a String.
#define StringValuePtr(v)
Identical to StringValue, except it returns a char*.
static char * RSTRING_PTR(VALUE str)
Queries the contents pointer of the string.
static int RSTRING_LENINT(VALUE str)
Identical to RSTRING_LEN(), except it differs for the return type.
static long RSTRING_LEN(VALUE str)
Queries the length of the string.
#define StringValueCStr(v)
Identical to StringValuePtr, except it additionally checks for the contents for viability as a C stri...
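StringValueCStr() both coerces its lvalue argument to a String and rejects embedded NUL bytes, so the result is safe to hand to C APIs. A small, hypothetical example (open_log):

#include <stdio.h>
#include <ruby.h>

/* Treats the argument as a path and reports what would be opened. */
static void
open_log(VALUE path)
{
    const char *cpath = StringValueCStr(path);   /* raises on embedded NULs */
    long len = RSTRING_LEN(path);

    if (len == 0) rb_raise(rb_eArgError, "empty path");
    fprintf(stderr, "opening %s (%ld bytes)\n", cpath, len);
}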
#define RTYPEDDATA_DATA(v)
Convenient getter macro.
VALUE rb_data_typed_object_zalloc(VALUE klass, size_t size, const rb_data_type_t *type)
Identical to rb_data_typed_object_wrap(), except it allocates a new data region internally instead of...
#define TypedData_Get_Struct(obj, type, data_type, sval)
Obtains a C struct from inside of a wrapper Ruby object.
#define TypedData_Wrap_Struct(klass, data_type, sval)
Converts sval, a pointer to your struct, into a Ruby object.
#define TypedData_Make_Struct(klass, type, data_type, sval)
Identical to TypedData_Wrap_Struct, except it allocates a new data region internally instead of takin...
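The TypedData macros above wrap a C struct behind an rb_data_type_t descriptor whose dmark/dfree callbacks cooperate with the GC; RB_OBJ_WRITE supplies the write barrier when a VALUE is stored into the struct. An illustrative sketch with hypothetical names (point_list):

#include <ruby.h>

struct point_list {
    long len;
    VALUE label;                 /* a Ruby object owned by this struct */
    double *coords;              /* plain C memory */
};

static void
point_list_mark(void *p)
{
    struct point_list *ptr = p;
    rb_gc_mark(ptr->label);      /* keep referenced Ruby objects alive */
}

static void
point_list_free(void *p)
{
    struct point_list *ptr = p;
    ruby_xfree(ptr->coords);
    ruby_xfree(ptr);
}

static const rb_data_type_t point_list_type = {
    "point_list",
    {point_list_mark, point_list_free, 0},
    0, 0, RUBY_TYPED_FREE_IMMEDIATELY,
};

static VALUE
point_list_new(VALUE klass, long len, VALUE label)
{
    struct point_list *ptr;
    VALUE obj = TypedData_Make_Struct(klass, struct point_list, &point_list_type, ptr);

    ptr->len = len;
    ptr->coords = ruby_xmalloc2(len, sizeof(double));
    RB_OBJ_WRITE(obj, &ptr->label, label);   /* write barrier for the stored VALUE */
    return obj;
}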
void rb_p(VALUE obj)
Inspects an object.
#define RTEST
This is an old name of RB_TEST.
#define _(args)
This was a transition path from K&R to ANSI.
Internal header for Complex.
Internal header for Rational.
const ID * segments
A null-terminated list of ids, used to represent a constant's path; idNULL is used to represent the ::...
This is the struct that holds the necessary info for a wrapped struct.
const char * wrap_struct_name
Name of structs of this kind.
struct rb_iseq_constant_body::@150 param
Parameter information.
uintptr_t ID
Type that represents a Ruby identifier such as a variable name.
#define SIZEOF_VALUE
Identical to sizeof(VALUE), except it is a macro that can also be used inside of preprocessor directi...
uintptr_t VALUE
Type that represents a Ruby object.
static bool RB_FLOAT_TYPE_P(VALUE obj)
Queries if the object is an instance of rb_cFloat.
static bool rb_integer_type_p(VALUE obj)
Queries if the object is an instance of rb_cInteger.
static bool RB_TYPE_P(VALUE obj, enum ruby_value_type t)
Queries if the given object is of given type.
RUBY_T_MASK
Bitmask of ruby_value_type.
void * ruby_xmalloc2(size_t nelems, size_t elemsiz)
Identical to ruby_xmalloc(), except it allocates nelems * elemsiz bytes.
void * ruby_xmalloc(size_t size)
Allocates a storage instance.
void ruby_xfree(void *ptr)
Deallocates a storage instance.