12#include "ruby/internal/config.h"
22#include "internal/array.h"
23#include "internal/compile.h"
24#include "internal/complex.h"
25#include "internal/encoding.h"
26#include "internal/error.h"
27#include "internal/gc.h"
28#include "internal/hash.h"
29#include "internal/io.h"
30#include "internal/numeric.h"
31#include "internal/object.h"
32#include "internal/rational.h"
33#include "internal/re.h"
34#include "internal/ruby_parser.h"
35#include "internal/symbol.h"
36#include "internal/thread.h"
37#include "internal/variable.h"
43#include "vm_callinfo.h"
49#include "insns_info.inc"
/* Add the plain integer i to an already Fixnum-encoded VALUE n without
 * decoding it: the tag bit of INT2FIX(i) is masked off, so ordinary
 * addition yields the encoding of (FIX2INT(n) + i).
 * (Fixed extraction artifact: leading file line number fused into source.) */
#define FIXNUM_INC(n, i) ((n)+(INT2FIX(i)&~FIXNUM_FLAG))
85 unsigned int rescued: 2;
86 unsigned int unremovable: 1;
91 enum ruby_vminsn_type insn_id;
121 const void *ensure_node;
126const ID rb_iseq_shared_exc_local_tbl[] = {idERROR_INFO};
146#define compile_debug CPDEBUG
148#define compile_debug ISEQ_COMPILE_DATA(iseq)->option->debug_level
/* Debug-print helpers, active when compile_debug is nonzero.  Each macro
 * prints an indented header/value pair gated on a verbosity level; the
 * (void) cast silences unused-result warnings when printing is skipped.
 * (Fixed extraction artifact: leading file line numbers fused into source.) */
#define compile_debug_print_indent(level) \
    ruby_debug_print_indent((level), compile_debug, gl_node_level * 2)

/* Print a VALUE at verbosity level 1. */
#define debugp(header, value) (void) \
  (compile_debug_print_indent(1) && \
   ruby_debug_print_value(1, compile_debug, (header), (value)))

/* Print an ID (symbol) at verbosity level 1. */
#define debugi(header, id)  (void) \
  (compile_debug_print_indent(1) && \
   ruby_debug_print_id(1, compile_debug, (header), (id)))

/* Print an instruction operand at verbosity level 1. */
#define debugp_param(header, value) (void) \
  (compile_debug_print_indent(1) && \
   ruby_debug_print_value(1, compile_debug, (header), (value)))

/* Print only at verbosity level 2 or higher. */
#define debugp_verbose(header, value) (void) \
  (compile_debug_print_indent(2) && \
   ruby_debug_print_value(2, compile_debug, (header), (value)))

/* Print only at the very verbose level 10 (node dumps). */
#define debugp_verbose_node(header, value) (void) \
  (compile_debug_print_indent(10) && \
   ruby_debug_print_value(10, compile_debug, (header), (value)))
176#define debug_node_start(node) ((void) \
177 (compile_debug_print_indent(1) && \
178 (ruby_debug_print_node(1, CPDEBUG, "", (const NODE *)(node)), gl_node_level)), \
181#define debug_node_end() gl_node_level --
185#define debugi(header, id) ((void)0)
186#define debugp(header, value) ((void)0)
187#define debugp_verbose(header, value) ((void)0)
188#define debugp_verbose_node(header, value) ((void)0)
189#define debugp_param(header, value) ((void)0)
190#define debug_node_start(node) ((void)0)
191#define debug_node_end() ((void)0)
194#if CPDEBUG > 1 || CPDEBUG < 0
196#define printf ruby_debug_printf
197#define debugs if (compile_debug_print_indent(1)) ruby_debug_printf
198#define debug_compile(msg, v) ((void)(compile_debug_print_indent(1) && fputs((msg), stderr)), (v))
200#define debugs if(0)printf
201#define debug_compile(msg, v) (v)
/* Index of the implicit local holding the error info ($!) in
 * rescue/ensure iseqs; see rb_iseq_shared_exc_local_tbl.
 * (Fixed extraction artifact: leading file line number fused into source.) */
#define LVAR_ERRINFO (1)
/* Label and child-iseq constructors.  All of these implicitly use the
 * enclosing `iseq` variable of the compiling function.
 * (Fixed extraction artifact: leading file line numbers fused into source.) */
#define NEW_LABEL(l) new_label_body(iseq, (l))
#define LABEL_FORMAT "<L%03d>"  /* printf format for dumping label numbers */

/* Create a new child iseq with no parent scope link. */
#define NEW_ISEQ(node, name, type, line_no) \
    new_child_iseq(iseq, (node), rb_fstring(name), 0, (type), (line_no))

/* Create a child iseq whose parent is the current iseq. */
#define NEW_CHILD_ISEQ(node, name, type, line_no) \
    new_child_iseq(iseq, (node), rb_fstring(name), iseq, (type), (line_no))

/* As NEW_CHILD_ISEQ, but the body is generated by a callback rather than
 * by compiling an AST node. */
#define NEW_CHILD_ISEQ_WITH_CALLBACK(callback_func, name, type, line_no) \
    new_child_iseq_with_callback(iseq, (callback_func), (name), iseq, (type), (line_no))
/* Instruction-list construction macros.  `seq` is a LINK_ANCHOR being
 * built; `line_node` supplies the source line / node id recorded on the
 * emitted instruction.  All implicitly use the enclosing `iseq`.
 * (Fixed extraction artifact: leading file line numbers fused into source.) */
#define ADD_SEQ(seq1, seq2) \
  APPEND_LIST((seq1), (seq2))

/* Append an operand-less instruction. */
#define ADD_INSN(seq, line_node, insn) \
  ADD_ELEM((seq), (LINK_ELEMENT *) new_insn_body(iseq, nd_line(line_node), nd_node_id(line_node), BIN(insn), 0))

/* Append an instruction with an explicit line/node id (no AST node). */
#define ADD_SYNTHETIC_INSN(seq, line_no, node_id, insn) \
  ADD_ELEM((seq), (LINK_ELEMENT *) new_insn_body(iseq, (line_no), (node_id), BIN(insn), 0))

/* Insert an operand-less instruction before/after an existing element. */
#define INSERT_BEFORE_INSN(next, line_no, node_id, insn) \
  ELEM_INSERT_PREV(&(next)->link, (LINK_ELEMENT *) new_insn_body(iseq, line_no, node_id, BIN(insn), 0))

#define INSERT_AFTER_INSN(prev, line_no, node_id, insn) \
  ELEM_INSERT_NEXT(&(prev)->link, (LINK_ELEMENT *) new_insn_body(iseq, line_no, node_id, BIN(insn), 0))

/* Append an instruction with one operand. */
#define ADD_INSN1(seq, line_node, insn, op1) \
  ADD_ELEM((seq), (LINK_ELEMENT *) \
           new_insn_body(iseq, nd_line(line_node), nd_node_id(line_node), BIN(insn), 1, (VALUE)(op1)))

/* One-operand insert-before/insert-after variants. */
#define INSERT_BEFORE_INSN1(next, line_no, node_id, insn, op1) \
  ELEM_INSERT_PREV(&(next)->link, (LINK_ELEMENT *) \
                   new_insn_body(iseq, line_no, node_id, BIN(insn), 1, (VALUE)(op1)))

#define INSERT_AFTER_INSN1(prev, line_no, node_id, insn, op1) \
  ELEM_INSERT_NEXT(&(prev)->link, (LINK_ELEMENT *) \
                   new_insn_body(iseq, line_no, node_id, BIN(insn), 1, (VALUE)(op1)))

/* Bump a label's reference count (operands that point at labels). */
#define LABEL_REF(label) ((label)->refcnt++)

/* Append a branch-style instruction whose single operand is a label. */
#define ADD_INSNL(seq, line_node, insn, label) (ADD_INSN1(seq, line_node, insn, label), LABEL_REF(label))

/* Two- and three-operand instruction appenders. */
#define ADD_INSN2(seq, line_node, insn, op1, op2) \
  ADD_ELEM((seq), (LINK_ELEMENT *) \
           new_insn_body(iseq, nd_line(line_node), nd_node_id(line_node), BIN(insn), 2, (VALUE)(op1), (VALUE)(op2)))

#define ADD_INSN3(seq, line_node, insn, op1, op2, op3) \
  ADD_ELEM((seq), (LINK_ELEMENT *) \
           new_insn_body(iseq, nd_line(line_node), nd_node_id(line_node), BIN(insn), 3, (VALUE)(op1), (VALUE)(op2), (VALUE)(op3)))

/* Method-send emitters; all funnel into ADD_SEND_R. */
#define ADD_SEND(seq, line_node, id, argc) \
  ADD_SEND_R((seq), (line_node), (id), (argc), NULL, (VALUE)INT2FIX(0), NULL)

#define ADD_SEND_WITH_FLAG(seq, line_node, id, argc, flag) \
  ADD_SEND_R((seq), (line_node), (id), (argc), NULL, (VALUE)(flag), NULL)

#define ADD_SEND_WITH_BLOCK(seq, line_node, id, argc, block) \
  ADD_SEND_R((seq), (line_node), (id), (argc), (block), (VALUE)INT2FIX(0), NULL)

/* Push the implicit receiver (self) for a function-style call. */
#define ADD_CALL_RECEIVER(seq, line_node) \
  ADD_INSN((seq), (line_node), putself)

/* Function-style call (FCALL: receiver is self, private allowed). */
#define ADD_CALL(seq, line_node, id, argc) \
  ADD_SEND_R((seq), (line_node), (id), (argc), NULL, (VALUE)INT2FIX(VM_CALL_FCALL), NULL)

#define ADD_CALL_WITH_BLOCK(seq, line_node, id, argc, block) \
  ADD_SEND_R((seq), (line_node), (id), (argc), (block), (VALUE)INT2FIX(VM_CALL_FCALL), NULL)

/* Core send emitter: method id, argc, optional block iseq, call flags,
 * and keyword-argument descriptor. */
#define ADD_SEND_R(seq, line_node, id, argc, block, flag, keywords) \
  ADD_ELEM((seq), (LINK_ELEMENT *) new_insn_send(iseq, nd_line(line_node), nd_node_id(line_node), (id), (VALUE)(argc), (block), (VALUE)(flag), (keywords)))

/* Append a trace event marker (optionally carrying event data). */
#define ADD_TRACE(seq, event) \
  ADD_ELEM((seq), (LINK_ELEMENT *)new_trace_body(iseq, (event), 0))
#define ADD_TRACE_WITH_DATA(seq, event, data) \
  ADD_ELEM((seq), (LINK_ELEMENT *)new_trace_body(iseq, (event), (data)))
294static void iseq_add_getlocal(
rb_iseq_t *iseq,
LINK_ANCHOR *
const seq,
const NODE *
const line_node,
int idx,
int level);
295static void iseq_add_setlocal(
rb_iseq_t *iseq,
LINK_ANCHOR *
const seq,
const NODE *
const line_node,
int idx,
int level);
/* Local-variable access and label/adjust list macros.
 * (Fixed extraction artifact: leading file line numbers fused into source.) */
#define ADD_GETLOCAL(seq, line_node, idx, level) iseq_add_getlocal(iseq, (seq), (line_node), (idx), (level))
#define ADD_SETLOCAL(seq, line_node, idx, level) iseq_add_setlocal(iseq, (seq), (line_node), (idx), (level))

/* Append a label element to the sequence. */
#define ADD_LABEL(seq, label) \
  ADD_ELEM((seq), (LINK_ELEMENT *) (label))

/* Insert a label immediately after `before` in the sequence. */
#define APPEND_LABEL(seq, before, label) \
  APPEND_ELEM((seq), (before), (LINK_ELEMENT *) (label))

/* Append a stack-depth adjust element targeting `label`. */
#define ADD_ADJUST(seq, line_node, label) \
  ADD_ELEM((seq), (LINK_ELEMENT *) new_adjust_body(iseq, (label), nd_line(line_node)))

/* Adjust with no associated source line (-1). */
#define ADD_ADJUST_RESTORE(seq, label) \
  ADD_ELEM((seq), (LINK_ELEMENT *) new_adjust_body(iseq, (label), -1))

/* Pin a label so the optimizer never removes it (NULL label is a no-op). */
#define LABEL_UNREMOVABLE(label) \
  ((label) ? (LABEL_REF(label), (label)->unremovable=1) : 0)
315#define ADD_CATCH_ENTRY(type, ls, le, iseqv, lc) do { \
316 VALUE _e = rb_ary_new3(5, (type), \
317 (VALUE)(ls) | 1, (VALUE)(le) | 1, \
318 (VALUE)(iseqv), (VALUE)(lc) | 1); \
319 LABEL_UNREMOVABLE(ls); \
322 if (NIL_P(ISEQ_COMPILE_DATA(iseq)->catch_table_ary)) \
323 RB_OBJ_WRITE(iseq, &ISEQ_COMPILE_DATA(iseq)->catch_table_ary, rb_ary_hidden_new(3)); \
324 rb_ary_push(ISEQ_COMPILE_DATA(iseq)->catch_table_ary, freeze_hide_obj(_e)); \
/* Recursive-compile entry points.  `popped` tells the callee whether the
 * result value will be discarded.
 * (Fixed extraction artifact: leading file line numbers fused into source.) */
#define COMPILE(anchor, desc, node) \
  (debug_compile("== " desc "\n", \
                 iseq_compile_each(iseq, (anchor), (node), 0)))

/* Compile a node whose value is discarded. */
#define COMPILE_POPPED(anchor, desc, node) \
  (debug_compile("== " desc "\n", \
                 iseq_compile_each(iseq, (anchor), (node), 1)))

/* Compile with an explicit popped flag. */
#define COMPILE_(anchor, desc, node, popped) \
  (debug_compile("== " desc "\n", \
                 iseq_compile_each(iseq, (anchor), (node), (popped))))

/* Compile a call receiver: implicit self becomes putself + FCALL flag,
 * otherwise compile `recv` and yield 0 on success / -1 on failure. */
#define COMPILE_RECV(anchor, desc, node, recv) \
    (private_recv_p(node) ? \
     (ADD_INSN(anchor, node, putself), VM_CALL_FCALL) : \
     COMPILE(anchor, desc, recv) ? 0 : -1)
/* Accessors and type predicates for linked-list elements.
 * (Fixed extraction artifact: leading file line numbers fused into source.) */
#define OPERAND_AT(insn, idx) \
  (((INSN*)(insn))->operands[(idx)])

#define INSN_OF(insn) \
  (((INSN*)(insn))->insn_id)

/* Element-kind predicates on a LINK_ELEMENT. */
#define IS_INSN(link) ((link)->type == ISEQ_ELEMENT_INSN)
#define IS_LABEL(link) ((link)->type == ISEQ_ELEMENT_LABEL)
#define IS_ADJUST(link) ((link)->type == ISEQ_ELEMENT_ADJUST)
#define IS_TRACE(link) ((link)->type == ISEQ_ELEMENT_TRACE)

/* True if `iobj` is the named VM instruction. */
#define IS_INSN_ID(iobj, insn) (INSN_OF(iobj) == BIN(insn))
/* True if the following element exists, is an instruction, and matches. */
#define IS_NEXT_INSN_ID(link, insn) \
    ((link)->next && IS_INSN((link)->next) && IS_INSN_ID((link)->next, insn))
367append_compile_error(const
rb_iseq_t *iseq,
int line, const
char *fmt, ...)
369 VALUE err_info = ISEQ_COMPILE_DATA(iseq)->err_info;
370 VALUE file = rb_iseq_path(iseq);
375 err = rb_syntax_error_append(err, file, line, -1, NULL, fmt, args);
377 if (
NIL_P(err_info)) {
378 RB_OBJ_WRITE(iseq, &ISEQ_COMPILE_DATA(iseq)->err_info, err);
381 else if (!err_info) {
392compile_bug(
rb_iseq_t *iseq,
int line,
const char *fmt, ...)
396 rb_report_bug_valist(rb_iseq_path(iseq), line, fmt, args);
/* Error-reporting helpers.  ERROR_ARGS* expand to the leading
 * (iseq, line) arguments of COMPILE_ERROR, taken from an AST node.
 * (Fixed extraction artifact: leading file line numbers fused into source.) */
#define COMPILE_ERROR append_compile_error

#define ERROR_ARGS_AT(n) iseq, nd_line(n),
#define ERROR_ARGS ERROR_ARGS_AT(node)
407#define EXPECT_NODE(prefix, node, ndtype, errval) \
409 const NODE *error_node = (node); \
410 enum node_type error_type = nd_type(error_node); \
411 if (error_type != (ndtype)) { \
412 COMPILE_ERROR(ERROR_ARGS_AT(error_node) \
413 prefix ": " #ndtype " is expected, but %s", \
414 ruby_node_name(error_type)); \
419#define EXPECT_NODE_NONULL(prefix, parent, ndtype, errval) \
421 COMPILE_ERROR(ERROR_ARGS_AT(parent) \
422 prefix ": must be " #ndtype ", but 0"); \
426#define UNKNOWN_NODE(prefix, node, errval) \
428 const NODE *error_node = (node); \
429 COMPILE_ERROR(ERROR_ARGS_AT(error_node) prefix ": unknown node (%s)", \
430 ruby_node_name(nd_type(error_node))); \
/* Propagate compile failure: bail out with COMPILE_NG after running any
 * BEFORE_RETURN cleanup defined by the caller; NO_CHECK ignores status.
 * (Fixed extraction artifact: leading file line numbers fused into source.) */
#define CHECK(sub) if (!(sub)) {BEFORE_RETURN;return COMPILE_NG;}
#define NO_CHECK(sub) (void)(sub)

/* Declare and initialize a stack-allocated LINK_ANCHOR (empty list whose
 * `last` points at its own sentinel). */
#define DECL_ANCHOR(name) \
  LINK_ANCHOR name[1] = {{{ISEQ_ELEMENT_ANCHOR,},&name[0].anchor}}
#define INIT_ANCHOR(name) \
  ((name->last = &name->anchor)->next = NULL)
447freeze_hide_obj(
VALUE obj)
450 RBASIC_CLEAR_CLASS(obj);
454#include "optinsn.inc"
455#if OPT_INSTRUCTIONS_UNIFICATION
456#include "optunifs.inc"
461#define ISEQ_ARG iseq,
462#define ISEQ_ARG_DECLARE rb_iseq_t *iseq,
465#define ISEQ_ARG_DECLARE
469#define gl_node_level ISEQ_COMPILE_DATA(iseq)->node_level
475static int insn_data_length(
INSN *iobj);
476static int calc_sp_depth(
int depth,
INSN *iobj);
478static INSN *new_insn_body(
rb_iseq_t *iseq,
int line_no,
int node_id,
enum ruby_vminsn_type insn_id,
int argc, ...);
491static int iseq_set_exception_local_table(
rb_iseq_t *iseq);
495static int iseq_set_exception_table(
rb_iseq_t *iseq);
496static int iseq_set_optargs_table(
rb_iseq_t *iseq);
497static int iseq_set_parameters_lvar_state(
const rb_iseq_t *iseq);
500static int compile_hash(
rb_iseq_t *iseq,
LINK_ANCHOR *
const ret,
const NODE *node,
int method_call_keywords,
int popped);
507verify_list(ISEQ_ARG_DECLARE
const char *info,
LINK_ANCHOR *
const anchor)
513 if (!compile_debug)
return;
515 list = anchor->anchor.next;
516 plist = &anchor->anchor;
518 if (plist != list->prev) {
525 if (anchor->last != plist && anchor->last != 0) {
530 rb_bug(
"list verify error: %08x (%s)", flag, info);
535#define verify_list(info, anchor) verify_list(iseq, (info), (anchor))
542 VALUE *original = rb_iseq_original_iseq(iseq);
544 while (i < ISEQ_BODY(iseq)->iseq_size) {
545 VALUE insn = original[i];
546 const char *types = insn_op_types(insn);
548 for (
int j=0; types[j]; j++) {
549 if (types[j] == TS_CALLDATA) {
553 if (cc != vm_cc_empty()) {
555 rb_bug(
"call cache is not initialized by vm_cc_empty()");
562 for (
unsigned int i=0; i<ISEQ_BODY(iseq)->ci_size; i++) {
563 struct rb_call_data *cd = &ISEQ_BODY(iseq)->call_data[i];
566 if (cc != NULL && cc != vm_cc_empty()) {
568 rb_bug(
"call cache is not initialized by vm_cc_empty()");
580 elem->prev = anchor->last;
581 anchor->last->next = elem;
583 verify_list(
"add", anchor);
593 elem->next = before->next;
594 elem->next->prev = elem;
596 if (before == anchor->last) anchor->last = elem;
597 verify_list(
"add", anchor);
/* Wrap the list functions so call sites can omit the iseq argument
 * (it is picked up from the enclosing scope).
 * (Fixed extraction artifact: leading file line numbers fused into source.) */
#define ADD_ELEM(anchor, elem) ADD_ELEM(iseq, (anchor), (elem))
#define APPEND_ELEM(anchor, before, elem) APPEND_ELEM(iseq, (anchor), (before), (elem))
605branch_coverage_valid_p(
rb_iseq_t *iseq,
int first_line)
607 if (!ISEQ_COVERAGE(iseq))
return 0;
608 if (!ISEQ_BRANCH_COVERAGE(iseq))
return 0;
609 if (first_line <= 0)
return 0;
616 const int first_lineno = loc->beg_pos.lineno, first_column = loc->beg_pos.column;
617 const int last_lineno = loc->end_pos.lineno, last_column = loc->end_pos.column;
620 rb_hash_aset(structure, key, branch);
632 if (!branch_coverage_valid_p(iseq, loc->beg_pos.lineno))
return Qundef;
643 VALUE branch_base = rb_hash_aref(structure, key);
646 if (
NIL_P(branch_base)) {
647 branch_base = setup_branch(loc,
type, structure, key);
648 branches = rb_hash_new();
660generate_dummy_line_node(
int lineno,
int node_id)
663 nd_set_line(&dummy, lineno);
664 nd_set_node_id(&dummy, node_id);
671 if (!branch_coverage_valid_p(iseq, loc->beg_pos.lineno))
return;
682 VALUE branch = rb_hash_aref(branches, key);
686 branch = setup_branch(loc,
type, branches, key);
696 ADD_TRACE_WITH_DATA(seq, RUBY_EVENT_COVERAGE_BRANCH, counter_idx);
697 ADD_SYNTHETIC_INSN(seq, loc->end_pos.lineno, node_id, nop);
/* Last source line recorded while compiling this iseq.
 * (Fixed extraction artifact: leading file line number fused into source.) */
#define ISEQ_LAST_LINE(iseq) (ISEQ_COMPILE_DATA(iseq)->last_line)
703validate_label(st_data_t name, st_data_t label, st_data_t arg)
707 if (!lobj->link.next) {
709 COMPILE_ERROR(iseq, lobj->position,
710 "%"PRIsVALUE
": undefined label",
720 st_foreach(labels_table, validate_label, (st_data_t)iseq);
721 st_free_table(labels_table);
725get_nd_recv(
const NODE *node)
727 switch (nd_type(node)) {
729 return RNODE_CALL(node)->nd_recv;
731 return RNODE_OPCALL(node)->nd_recv;
735 return RNODE_QCALL(node)->nd_recv;
739 return RNODE_ATTRASGN(node)->nd_recv;
741 return RNODE_OP_ASGN1(node)->nd_recv;
743 return RNODE_OP_ASGN2(node)->nd_recv;
745 rb_bug(
"unexpected node: %s", ruby_node_name(nd_type(node)));
750get_node_call_nd_mid(
const NODE *node)
752 switch (nd_type(node)) {
754 return RNODE_CALL(node)->nd_mid;
756 return RNODE_OPCALL(node)->nd_mid;
758 return RNODE_FCALL(node)->nd_mid;
760 return RNODE_QCALL(node)->nd_mid;
762 return RNODE_VCALL(node)->nd_mid;
764 return RNODE_ATTRASGN(node)->nd_mid;
766 rb_bug(
"unexpected node: %s", ruby_node_name(nd_type(node)));
771get_nd_args(
const NODE *node)
773 switch (nd_type(node)) {
775 return RNODE_CALL(node)->nd_args;
777 return RNODE_OPCALL(node)->nd_args;
779 return RNODE_FCALL(node)->nd_args;
781 return RNODE_QCALL(node)->nd_args;
785 return RNODE_ATTRASGN(node)->nd_args;
787 rb_bug(
"unexpected node: %s", ruby_node_name(nd_type(node)));
792get_node_colon_nd_mid(
const NODE *node)
794 switch (nd_type(node)) {
796 return RNODE_COLON2(node)->nd_mid;
798 return RNODE_COLON3(node)->nd_mid;
800 rb_bug(
"unexpected node: %s", ruby_node_name(nd_type(node)));
805get_nd_vid(
const NODE *node)
807 switch (nd_type(node)) {
809 return RNODE_LASGN(node)->nd_vid;
811 return RNODE_DASGN(node)->nd_vid;
813 return RNODE_IASGN(node)->nd_vid;
815 return RNODE_CVASGN(node)->nd_vid;
817 rb_bug(
"unexpected node: %s", ruby_node_name(nd_type(node)));
822get_nd_value(
const NODE *node)
824 switch (nd_type(node)) {
826 return RNODE_LASGN(node)->nd_value;
828 return RNODE_DASGN(node)->nd_value;
830 rb_bug(
"unexpected node: %s", ruby_node_name(nd_type(node)));
835get_string_value(
const NODE *node)
837 switch (nd_type(node)) {
839 return RB_OBJ_SET_SHAREABLE(rb_node_str_string_val(node));
841 return RB_OBJ_SET_SHAREABLE(rb_node_file_path_val(node));
843 rb_bug(
"unexpected node: %s", ruby_node_name(nd_type(node)));
853 (*ifunc->func)(iseq, ret, ifunc->data);
855 ADD_SYNTHETIC_INSN(ret, ISEQ_COMPILE_DATA(iseq)->last_line, -1, leave);
857 CHECK(iseq_setup_insn(iseq, ret));
858 return iseq_setup(iseq, ret);
861static bool drop_unreachable_return(
LINK_ANCHOR *ret);
870 NO_CHECK(COMPILE(ret,
"nil", node));
871 iseq_set_local_table(iseq, 0, 0);
874 else if (nd_type_p(node, NODE_SCOPE)) {
876 iseq_set_local_table(iseq, RNODE_SCOPE(node)->nd_tbl, (
NODE *)RNODE_SCOPE(node)->nd_args);
877 iseq_set_arguments(iseq, ret, (
NODE *)RNODE_SCOPE(node)->nd_args);
878 iseq_set_parameters_lvar_state(iseq);
880 switch (ISEQ_BODY(iseq)->
type) {
881 case ISEQ_TYPE_BLOCK:
883 LABEL *start = ISEQ_COMPILE_DATA(iseq)->start_label = NEW_LABEL(0);
884 LABEL *end = ISEQ_COMPILE_DATA(iseq)->end_label = NEW_LABEL(0);
886 start->rescued = LABEL_RESCUE_BEG;
887 end->rescued = LABEL_RESCUE_END;
890 ADD_SYNTHETIC_INSN(ret, ISEQ_BODY(iseq)->location.first_lineno, -1, nop);
891 ADD_LABEL(ret, start);
892 CHECK(COMPILE(ret,
"block body", RNODE_SCOPE(node)->nd_body));
895 ISEQ_COMPILE_DATA(iseq)->last_line = ISEQ_BODY(iseq)->location.code_location.end_pos.lineno;
898 ADD_CATCH_ENTRY(CATCH_TYPE_REDO, start, end, NULL, start);
899 ADD_CATCH_ENTRY(CATCH_TYPE_NEXT, start, end, NULL, end);
902 case ISEQ_TYPE_CLASS:
905 CHECK(COMPILE(ret,
"scoped node", RNODE_SCOPE(node)->nd_body));
907 ISEQ_COMPILE_DATA(iseq)->last_line = nd_line(node);
910 case ISEQ_TYPE_METHOD:
912 ISEQ_COMPILE_DATA(iseq)->root_node = RNODE_SCOPE(node)->nd_body;
914 CHECK(COMPILE(ret,
"scoped node", RNODE_SCOPE(node)->nd_body));
915 ISEQ_COMPILE_DATA(iseq)->root_node = RNODE_SCOPE(node)->nd_body;
917 ISEQ_COMPILE_DATA(iseq)->last_line = nd_line(node);
921 CHECK(COMPILE(ret,
"scoped node", RNODE_SCOPE(node)->nd_body));
928#define INVALID_ISEQ_TYPE(type) \
929 ISEQ_TYPE_##type: m = #type; goto invalid_iseq_type
930 switch (ISEQ_BODY(iseq)->
type) {
931 case INVALID_ISEQ_TYPE(
METHOD);
932 case INVALID_ISEQ_TYPE(CLASS);
933 case INVALID_ISEQ_TYPE(BLOCK);
934 case INVALID_ISEQ_TYPE(EVAL);
935 case INVALID_ISEQ_TYPE(MAIN);
936 case INVALID_ISEQ_TYPE(TOP);
937#undef INVALID_ISEQ_TYPE
938 case ISEQ_TYPE_RESCUE:
939 iseq_set_exception_local_table(iseq);
940 CHECK(COMPILE(ret,
"rescue", node));
942 case ISEQ_TYPE_ENSURE:
943 iseq_set_exception_local_table(iseq);
944 CHECK(COMPILE_POPPED(ret,
"ensure", node));
946 case ISEQ_TYPE_PLAIN:
947 CHECK(COMPILE(ret,
"ensure", node));
950 COMPILE_ERROR(ERROR_ARGS
"unknown scope: %d", ISEQ_BODY(iseq)->
type);
953 COMPILE_ERROR(ERROR_ARGS
"compile/ISEQ_TYPE_%s should not be reached", m);
958 if (ISEQ_BODY(iseq)->
type == ISEQ_TYPE_RESCUE || ISEQ_BODY(iseq)->
type == ISEQ_TYPE_ENSURE) {
959 NODE dummy_line_node = generate_dummy_line_node(0, -1);
960 ADD_GETLOCAL(ret, &dummy_line_node, LVAR_ERRINFO, 0);
961 ADD_INSN1(ret, &dummy_line_node,
throw,
INT2FIX(0) );
963 else if (!drop_unreachable_return(ret)) {
964 ADD_SYNTHETIC_INSN(ret, ISEQ_COMPILE_DATA(iseq)->last_line, -1, leave);
968 if (ISEQ_COMPILE_DATA(iseq)->labels_table) {
969 st_table *labels_table = ISEQ_COMPILE_DATA(iseq)->labels_table;
970 ISEQ_COMPILE_DATA(iseq)->labels_table = 0;
971 validate_labels(iseq, labels_table);
974 CHECK(iseq_setup_insn(iseq, ret));
975 return iseq_setup(iseq, ret);
979rb_iseq_translate_threaded_code(
rb_iseq_t *iseq)
981#if OPT_DIRECT_THREADED_CODE || OPT_CALL_THREADED_CODE
982 const void *
const *table = rb_vm_get_insns_address_table();
984 VALUE *encoded = (
VALUE *)ISEQ_BODY(iseq)->iseq_encoded;
986 for (i = 0; i < ISEQ_BODY(iseq)->iseq_size; ) {
987 int insn = (int)ISEQ_BODY(iseq)->iseq_encoded[i];
988 int len = insn_len(insn);
989 encoded[i] = (
VALUE)table[insn];
996 rb_yjit_live_iseq_count++;
997 rb_yjit_iseq_alloc_count++;
1004rb_iseq_original_iseq(
const rb_iseq_t *iseq)
1006 VALUE *original_code;
1008 if (ISEQ_ORIGINAL_ISEQ(iseq))
return ISEQ_ORIGINAL_ISEQ(iseq);
1009 original_code = ISEQ_ORIGINAL_ISEQ_ALLOC(iseq, ISEQ_BODY(iseq)->iseq_size);
1010 MEMCPY(original_code, ISEQ_BODY(iseq)->iseq_encoded,
VALUE, ISEQ_BODY(iseq)->iseq_size);
1012#if OPT_DIRECT_THREADED_CODE || OPT_CALL_THREADED_CODE
1016 for (i = 0; i < ISEQ_BODY(iseq)->iseq_size; ) {
1017 const void *addr = (
const void *)original_code[i];
1018 const int insn = rb_vm_insn_addr2insn(addr);
1020 original_code[i] = insn;
1021 i += insn_len(insn);
1025 return original_code;
1032#if defined(HAVE_TRUE_LONG_LONG) && SIZEOF_LONG_LONG > SIZEOF_VALUE
1033# define ALIGNMENT_SIZE SIZEOF_LONG_LONG
1035# define ALIGNMENT_SIZE SIZEOF_VALUE
1037#define PADDING_SIZE_MAX ((size_t)((ALIGNMENT_SIZE) - 1))
1039#define ALIGNMENT_SIZE_OF(type) alignment_size_assert(RUBY_ALIGNOF(type), #type)
1042alignment_size_assert(
size_t align,
const char *
type)
1045 "ALIGNMENT_SIZE_OF(%s):%zd == (2 ** N) is expected",
type, align);
1051calc_padding(
void *ptr,
size_t align)
1056 mis = (size_t)ptr & (align - 1);
1058 padding = align - mis;
1069 size_t padding = calc_padding((
void *)&storage->buff[storage->pos], align);
1071 if (size >= INT_MAX - padding) rb_memerror();
1072 if (storage->pos + size + padding > storage->size) {
1073 unsigned int alloc_size = storage->size;
1075 while (alloc_size < size + PADDING_SIZE_MAX) {
1076 if (alloc_size >= INT_MAX / 2) rb_memerror();
1079 storage->next = (
void *)
ALLOC_N(
char, alloc_size +
1081 storage = *arena = storage->next;
1084 storage->size = alloc_size;
1085 padding = calc_padding((
void *)&storage->buff[storage->pos], align);
1088 storage->pos += (int)padding;
1090 ptr = (
void *)&storage->buff[storage->pos];
1091 storage->pos += (int)size;
1096compile_data_alloc(
rb_iseq_t *iseq,
size_t size,
size_t align)
1099 return compile_data_alloc_with_arena(arena, size, align);
/* Typed single-object arena allocation with correct alignment.
 * (Fixed extraction artifact: leading file line numbers fused into source.) */
#define compile_data_alloc_type(iseq, type) \
  (type *)compile_data_alloc(iseq, sizeof(type), ALIGNMENT_SIZE_OF(type))
1106compile_data_alloc2(
rb_iseq_t *iseq,
size_t elsize,
size_t num,
size_t align)
1109 return compile_data_alloc(iseq, size, align);
/* Typed array arena allocation (num elements), uninitialized.
 * (Fixed extraction artifact: leading file line numbers fused into source.) */
#define compile_data_alloc2_type(iseq, type, num) \
  (type *)compile_data_alloc2(iseq, sizeof(type), num, ALIGNMENT_SIZE_OF(type))
1116compile_data_calloc2(
rb_iseq_t *iseq,
size_t elsize,
size_t num,
size_t align)
1119 void *p = compile_data_alloc(iseq, size, align);
/* Typed array arena allocation (num elements), zero-initialized.
 * (Fixed extraction artifact: leading file line numbers fused into source.) */
#define compile_data_calloc2_type(iseq, type, num) \
  (type *)compile_data_calloc2(iseq, sizeof(type), num, ALIGNMENT_SIZE_OF(type))
1131 return (
INSN *)compile_data_alloc_with_arena(arena,
sizeof(
INSN), ALIGNMENT_SIZE_OF(
INSN));
1135compile_data_alloc_label(
rb_iseq_t *iseq)
1137 return compile_data_alloc_type(iseq,
LABEL);
1141compile_data_alloc_adjust(
rb_iseq_t *iseq)
1143 return compile_data_alloc_type(iseq,
ADJUST);
1147compile_data_alloc_trace(
rb_iseq_t *iseq)
1149 return compile_data_alloc_type(iseq,
TRACE);
1158 elem2->next = elem1->next;
1159 elem2->prev = elem1;
1160 elem1->next = elem2;
1162 elem2->next->prev = elem2;
1172 elem2->prev = elem1->prev;
1173 elem2->next = elem1;
1174 elem1->prev = elem2;
1176 elem2->prev->next = elem2;
1186 elem2->prev = elem1->prev;
1187 elem2->next = elem1->next;
1189 elem1->prev->next = elem2;
1192 elem1->next->prev = elem2;
1199 elem->prev->next = elem->next;
1201 elem->next->prev = elem->prev;
1208 return anchor->anchor.next;
1214 return anchor->last;
1221 switch (elem->type) {
1222 case ISEQ_ELEMENT_INSN:
1223 case ISEQ_ELEMENT_ADJUST:
1233LIST_INSN_SIZE_ONE(
const LINK_ANCHOR *
const anchor)
1235 LINK_ELEMENT *first_insn = ELEM_FIRST_INSN(FIRST_ELEMENT(anchor));
1236 if (first_insn != NULL &&
1237 ELEM_FIRST_INSN(first_insn->next) == NULL) {
1246LIST_INSN_SIZE_ZERO(
const LINK_ANCHOR *
const anchor)
1248 if (ELEM_FIRST_INSN(FIRST_ELEMENT(anchor)) == NULL) {
1266 if (anc2->anchor.next) {
1269 anc1->last->next = anc2->anchor.next;
1270 anc2->anchor.next->prev = anc1->last;
1271 anc1->last = anc2->last;
1276 verify_list(
"append", anc1);
1279#define APPEND_LIST(anc1, anc2) APPEND_LIST(iseq, (anc1), (anc2))
1288 printf(
"anch: %p, frst: %p, last: %p\n", (
void *)&anchor->anchor,
1289 (
void *)anchor->anchor.next, (
void *)anchor->last);
1291 printf(
"curr: %p, next: %p, prev: %p, type: %d\n", (
void *)list, (
void *)list->next,
1292 (
void *)list->prev, (
int)list->type);
1297 dump_disasm_list_with_cursor(anchor->anchor.next, cur, 0);
1298 verify_list(
"debug list", anchor);
1301#define debug_list(anc, cur) debug_list(iseq, (anc), (cur))
1304#define debug_list(anc, cur) ((void)0)
1310 TRACE *trace = compile_data_alloc_trace(iseq);
1312 trace->link.type = ISEQ_ELEMENT_TRACE;
1313 trace->link.next = NULL;
1314 trace->event = event;
1321new_label_body(
rb_iseq_t *iseq,
long line)
1323 LABEL *labelobj = compile_data_alloc_label(iseq);
1325 labelobj->link.type = ISEQ_ELEMENT_LABEL;
1326 labelobj->link.next = 0;
1328 labelobj->label_no = ISEQ_COMPILE_DATA(iseq)->label_no++;
1329 labelobj->sc_state = 0;
1331 labelobj->refcnt = 0;
1333 labelobj->rescued = LABEL_RESCUE_NONE;
1334 labelobj->unremovable = 0;
1335 labelobj->position = -1;
1342 ADJUST *adjust = compile_data_alloc_adjust(iseq);
1343 adjust->link.type = ISEQ_ELEMENT_ADJUST;
1344 adjust->link.next = 0;
1345 adjust->label = label;
1346 adjust->line_no = line;
1347 LABEL_UNREMOVABLE(label);
1354 const char *types = insn_op_types(insn->insn_id);
1355 for (
int j = 0; types[j]; j++) {
1356 char type = types[j];
1363 func(&OPERAND_AT(insn, j), data);
1372iseq_insn_each_object_write_barrier(
VALUE * obj,
VALUE iseq)
1381new_insn_core(
rb_iseq_t *iseq,
int line_no,
int node_id,
int insn_id,
int argc,
VALUE *argv)
1383 INSN *iobj = compile_data_alloc_insn(iseq);
1387 iobj->link.type = ISEQ_ELEMENT_INSN;
1388 iobj->link.next = 0;
1389 iobj->insn_id = insn_id;
1390 iobj->insn_info.line_no = line_no;
1391 iobj->insn_info.node_id = node_id;
1392 iobj->insn_info.events = 0;
1393 iobj->operands = argv;
1394 iobj->operand_size = argc;
1397 iseq_insn_each_markable_object(iobj, iseq_insn_each_object_write_barrier, (
VALUE)iseq);
1403new_insn_body(
rb_iseq_t *iseq,
int line_no,
int node_id,
enum ruby_vminsn_type insn_id,
int argc, ...)
1405 VALUE *operands = 0;
1409 va_start(argv, argc);
1410 operands = compile_data_alloc2_type(iseq,
VALUE, argc);
1411 for (i = 0; i < argc; i++) {
1417 return new_insn_core(iseq, line_no, node_id, insn_id, argc, operands);
1421insn_replace_with_operands(
rb_iseq_t *iseq,
INSN *iobj,
enum ruby_vminsn_type insn_id,
int argc, ...)
1423 VALUE *operands = 0;
1427 va_start(argv, argc);
1428 operands = compile_data_alloc2_type(iseq,
VALUE, argc);
1429 for (i = 0; i < argc; i++) {
1436 iobj->insn_id = insn_id;
1437 iobj->operand_size = argc;
1438 iobj->operands = operands;
1439 iseq_insn_each_markable_object(iobj, iseq_insn_each_object_write_barrier, (
VALUE)iseq);
1447 VM_ASSERT(argc >= 0);
1450 flag |= VM_CALL_KWARG;
1451 argc += kw_arg->keyword_len;
1454 if (!(flag & (VM_CALL_ARGS_SPLAT | VM_CALL_ARGS_BLOCKARG | VM_CALL_KWARG | VM_CALL_KW_SPLAT | VM_CALL_FORWARDING))
1455 && !has_blockiseq) {
1456 flag |= VM_CALL_ARGS_SIMPLE;
1459 ISEQ_BODY(iseq)->ci_size++;
1460 const struct rb_callinfo *ci = vm_ci_new(mid, flag, argc, kw_arg);
1468 VALUE *operands = compile_data_calloc2_type(iseq,
VALUE, 2);
1471 operands[1] = (
VALUE)blockiseq;
1478 if (vm_ci_flag((
struct rb_callinfo *)ci) & VM_CALL_FORWARDING) {
1479 insn = new_insn_core(iseq, line_no, node_id, BIN(sendforward), 2, operands);
1482 insn = new_insn_core(iseq, line_no, node_id, BIN(send), 2, operands);
1495 VALUE ast_value = rb_ruby_ast_new(node);
1497 debugs(
"[new_child_iseq]> ---------------------------------------\n");
1498 int isolated_depth = ISEQ_COMPILE_DATA(iseq)->isolated_depth;
1499 ret_iseq = rb_iseq_new_with_opt(ast_value, name,
1500 rb_iseq_path(iseq), rb_iseq_realpath(iseq),
1502 isolated_depth ? isolated_depth + 1 : 0,
1503 type, ISEQ_COMPILE_DATA(iseq)->option,
1504 ISEQ_BODY(iseq)->variable.script_lines);
1505 debugs(
"[new_child_iseq]< ---------------------------------------\n");
1515 debugs(
"[new_child_iseq_with_callback]> ---------------------------------------\n");
1516 ret_iseq = rb_iseq_new_with_callback(ifunc, name,
1517 rb_iseq_path(iseq), rb_iseq_realpath(iseq),
1518 line_no, parent,
type, ISEQ_COMPILE_DATA(iseq)->option);
1519 debugs(
"[new_child_iseq_with_callback]< ---------------------------------------\n");
1527 ISEQ_COMPILE_DATA(iseq)->catch_except_p =
true;
1528 if (ISEQ_BODY(iseq)->parent_iseq != NULL) {
1529 if (ISEQ_COMPILE_DATA(ISEQ_BODY(iseq)->parent_iseq)) {
1530 set_catch_except_p((
rb_iseq_t *) ISEQ_BODY(iseq)->parent_iseq);
1553 while (pos < body->iseq_size) {
1554 insn = rb_vm_insn_decode(body->iseq_encoded[pos]);
1555 if (insn == BIN(
throw)) {
1556 set_catch_except_p(iseq);
1559 pos += insn_len(insn);
1565 for (i = 0; i < ct->size; i++) {
1567 UNALIGNED_MEMBER_PTR(ct, entries[i]);
1568 if (entry->type != CATCH_TYPE_BREAK
1569 && entry->type != CATCH_TYPE_NEXT
1570 && entry->type != CATCH_TYPE_REDO) {
1572 ISEQ_COMPILE_DATA(iseq)->catch_except_p =
true;
1579iseq_insert_nop_between_end_and_cont(
rb_iseq_t *iseq)
1581 VALUE catch_table_ary = ISEQ_COMPILE_DATA(iseq)->catch_table_ary;
1582 if (
NIL_P(catch_table_ary))
return;
1583 unsigned int i, tlen = (
unsigned int)
RARRAY_LEN(catch_table_ary);
1585 for (i = 0; i < tlen; i++) {
1591 enum rb_catch_type ct = (
enum rb_catch_type)(ptr[0] & 0xffff);
1593 if (ct != CATCH_TYPE_BREAK
1594 && ct != CATCH_TYPE_NEXT
1595 && ct != CATCH_TYPE_REDO) {
1597 for (e = end; e && (IS_LABEL(e) || IS_TRACE(e)); e = e->next) {
1599 INSN *nop = new_insn_core(iseq, 0, -1, BIN(nop), 0, 0);
1600 ELEM_INSERT_NEXT(end, &nop->link);
1613 if (
RTEST(ISEQ_COMPILE_DATA(iseq)->err_info))
1618 if (compile_debug > 5)
1619 dump_disasm_list(FIRST_ELEMENT(anchor));
1621 debugs(
"[compile step 3.1 (iseq_optimize)]\n");
1622 iseq_optimize(iseq, anchor);
1624 if (compile_debug > 5)
1625 dump_disasm_list(FIRST_ELEMENT(anchor));
1627 if (ISEQ_COMPILE_DATA(iseq)->option->instructions_unification) {
1628 debugs(
"[compile step 3.2 (iseq_insns_unification)]\n");
1629 iseq_insns_unification(iseq, anchor);
1630 if (compile_debug > 5)
1631 dump_disasm_list(FIRST_ELEMENT(anchor));
1634 debugs(
"[compile step 3.4 (iseq_insert_nop_between_end_and_cont)]\n");
1635 iseq_insert_nop_between_end_and_cont(iseq);
1636 if (compile_debug > 5)
1637 dump_disasm_list(FIRST_ELEMENT(anchor));
1645 if (
RTEST(ISEQ_COMPILE_DATA(iseq)->err_info))
1648 debugs(
"[compile step 4.1 (iseq_set_sequence)]\n");
1649 if (!iseq_set_sequence(iseq, anchor))
return COMPILE_NG;
1650 if (compile_debug > 5)
1651 dump_disasm_list(FIRST_ELEMENT(anchor));
1653 debugs(
"[compile step 4.2 (iseq_set_exception_table)]\n");
1654 if (!iseq_set_exception_table(iseq))
return COMPILE_NG;
1656 debugs(
"[compile step 4.3 (set_optargs_table)] \n");
1657 if (!iseq_set_optargs_table(iseq))
return COMPILE_NG;
1659 debugs(
"[compile step 5 (iseq_translate_threaded_code)] \n");
1660 if (!rb_iseq_translate_threaded_code(iseq))
return COMPILE_NG;
1662 debugs(
"[compile step 6 (update_catch_except_flags)] \n");
1664 update_catch_except_flags(iseq, ISEQ_BODY(iseq));
1666 debugs(
"[compile step 6.1 (remove unused catch tables)] \n");
1668 if (!ISEQ_COMPILE_DATA(iseq)->catch_except_p && ISEQ_BODY(iseq)->catch_table) {
1669 ruby_xfree_sized(ISEQ_BODY(iseq)->catch_table, iseq_catch_table_bytes(ISEQ_BODY(iseq)->catch_table->size));
1670 ISEQ_BODY(iseq)->catch_table = NULL;
1673#if VM_INSN_INFO_TABLE_IMPL == 2
1674 if (ISEQ_BODY(iseq)->insns_info.succ_index_table == NULL) {
1675 debugs(
"[compile step 7 (rb_iseq_insns_info_encode_positions)] \n");
1676 rb_iseq_insns_info_encode_positions(iseq);
1680 if (compile_debug > 1) {
1681 VALUE str = rb_iseq_disasm(iseq);
1684 verify_call_cache(iseq);
1685 debugs(
"[compile step: finish]\n");
1691iseq_set_exception_local_table(
rb_iseq_t *iseq)
1693 ISEQ_BODY(iseq)->local_table_size = numberof(rb_iseq_shared_exc_local_tbl);
1694 ISEQ_BODY(iseq)->local_table = rb_iseq_shared_exc_local_tbl;
1695 ISEQ_BODY(iseq)->lvar_states = NULL;
1703 while (iseq != ISEQ_BODY(iseq)->local_iseq) {
1705 iseq = ISEQ_BODY(iseq)->parent_iseq;
/* get_dyna_var_idx_at_raw: linear scan of this iseq's own local table
 * for `id`, yielding its table index. NOTE(review): the return
 * statements (found index / not-found value) were dropped by the
 * extraction — confirm against upstream before relying on semantics. */
1711get_dyna_var_idx_at_raw(
const rb_iseq_t *iseq,
ID id)
 1715 for (i = 0; i < ISEQ_BODY(iseq)->local_table_size; i++) {
 1716 if (ISEQ_BODY(iseq)->local_table[i] == id) {
/* Fragment of get_local_var_idx (header dropped): resolve `id` in the
 * method-level iseq's table; a failed lookup here is a compiler bug and
 * is reported via COMPILE_ERROR at the iseq's last line. */
 1726 int idx = get_dyna_var_idx_at_raw(ISEQ_BODY(iseq)->local_iseq,
id);
 1729 COMPILE_ERROR(iseq, ISEQ_LAST_LINE(iseq),
 1730 "get_local_var_idx: %d", idx);
/* get_dyna_var_idx: resolve a dynamic local variable `id` by searching
 * this iseq and then each parent_iseq in turn. On success, *level
 * receives the nesting distance climbed and *ls the local_table_size of
 * the iseq where the variable was found; failure is a compiler bug
 * reported against the topmost (starting) iseq.
 * NOTE(review): the search loop's header/termination and the success
 * return were dropped by the extraction; code kept byte-identical. */
1737get_dyna_var_idx(
const rb_iseq_t *iseq,
ID id,
int *level,
int *ls)
 1739 int lv = 0, idx = -1;
/* Remember the starting iseq so the error below points at the right
 * source location even after climbing parents. */
 1740 const rb_iseq_t *
const topmost_iseq = iseq;
 1743 idx = get_dyna_var_idx_at_raw(iseq,
id);
 1747 iseq = ISEQ_BODY(iseq)->parent_iseq;
 1752 COMPILE_ERROR(topmost_iseq, ISEQ_LAST_LINE(topmost_iseq),
 1753 "get_dyna_var_idx: -1");
 1757 *ls = ISEQ_BODY(iseq)->local_table_size;
/* iseq_local_block_param_p: does (idx, level) address the block
 * parameter? Climbs `level` parent iseqs (loop header dropped by the
 * extraction), then checks: we are at the method-level iseq, it has a
 * block parameter, and `idx` equals the distance from the end of the
 * local table to param.block_start. */
1762iseq_local_block_param_p(
const rb_iseq_t *iseq,
unsigned int idx,
unsigned int level)
 1766 iseq = ISEQ_BODY(iseq)->parent_iseq;
 1769 body = ISEQ_BODY(iseq);
 1770 if (body->local_iseq == iseq &&
 1771 body->param.flags.has_block &&
 1772 body->local_table_size - body->param.block_start == idx) {
/* iseq_block_param_id_p: resolve `id` to (idx, level, ls) via
 * get_dyna_var_idx, then test whether that slot is the block parameter.
 * On a hit, presumably *pidx / *plevel receive the resolved position —
 * those assignment lines were dropped by the extraction; confirm
 * against upstream. */
1781iseq_block_param_id_p(
const rb_iseq_t *iseq,
ID id,
int *pidx,
int *plevel)
 1784 int idx = get_dyna_var_idx(iseq,
id, &level, &ls);
 1785 if (iseq_local_block_param_p(iseq, ls - idx, level)) {
/* access_outer_variables: record that this iseq reads (write=false) or
 * writes (write=true) the outer local `id` found `level` scopes up.
 * Two jobs:
 *  1. Isolation check: if the iseq is being compiled for an isolated
 *     Proc (isolated_depth set) and the access crosses the isolation
 *     boundary, raise a compile error — with a dedicated message when
 *     the "variable" is the pseudo-id for yield.
 *  2. Bookkeeping: for each enclosing iseq up to `level`, record the id
 *     in its outer_variables id-table (created lazily with capacity 8).
 *     The table value is RBOOL(write); an existing read-only (false)
 *     entry is upgraded to Qtrue when a write is seen. */
1796access_outer_variables(
const rb_iseq_t *iseq,
int level,
ID id,
bool write)
 1798 int isolated_depth = ISEQ_COMPILE_DATA(iseq)->isolated_depth;
 1800 if (isolated_depth && level >= isolated_depth) {
 1801 if (
id == rb_intern(
"yield")) {
 1802 COMPILE_ERROR(iseq, ISEQ_LAST_LINE(iseq),
"can not yield from isolated Proc");
 1805 COMPILE_ERROR(iseq, ISEQ_LAST_LINE(iseq),
"can not access variable '%s' from isolated Proc", rb_id2name(
id));
/* Walk outward `level` scopes, tagging each enclosing iseq. */
 1809 for (
int i=0; i<level; i++) {
 1811 struct rb_id_table *ovs = ISEQ_BODY(iseq)->outer_variables;
 1814 ovs = ISEQ_BODY(iseq)->outer_variables = rb_id_table_create(8);
 1817 if (rb_id_table_lookup(ISEQ_BODY(iseq)->outer_variables,
id, &val)) {
/* Upgrade a previously-recorded read to a write. */
 1818 if (write && !val) {
 1819 rb_id_table_insert(ISEQ_BODY(iseq)->outer_variables,
id,
Qtrue);
 1823 rb_id_table_insert(ISEQ_BODY(iseq)->outer_variables,
id, RBOOL(write));
 1826 iseq = ISEQ_BODY(iseq)->parent_iseq;
/* iseq_lvar_id: map a (idx, level) local-variable reference back to its
 * ID: climb `level` parents, then index the local table from the end
 * (local_table_size - idx). The return of `id` was dropped by the
 * extraction. */
1831iseq_lvar_id(
const rb_iseq_t *iseq,
int idx,
int level)
 1833 for (
int i=0; i<level; i++) {
 1834 iseq = ISEQ_BODY(iseq)->parent_iseq;
 1837 ID id = ISEQ_BODY(iseq)->local_table[ISEQ_BODY(iseq)->local_table_size - idx];
/* update_lvar_state: advance the per-slot assignment state machine when
 * a local at (idx, level) is assigned:
 *   lvar_uninitialized -> lvar_initialized -> lvar_reassigned (sticky).
 * NOTE(review): the `break` statements between cases and the default
 * label were dropped by the extraction; the rb_bug("unreachable") is
 * presumably the default arm. Climbs `level` parents first, then
 * indexes lvar_states from the end of the local table, mirroring
 * iseq_lvar_id. */
1843update_lvar_state(
const rb_iseq_t *iseq,
int level,
int idx)
 1845 for (
int i=0; i<level; i++) {
 1846 iseq = ISEQ_BODY(iseq)->parent_iseq;
 1849 enum lvar_state *states = ISEQ_BODY(iseq)->lvar_states;
 1850 int table_idx = ISEQ_BODY(iseq)->local_table_size - idx;
 1851 switch (states[table_idx]) {
 1852 case lvar_uninitialized:
 1853 states[table_idx] = lvar_initialized;
 1855 case lvar_initialized:
 1856 states[table_idx] = lvar_reassigned;
 1858 case lvar_reassigned:
 1862 rb_bug(
"unreachable");
/* iseq_set_parameters_lvar_state: seed lvar_states for parameters.
 * All parameter slots start lvar_initialized (bound at call time),
 * except optional-argument slots [lead_num, lead_num+opt_num), which
 * are reset to lvar_uninitialized because their defaults are assigned
 * by compiled prologue code, not by the calling convention. */
1867iseq_set_parameters_lvar_state(
const rb_iseq_t *iseq)
 1869 for (
unsigned int i=0; i<ISEQ_BODY(iseq)->param.size; i++) {
 1870 ISEQ_BODY(iseq)->lvar_states[i] = lvar_initialized;
 1873 int lead_num = ISEQ_BODY(iseq)->param.lead_num;
 1874 int opt_num = ISEQ_BODY(iseq)->param.opt_num;
 1875 for (
int i=0; i<opt_num; i++) {
 1876 ISEQ_BODY(iseq)->lvar_states[lead_num + i] = lvar_uninitialized;
1885 if (iseq_local_block_param_p(iseq, idx, level)) {
1886 ADD_INSN2(seq, line_node, getblockparam,
INT2FIX((idx) + VM_ENV_DATA_SIZE - 1),
INT2FIX(level));
1889 ADD_INSN2(seq, line_node, getlocal,
INT2FIX((idx) + VM_ENV_DATA_SIZE - 1),
INT2FIX(level));
1891 if (level > 0) access_outer_variables(iseq, level, iseq_lvar_id(iseq, idx, level),
Qfalse);
1897 if (iseq_local_block_param_p(iseq, idx, level)) {
1898 ADD_INSN2(seq, line_node, setblockparam,
INT2FIX((idx) + VM_ENV_DATA_SIZE - 1),
INT2FIX(level));
1901 ADD_INSN2(seq, line_node, setlocal,
INT2FIX((idx) + VM_ENV_DATA_SIZE - 1),
INT2FIX(level));
1903 update_lvar_state(iseq, level, idx);
1904 if (level > 0) access_outer_variables(iseq, level, iseq_lvar_id(iseq, idx, level),
Qtrue);
1913 if (body->param.flags.has_opt ||
1914 body->param.flags.has_post ||
1915 body->param.flags.has_rest ||
1916 body->param.flags.has_block ||
1917 body->param.flags.has_kw ||
1918 body->param.flags.has_kwrest) {
1920 if (body->param.flags.has_block) {
1921 body->param.size = body->param.block_start + 1;
1923 else if (body->param.flags.has_kwrest) {
1924 body->param.size = body->param.keyword->rest_start + 1;
1926 else if (body->param.flags.has_kw) {
1927 body->param.size = body->param.keyword->bits_start + 1;
1929 else if (body->param.flags.has_post) {
1930 body->param.size = body->param.post_start + body->param.post_num;
1932 else if (body->param.flags.has_rest) {
1933 body->param.size = body->param.rest_start + 1;
1935 else if (body->param.flags.has_opt) {
1936 body->param.size = body->param.lead_num + body->param.opt_num;
1943 body->param.size = body->param.lead_num;
1953 struct rb_iseq_param_keyword *keyword;
1956 int kw = 0, rkw = 0, di = 0, i;
1958 body->param.flags.has_kw = TRUE;
1959 body->param.keyword = keyword =
ZALLOC_N(
struct rb_iseq_param_keyword, 1);
1963 node = node->nd_next;
1966 keyword->bits_start = arg_size++;
1968 node = args->kw_args;
1970 const NODE *val_node = get_nd_value(node->nd_body);
1973 if (val_node == NODE_SPECIAL_REQUIRED_KEYWORD) {
1977 switch (nd_type(val_node)) {
1979 dv = rb_node_sym_string_val(val_node);
1982 dv = rb_node_regx_string_val(val_node);
1985 dv = rb_node_line_lineno_val(val_node);
1988 dv = rb_node_integer_literal_val(val_node);
1991 dv = rb_node_float_literal_val(val_node);
1994 dv = rb_node_rational_literal_val(val_node);
1996 case NODE_IMAGINARY:
1997 dv = rb_node_imaginary_literal_val(val_node);
2000 dv = rb_node_encoding_val(val_node);
2012 NO_CHECK(COMPILE_POPPED(optargs,
"kwarg", RNODE(node)));
2016 keyword->num = ++di;
2020 node = node->nd_next;
2025 if (RNODE_DVAR(args->kw_rest_arg)->nd_vid != 0) {
2026 ID kw_id = ISEQ_BODY(iseq)->local_table[arg_size];
2027 keyword->rest_start = arg_size++;
2028 body->param.flags.has_kwrest = TRUE;
2030 if (kw_id == idPow) body->param.flags.anon_kwrest = TRUE;
2032 keyword->required_num = rkw;
2033 keyword->table = &body->local_table[keyword->bits_start - keyword->num];
2038 for (i = 0; i <
RARRAY_LEN(default_values); i++) {
2040 if (dv == complex_mark) dv =
Qundef;
2045 keyword->default_values = dvs;
2054 if (!body->param.flags.use_block) {
2055 body->param.flags.use_block = 1;
2060 st_data_t key = (st_data_t)rb_intern_str(body->location.label);
2061 set_insert(&vm->unused_block_warning_table, key);
2069 debugs(
"iseq_set_arguments: %s\n", node_args ?
"" :
"0");
2073 const struct rb_args_info *
const args = &RNODE_ARGS(node_args)->nd_ainfo;
2079 EXPECT_NODE(
"iseq_set_arguments", node_args, NODE_ARGS, COMPILE_NG);
2081 body->param.lead_num = arg_size = (int)args->pre_args_num;
2082 if (body->param.lead_num > 0) body->param.flags.has_lead = TRUE;
2083 debugs(
" - argc: %d\n", body->param.lead_num);
2085 rest_id = args->rest_arg;
2086 if (rest_id == NODE_SPECIAL_EXCESSIVE_COMMA) {
2090 block_id = args->block_arg;
2092 bool optimized_forward = (args->forwarding && args->pre_args_num == 0 && !args->opt_args);
2094 if (optimized_forward) {
2099 if (args->opt_args) {
2107 label = NEW_LABEL(nd_line(RNODE(node)));
2109 ADD_LABEL(optargs, label);
2110 NO_CHECK(COMPILE_POPPED(optargs,
"optarg", node->nd_body));
2111 node = node->nd_next;
2116 label = NEW_LABEL(nd_line(node_args));
2118 ADD_LABEL(optargs, label);
2123 for (j = 0; j < i+1; j++) {
2128 body->param.flags.has_opt = TRUE;
2129 body->param.opt_num = i;
2130 body->param.opt_table = opt_table;
2135 body->param.rest_start = arg_size++;
2136 body->param.flags.has_rest = TRUE;
2137 if (rest_id ==
'*') body->param.flags.anon_rest = TRUE;
2141 if (args->first_post_arg) {
2142 body->param.post_start = arg_size;
2143 body->param.post_num = args->post_args_num;
2144 body->param.flags.has_post = TRUE;
2145 arg_size += args->post_args_num;
2147 if (body->param.flags.has_rest) {
2148 body->param.post_start = body->param.rest_start + 1;
2152 if (args->kw_args) {
2153 arg_size = iseq_set_arguments_keywords(iseq, optargs, args, arg_size);
2155 else if (args->kw_rest_arg && !optimized_forward) {
2156 ID kw_id = ISEQ_BODY(iseq)->local_table[arg_size];
2157 struct rb_iseq_param_keyword *keyword =
ZALLOC_N(
struct rb_iseq_param_keyword, 1);
2158 keyword->rest_start = arg_size++;
2159 body->param.keyword = keyword;
2160 body->param.flags.has_kwrest = TRUE;
2162 static ID anon_kwrest = 0;
2163 if (!anon_kwrest) anon_kwrest = rb_intern(
"**");
2164 if (kw_id == anon_kwrest) body->param.flags.anon_kwrest = TRUE;
2166 else if (args->no_kwarg) {
2167 body->param.flags.accepts_no_kwarg = TRUE;
2170 if (args->no_blockarg) {
2171 body->param.flags.accepts_no_block = TRUE;
2173 else if (block_id) {
2174 body->param.block_start = arg_size++;
2175 body->param.flags.has_block = TRUE;
2176 iseq_set_use_block(iseq);
2180 if (optimized_forward) {
2181 body->param.flags.use_block = 1;
2182 body->param.flags.forwardable = TRUE;
2186 iseq_calc_param_size(iseq);
2187 body->param.size = arg_size;
2189 if (args->pre_init) {
2190 NO_CHECK(COMPILE_POPPED(optargs,
"init arguments (m)", args->pre_init));
2192 if (args->post_init) {
2193 NO_CHECK(COMPILE_POPPED(optargs,
"init arguments (p)", args->post_init));
2196 if (body->type == ISEQ_TYPE_BLOCK) {
2197 if (body->param.flags.has_opt == FALSE &&
2198 body->param.flags.has_post == FALSE &&
2199 body->param.flags.has_rest == FALSE &&
2200 body->param.flags.has_kw == FALSE &&
2201 body->param.flags.has_kwrest == FALSE) {
2203 if (body->param.lead_num == 1 && last_comma == 0) {
2205 body->param.flags.ambiguous_param0 = TRUE;
2217 unsigned int size = tbl ? tbl->size : 0;
2218 unsigned int offset = 0;
2221 struct rb_args_info *args = &RNODE_ARGS(node_args)->nd_ainfo;
2226 if (args->forwarding && args->pre_args_num == 0 && !args->opt_args) {
2235 MEMCPY(ids, tbl->ids + offset,
ID, size);
2236 ISEQ_BODY(iseq)->local_table = ids;
2238 enum lvar_state *states =
ALLOC_N(
enum lvar_state, size);
2240 for (
unsigned int i=0; i<size; i++) {
2241 states[i] = lvar_uninitialized;
2244 ISEQ_BODY(iseq)->lvar_states = states;
2246 ISEQ_BODY(iseq)->local_table_size = size;
2248 debugs(
"iseq_set_local_table: %u\n", ISEQ_BODY(iseq)->local_table_size);
2260 else if ((tlit = OBJ_BUILTIN_TYPE(lit)) == -1) {
2263 else if ((tval = OBJ_BUILTIN_TYPE(val)) == -1) {
2266 else if (tlit != tval) {
2276 long x =
FIX2LONG(rb_big_cmp(lit, val));
2284 return rb_float_cmp(lit, val);
2287 const struct RRational *rat1 = RRATIONAL(val);
2288 const struct RRational *rat2 = RRATIONAL(lit);
2289 return rb_iseq_cdhash_cmp(rat1->num, rat2->num) || rb_iseq_cdhash_cmp(rat1->den, rat2->den);
2292 const struct RComplex *comp1 = RCOMPLEX(val);
2293 const struct RComplex *comp2 = RCOMPLEX(lit);
2294 return rb_iseq_cdhash_cmp(comp1->real, comp2->real) || rb_iseq_cdhash_cmp(comp1->imag, comp2->imag);
2297 return rb_reg_equal(val, lit) ? 0 : -1;
/* rb_iseq_cdhash_hash: hash function for case/when dispatch hashes
 * (cdhash), paired with rb_iseq_cdhash_cmp above. Dispatches on the
 * builtin type of the key; immediates hash as their VALUE bits,
 * Rational/Complex via their dedicated hash functions.
 * NOTE(review): arms for other types (e.g. Bignum/Float/String) were
 * dropped by the extraction. */
2305rb_iseq_cdhash_hash(
VALUE a)
 2307 switch (OBJ_BUILTIN_TYPE(a)) {
 2310 return (st_index_t)a;
 2318 return rb_rational_hash(a);
 2320 return rb_complex_hash(a);
2330 rb_iseq_cdhash_hash,
2344 rb_hash_aset(data->hash, key,
INT2FIX(lobj->position - (data->pos+data->len)));
2352 return INT2FIX(ISEQ_BODY(iseq)->ivc_size++);
2359 struct rb_id_table *tbl = ISEQ_COMPILE_DATA(iseq)->ivar_cache_table;
2361 if (rb_id_table_lookup(tbl,
id,&val)) {
2366 tbl = rb_id_table_create(1);
2367 ISEQ_COMPILE_DATA(iseq)->ivar_cache_table = tbl;
2369 val =
INT2FIX(ISEQ_BODY(iseq)->icvarc_size++);
2370 rb_id_table_insert(tbl,
id,val);
2374#define BADINSN_DUMP(anchor, list, dest) \
2375 dump_disasm_list_with_cursor(FIRST_ELEMENT(anchor), list, dest)
2377#define BADINSN_ERROR \
2378 (SIZED_FREE_N(generated_iseq, generated_iseq_size), \
2379 SIZED_FREE_N(insns_info, insns_info_size), \
2380 BADINSN_DUMP(anchor, list, NULL), \
2386 int stack_max = 0, sp = 0, line = 0;
2389 for (list = FIRST_ELEMENT(anchor); list; list = list->next) {
2390 if (IS_LABEL(list)) {
2396 for (list = FIRST_ELEMENT(anchor); list; list = list->next) {
2397 switch (list->type) {
2398 case ISEQ_ELEMENT_INSN:
2406 sp = calc_sp_depth(sp, iobj);
2408 BADINSN_DUMP(anchor, list, NULL);
2409 COMPILE_ERROR(iseq, iobj->insn_info.line_no,
2410 "argument stack underflow (%d)", sp);
2413 if (sp > stack_max) {
2417 line = iobj->insn_info.line_no;
2419 operands = iobj->operands;
2420 insn = iobj->insn_id;
2421 types = insn_op_types(insn);
2422 len = insn_len(insn);
2425 if (iobj->operand_size !=
len - 1) {
2427 BADINSN_DUMP(anchor, list, NULL);
2428 COMPILE_ERROR(iseq, iobj->insn_info.line_no,
2429 "operand size miss! (%d for %d)",
2430 iobj->operand_size,
len - 1);
2434 for (j = 0; types[j]; j++) {
2435 if (types[j] == TS_OFFSET) {
2439 BADINSN_DUMP(anchor, list, NULL);
2440 COMPILE_ERROR(iseq, iobj->insn_info.line_no,
2441 "unknown label: "LABEL_FORMAT, lobj->label_no);
2444 if (lobj->sp == -1) {
2447 else if (lobj->sp != sp) {
2448 debugs(
"%s:%d: sp inconsistency found but ignored (" LABEL_FORMAT
" sp: %d, calculated sp: %d)\n",
2449 RSTRING_PTR(rb_iseq_path(iseq)), line,
2450 lobj->label_no, lobj->sp, sp);
2456 case ISEQ_ELEMENT_LABEL:
2459 if (lobj->sp == -1) {
2463 if (lobj->sp != sp) {
2464 debugs(
"%s:%d: sp inconsistency found but ignored (" LABEL_FORMAT
" sp: %d, calculated sp: %d)\n",
2465 RSTRING_PTR(rb_iseq_path(iseq)), line,
2466 lobj->label_no, lobj->sp, sp);
2472 case ISEQ_ELEMENT_TRACE:
2477 case ISEQ_ELEMENT_ADJUST:
2482 sp = adjust->label ? adjust->label->sp : 0;
2483 if (adjust->line_no != -1 && orig_sp - sp < 0) {
2484 BADINSN_DUMP(anchor, list, NULL);
2485 COMPILE_ERROR(iseq, adjust->line_no,
2486 "iseq_set_sequence: adjust bug %d < %d",
2493 BADINSN_DUMP(anchor, list, NULL);
2494 COMPILE_ERROR(iseq, line,
"unknown list type: %d", list->type);
2503 int insns_info_index,
int code_index,
const INSN *iobj)
2505 if (insns_info_index == 0 ||
2506 insns_info[insns_info_index-1].line_no != iobj->insn_info.line_no ||
2507#ifdef USE_ISEQ_NODE_ID
2508 insns_info[insns_info_index-1].node_id != iobj->insn_info.node_id ||
2510 insns_info[insns_info_index-1].events != iobj->insn_info.events) {
2511 insns_info[insns_info_index].line_no = iobj->insn_info.line_no;
2512#ifdef USE_ISEQ_NODE_ID
2513 insns_info[insns_info_index].node_id = iobj->insn_info.node_id;
2515 insns_info[insns_info_index].events = iobj->insn_info.events;
2516 positions[insns_info_index] = code_index;
2524 int insns_info_index,
int code_index,
const ADJUST *adjust)
2526 insns_info[insns_info_index].line_no = adjust->line_no;
2527 insns_info[insns_info_index].node_id = -1;
2528 insns_info[insns_info_index].events = 0;
2529 positions[insns_info_index] = code_index;
/* array_to_idlist: convert a Ruby Array into a C array of IDs
 * (loop body and allocation lines dropped by the extraction). */
2534array_to_idlist(
VALUE arr)
 2539 for (
long i = 0; i < size; i++) {
/* idlist_to_array: inverse conversion — build a Ruby Array from a
 * NUL?-terminated ID list (body dropped by the extraction; terminator
 * convention not visible — confirm against upstream). */
2548idlist_to_array(
const ID *ids)
2565 unsigned int *positions;
2567 VALUE *generated_iseq;
2571 int insn_num, code_index, insns_info_index, sp = 0;
2572 int stack_max = fix_sp_depth(iseq, anchor);
2574 if (stack_max < 0)
return COMPILE_NG;
2577 insn_num = code_index = 0;
2578 for (list = FIRST_ELEMENT(anchor); list; list = list->next) {
2579 switch (list->type) {
2580 case ISEQ_ELEMENT_INSN:
2584 sp = calc_sp_depth(sp, iobj);
2586 events = iobj->insn_info.events |= events;
2587 if (ISEQ_COVERAGE(iseq)) {
2588 if (ISEQ_LINE_COVERAGE(iseq) && (events & RUBY_EVENT_COVERAGE_LINE) &&
2589 !(rb_get_coverage_mode() & COVERAGE_TARGET_ONESHOT_LINES)) {
2590 int line = iobj->insn_info.line_no - 1;
2591 if (line >= 0 && line <
RARRAY_LEN(ISEQ_LINE_COVERAGE(iseq))) {
2595 if (ISEQ_BRANCH_COVERAGE(iseq) && (events & RUBY_EVENT_COVERAGE_BRANCH)) {
2596 while (
RARRAY_LEN(ISEQ_PC2BRANCHINDEX(iseq)) <= code_index) {
2602 code_index += insn_data_length(iobj);
2607 case ISEQ_ELEMENT_LABEL:
2610 lobj->position = code_index;
2611 if (lobj->sp != sp) {
2612 debugs(
"%s: sp inconsistency found but ignored (" LABEL_FORMAT
" sp: %d, calculated sp: %d)\n",
2613 RSTRING_PTR(rb_iseq_path(iseq)),
2614 lobj->label_no, lobj->sp, sp);
2619 case ISEQ_ELEMENT_TRACE:
2622 events |= trace->event;
2623 if (trace->event & RUBY_EVENT_COVERAGE_BRANCH) data = trace->data;
2626 case ISEQ_ELEMENT_ADJUST:
2629 if (adjust->line_no != -1) {
2631 sp = adjust->label ? adjust->label->sp : 0;
2632 if (orig_sp - sp > 0) {
2633 if (orig_sp - sp > 1) code_index++;
2645 const int generated_iseq_size = code_index;
2648 const int insns_info_size = insn_num;
2651 const int positions_size = insn_num;
2652 positions =
ALLOC_N(
unsigned int, insn_num);
2653 if (ISEQ_IS_SIZE(body)) {
2657 body->is_entries = NULL;
2660 if (body->ci_size) {
2664 body->call_data = NULL;
2666 ISEQ_COMPILE_DATA(iseq)->ci_index = 0;
2673 iseq_bits_t * mark_offset_bits;
2674 int code_size = code_index;
2676 bool needs_bitmap =
false;
2678 const size_t mark_offset_bits_size = ISEQ_MBITS_BUFLEN(code_index);
2679 if (mark_offset_bits_size == 1) {
2680 mark_offset_bits = &ISEQ_COMPILE_DATA(iseq)->mark_bits.single;
2681 ISEQ_COMPILE_DATA(iseq)->is_single_mark_bit =
true;
2684 mark_offset_bits =
ZALLOC_N(iseq_bits_t, mark_offset_bits_size);
2685 ISEQ_COMPILE_DATA(iseq)->mark_bits.list = mark_offset_bits;
2686 ISEQ_COMPILE_DATA(iseq)->is_single_mark_bit =
false;
2689 ISEQ_COMPILE_DATA(iseq)->iseq_encoded = (
void *)generated_iseq;
2690 ISEQ_COMPILE_DATA(iseq)->iseq_size = code_index;
2692 list = FIRST_ELEMENT(anchor);
2693 insns_info_index = code_index = sp = 0;
2696 switch (list->type) {
2697 case ISEQ_ELEMENT_INSN:
2705 sp = calc_sp_depth(sp, iobj);
2707 operands = iobj->operands;
2708 insn = iobj->insn_id;
2709 generated_iseq[code_index] = insn;
2710 types = insn_op_types(insn);
2711 len = insn_len(insn);
2713 for (j = 0; types[j]; j++) {
2714 char type = types[j];
2722 generated_iseq[code_index + 1 + j] = lobj->position - (code_index +
len);
2727 VALUE map = operands[j];
2730 data.pos = code_index;
2734 rb_hash_rehash(map);
2735 freeze_hide_obj(map);
2737 generated_iseq[code_index + 1 + j] = map;
2738 ISEQ_MBITS_SET(mark_offset_bits, code_index + 1 + j);
2740 needs_bitmap =
true;
2745 generated_iseq[code_index + 1 + j] =
FIX2INT(operands[j]);
2750 VALUE v = operands[j];
2751 generated_iseq[code_index + 1 + j] = v;
2755 ISEQ_MBITS_SET(mark_offset_bits, code_index + 1 + j);
2756 needs_bitmap =
true;
2763 unsigned int ic_index = ISEQ_COMPILE_DATA(iseq)->ic_index++;
2764 IC ic = &ISEQ_IS_ENTRY_START(body,
type)[ic_index].ic_cache;
2765 if (UNLIKELY(ic_index >= body->ic_size)) {
2766 BADINSN_DUMP(anchor, &iobj->link, 0);
2767 COMPILE_ERROR(iseq, iobj->insn_info.line_no,
2768 "iseq_set_sequence: ic_index overflow: index: %d, size: %d",
2769 ic_index, ISEQ_IS_SIZE(body));
2772 ic->
segments = array_to_idlist(operands[j]);
2774 generated_iseq[code_index + 1 + j] = (
VALUE)ic;
2779 unsigned int ic_index =
FIX2UINT(operands[j]);
2781 IVC cache = ((
IVC)&body->is_entries[ic_index]);
2783 if (insn == BIN(setinstancevariable)) {
2784 cache->iv_set_name =
SYM2ID(operands[j - 1]);
2787 cache->iv_set_name = 0;
2790 vm_ic_attr_index_initialize(cache, INVALID_SHAPE_ID);
2795 unsigned int ic_index =
FIX2UINT(operands[j]);
2796 IC ic = &ISEQ_IS_ENTRY_START(body,
type)[ic_index].ic_cache;
2797 if (UNLIKELY(ic_index >= ISEQ_IS_SIZE(body))) {
2798 BADINSN_DUMP(anchor, &iobj->link, 0);
2799 COMPILE_ERROR(iseq, iobj->insn_info.line_no,
2800 "iseq_set_sequence: ic_index overflow: index: %d, size: %d",
2801 ic_index, ISEQ_IS_SIZE(body));
2803 generated_iseq[code_index + 1 + j] = (
VALUE)ic;
2810 RUBY_ASSERT(ISEQ_COMPILE_DATA(iseq)->ci_index <= body->ci_size);
2811 struct rb_call_data *cd = &body->call_data[ISEQ_COMPILE_DATA(iseq)->ci_index++];
2813 cd->cc = vm_cc_empty();
2814 generated_iseq[code_index + 1 + j] = (
VALUE)cd;
2818 generated_iseq[code_index + 1 + j] =
SYM2ID(operands[j]);
2821 generated_iseq[code_index + 1 + j] = operands[j];
2824 generated_iseq[code_index + 1 + j] = operands[j];
2827 BADINSN_ERROR(iseq, iobj->insn_info.line_no,
2828 "unknown operand type: %c",
type);
2832 if (add_insn_info(insns_info, positions, insns_info_index, code_index, iobj)) insns_info_index++;
2836 case ISEQ_ELEMENT_LABEL:
2839 if (lobj->sp != sp) {
2840 debugs(
"%s: sp inconsistency found but ignored (" LABEL_FORMAT
" sp: %d, calculated sp: %d)\n",
2841 RSTRING_PTR(rb_iseq_path(iseq)),
2842 lobj->label_no, lobj->sp, sp);
2847 case ISEQ_ELEMENT_ADJUST:
2852 if (adjust->label) {
2853 sp = adjust->label->sp;
2859 if (adjust->line_no != -1) {
2860 const int diff = orig_sp - sp;
2862 if (insns_info_index == 0) {
2863 COMPILE_ERROR(iseq, adjust->line_no,
2864 "iseq_set_sequence: adjust bug (ISEQ_ELEMENT_ADJUST must not be the first in iseq)");
2866 if (add_adjust_info(insns_info, positions, insns_info_index, code_index, adjust)) insns_info_index++;
2869 generated_iseq[code_index++] = BIN(adjuststack);
2870 generated_iseq[code_index++] = orig_sp - sp;
2872 else if (diff == 1) {
2873 generated_iseq[code_index++] = BIN(pop);
2875 else if (diff < 0) {
2876 int label_no = adjust->label ? adjust->label->label_no : -1;
2877 SIZED_FREE_N(generated_iseq, generated_iseq_size);
2878 SIZED_FREE_N(insns_info, insns_info_size);
2879 SIZED_FREE_N(positions, positions_size);
2880 if (ISEQ_MBITS_BUFLEN(code_size) > 1) {
2881 SIZED_FREE_N(mark_offset_bits, ISEQ_MBITS_BUFLEN(code_index));
2883 debug_list(anchor, list);
2884 COMPILE_ERROR(iseq, adjust->line_no,
2885 "iseq_set_sequence: adjust bug to %d %d < %d",
2886 label_no, orig_sp, sp);
2899 body->iseq_encoded = (
void *)generated_iseq;
2900 body->iseq_size = code_index;
2901 body->stack_max = stack_max;
2903 if (ISEQ_COMPILE_DATA(iseq)->is_single_mark_bit) {
2904 body->mark_bits.single = ISEQ_COMPILE_DATA(iseq)->mark_bits.single;
2908 body->mark_bits.list = mark_offset_bits;
2911 body->mark_bits.list = NULL;
2912 ISEQ_COMPILE_DATA(iseq)->mark_bits.list = NULL;
2913 SIZED_FREE_N(mark_offset_bits, mark_offset_bits_size);
2918 body->insns_info.body = insns_info;
2919 body->insns_info.positions = positions;
2922 body->insns_info.body = insns_info;
2923 SIZED_REALLOC_N(positions,
unsigned int, insns_info_index, positions_size);
2924 body->insns_info.positions = positions;
2925 body->insns_info.size = insns_info_index;
/* label_get_position: final bytecode offset assigned to the label
 * during iseq_set_sequence. */
2931label_get_position(
LABEL *lobj)
 2933 return lobj->position;
/* label_get_sp: stack depth recorded at the label (body line dropped
 * by the extraction; presumably returns lobj->sp). */
2937label_get_sp(
LABEL *lobj)
/* iseq_set_exception_table: materialize the final catch table from the
 * compile-time catch_table_ary (an Array of per-entry Arrays kept in
 * ISEQ_COMPILE_DATA). Nil means no handlers: catch_table stays NULL and
 * we return COMPILE_OK early. Otherwise each entry decodes:
 *   ptr[0] low 16 bits -> catch type; ptr[1]/ptr[2] -> start/end labels
 *   (tag bit masked off with ~1) resolved to bytecode offsets;
 *   cont/sp taken from the continuation label.
 * The RESCUE/BREAK/NEXT branch presumably adjusts sp/iseq fields for
 * handler invocation — its body was dropped by the extraction.
 * Finally the table is published on the body and catch_table_ary is
 * cleared via RB_OBJ_WRITE (write barrier). */
2943iseq_set_exception_table(
rb_iseq_t *iseq)
 2945 const VALUE *tptr, *ptr;
 2946 unsigned int tlen, i;
 2949 ISEQ_BODY(iseq)->catch_table = NULL;
 2951 VALUE catch_table_ary = ISEQ_COMPILE_DATA(iseq)->catch_table_ary;
 2952 if (
NIL_P(catch_table_ary))
return COMPILE_OK;
 2960 for (i = 0; i < table->size; i++) {
/* UNALIGNED_MEMBER_PTR: entries[] may not be naturally aligned. */
 2963 entry = UNALIGNED_MEMBER_PTR(table, entries[i]);
 2964 entry->type = (
enum rb_catch_type)(ptr[0] & 0xffff);
 2965 pos = label_get_position((
LABEL *)(ptr[1] & ~1));
 2967 entry->start = (
unsigned int)pos;
 2968 pos = label_get_position((
LABEL *)(ptr[2] & ~1));
 2970 entry->end = (
unsigned int)pos;
 2977 entry->cont = label_get_position(lobj);
 2978 entry->sp = label_get_sp(lobj);
 2981 if (entry->type == CATCH_TYPE_RESCUE ||
 2982 entry->type == CATCH_TYPE_BREAK ||
 2983 entry->type == CATCH_TYPE_NEXT) {
 2992 ISEQ_BODY(iseq)->catch_table = table;
 2993 RB_OBJ_WRITE(iseq, &ISEQ_COMPILE_DATA(iseq)->catch_table_ary, 0);
3014 VALUE *opt_table = (
VALUE *)ISEQ_BODY(iseq)->param.opt_table;
3016 if (ISEQ_BODY(iseq)->param.flags.has_opt) {
3017 for (i = 0; i < ISEQ_BODY(iseq)->param.opt_num + 1; i++) {
3018 opt_table[i] = label_get_position((
LABEL *)opt_table[i]);
/* get_destination_insn: follow a jump instruction's label operand to
 * the first real element after it, skipping over labels and TRACE
 * elements. Events attached to skipped TRACE elements are accumulated
 * and folded into iobj's insn_info so they are not lost when peephole
 * optimization rewires the jump. NOTE(review): loop structure and the
 * final return were dropped by the extraction. */
3025get_destination_insn(
INSN *iobj)
 3031 list = lobj->link.next;
 3033 switch (list->type) {
 3034 case ISEQ_ELEMENT_INSN:
 3035 case ISEQ_ELEMENT_ADJUST:
 3037 case ISEQ_ELEMENT_LABEL:
 3040 case ISEQ_ELEMENT_TRACE:
 3043 events |= trace->event;
/* Only propagate collected events when we actually landed on an INSN. */
 3051 if (list && IS_INSN(list)) {
 3053 iobj->insn_info.events |= events;
/* get_next_insn: scan forward from iobj for the next INSN or ADJUST
 * element (skipping labels/traces; loop and returns dropped by the
 * extraction). */
3059get_next_insn(
INSN *iobj)
 3064 if (IS_INSN(list) || IS_ADJUST(list)) {
/* get_prev_insn: same scan, but backward. */
3073get_prev_insn(
INSN *iobj)
 3078 if (IS_INSN(list) || IS_ADJUST(list)) {
/* unref_destination: drop iobj's reference to the label in operand
 * `pos`; when the label's refcount hits zero it is unlinked from the
 * element list (the decrement line was dropped by the extraction). */
3087unref_destination(
INSN *iobj,
int pos)
 3089 LABEL *lobj = (
LABEL *)OPERAND_AT(iobj, pos);
 3091 if (!lobj->refcnt) ELEM_REMOVE(&lobj->link);
/* replace_destination: retarget jump dobj to nobj's destination label.
 * Returns false (no change) if both already point at the same label;
 * otherwise swaps the operand, and removes the old label once
 * unreferenced. Refcount transfer lines were dropped by the
 * extraction. */
3095replace_destination(
INSN *dobj,
INSN *nobj)
 3097 VALUE n = OPERAND_AT(nobj, 0);
 3100 if (dl == nl)
return false;
 3103 OPERAND_AT(dobj, 0) = n;
 3104 if (!dl->refcnt) ELEM_REMOVE(&dl->link);
/* find_destination: return the first TS_OFFSET (label) operand of
 * instruction i, or presumably NULL when it has none (the fallthrough
 * return was dropped by the extraction). Used by dead-code elimination
 * to count label references. */
3109find_destination(
INSN *i)
 3111 int pos,
len = insn_len(i->insn_id);
 3112 for (pos = 0; pos <
len; ++pos) {
 3113 if (insn_op_types(i->insn_id)[pos] == TS_OFFSET) {
 3114 return (
LABEL *)OPERAND_AT(i, pos);
3124 int *unref_counts = 0, nlabels = ISEQ_COMPILE_DATA(iseq)->label_no;
3127 unref_counts =
ALLOCA_N(
int, nlabels);
3128 MEMZERO(unref_counts,
int, nlabels);
3133 if (IS_INSN_ID(i, leave)) {
3137 else if ((lab = find_destination((
INSN *)i)) != 0) {
3138 unref_counts[lab->label_no]++;
3141 else if (IS_LABEL(i)) {
3143 if (lab->unremovable)
return 0;
3144 if (lab->refcnt > unref_counts[lab->label_no]) {
3145 if (i == first)
return 0;
3150 else if (IS_TRACE(i)) {
3153 else if (IS_ADJUST(i)) {
3157 }
while ((i = i->next) != 0);
3162 VALUE insn = INSN_OF(i);
3163 int pos,
len = insn_len(insn);
3164 for (pos = 0; pos <
len; ++pos) {
3165 switch (insn_op_types(insn)[pos]) {
3167 unref_destination((
INSN *)i, pos);
3176 }
while ((i != end) && (i = i->next) != 0);
3183 switch (OPERAND_AT(iobj, 0)) {
3185 ELEM_REMOVE(&iobj->link);
3188 ELEM_REMOVE(&iobj->link);
3191 iobj->insn_id = BIN(adjuststack);
/* is_frozen_putstring: if insn pushes a string-like literal
 * (putstring / putchilledstring, or putobject — presumably guarded for
 * a String operand in a line dropped by the extraction), store the
 * literal in *op. Return statements were also dropped. Used by the
 * Range-literal peephole optimization below. */
3197is_frozen_putstring(
INSN *insn,
VALUE *op)
 3199 if (IS_INSN_ID(insn, putstring) || IS_INSN_ID(insn, putchilledstring)) {
 3200 *op = OPERAND_AT(insn, 0);
 3203 else if (IS_INSN_ID(insn, putobject)) {
 3204 *op = OPERAND_AT(insn, 0);
3215 if (prev->type == ISEQ_ELEMENT_LABEL) {
3217 if (label->refcnt > 0) {
3221 else if (prev->type == ISEQ_ELEMENT_INSN) {
3254 INSN *niobj, *ciobj, *dup = 0;
3258 switch (INSN_OF(iobj)) {
3259 case BIN(putstring):
3260 case BIN(putchilledstring):
3266 case BIN(putobject):
3269 default:
return FALSE;
3272 ciobj = (
INSN *)get_next_insn(iobj);
3273 if (IS_INSN_ID(ciobj, jump)) {
3274 ciobj = (
INSN *)get_next_insn((
INSN*)OPERAND_AT(ciobj, 0));
3276 if (IS_INSN_ID(ciobj, dup)) {
3277 ciobj = (
INSN *)get_next_insn(dup = ciobj);
3279 if (!ciobj || !IS_INSN_ID(ciobj, checktype))
return FALSE;
3280 niobj = (
INSN *)get_next_insn(ciobj);
3285 switch (INSN_OF(niobj)) {
3287 if (OPERAND_AT(ciobj, 0) ==
type) {
3288 dest = (
LABEL *)OPERAND_AT(niobj, 0);
3291 case BIN(branchunless):
3292 if (OPERAND_AT(ciobj, 0) !=
type) {
3293 dest = (
LABEL *)OPERAND_AT(niobj, 0);
3299 line = ciobj->insn_info.line_no;
3300 node_id = ciobj->insn_info.node_id;
3302 if (niobj->link.next && IS_LABEL(niobj->link.next)) {
3303 dest = (
LABEL *)niobj->link.next;
3306 dest = NEW_LABEL(line);
3307 ELEM_INSERT_NEXT(&niobj->link, &dest->link);
3310 INSERT_AFTER_INSN1(iobj, line, node_id, jump, dest);
3312 if (!dup) INSERT_AFTER_INSN(iobj, line, node_id, pop);
3319 const struct rb_callinfo *nci = vm_ci_new(vm_ci_mid(ci),
3320 vm_ci_flag(ci) | add,
3330 const struct rb_callinfo *nci = vm_ci_new(vm_ci_mid(ci),
3338#define vm_ci_simple(ci) (vm_ci_flag(ci) & VM_CALL_ARGS_SIMPLE)
3346 optimize_checktype(iseq, iobj);
3348 if (IS_INSN_ID(iobj, jump)) {
3349 INSN *niobj, *diobj, *piobj;
3350 diobj = (
INSN *)get_destination_insn(iobj);
3351 niobj = (
INSN *)get_next_insn(iobj);
3353 if (diobj == niobj) {
3360 unref_destination(iobj, 0);
3361 ELEM_REMOVE(&iobj->link);
3364 else if (iobj != diobj && IS_INSN(&diobj->link) &&
3365 IS_INSN_ID(diobj, jump) &&
3366 OPERAND_AT(iobj, 0) != OPERAND_AT(diobj, 0) &&
3367 diobj->insn_info.events == 0) {
3378 if (replace_destination(iobj, diobj)) {
3379 remove_unreachable_chunk(iseq, iobj->link.next);
3383 else if (IS_INSN_ID(diobj, leave)) {
3396 unref_destination(iobj, 0);
3397 iobj->insn_id = BIN(leave);
3398 iobj->operand_size = 0;
3399 iobj->insn_info = diobj->insn_info;
3402 else if (IS_INSN(iobj->link.prev) &&
3403 (piobj = (
INSN *)iobj->link.prev) &&
3404 (IS_INSN_ID(piobj, branchif) ||
3405 IS_INSN_ID(piobj, branchunless))) {
3406 INSN *pdiobj = (
INSN *)get_destination_insn(piobj);
3407 if (niobj == pdiobj) {
3408 int refcnt = IS_LABEL(piobj->link.next) ?
3409 ((
LABEL *)piobj->link.next)->refcnt : 0;
3424 piobj->insn_id = (IS_INSN_ID(piobj, branchif))
3425 ? BIN(branchunless) : BIN(branchif);
3426 if (replace_destination(piobj, iobj) && refcnt <= 1) {
3427 ELEM_REMOVE(&iobj->link);
3434 else if (diobj == pdiobj) {
3448 INSN *popiobj = new_insn_core(iseq, iobj->insn_info.line_no, iobj->insn_info.node_id, BIN(pop), 0, 0);
3449 ELEM_REPLACE(&piobj->link, &popiobj->link);
3452 if (remove_unreachable_chunk(iseq, iobj->link.next)) {
3466 if (IS_INSN_ID(iobj, newrange)) {
3467 INSN *
const range = iobj;
3469 VALUE str_beg, str_end;
3471 if ((end = (
INSN *)get_prev_insn(range)) != 0 &&
3472 is_frozen_putstring(end, &str_end) &&
3473 (beg = (
INSN *)get_prev_insn(end)) != 0 &&
3474 is_frozen_putstring(beg, &str_beg) &&
3475 !(insn_has_label_before(&beg->link) || insn_has_label_before(&end->link))) {
3476 int excl =
FIX2INT(OPERAND_AT(range, 0));
3479 ELEM_REMOVE(&beg->link);
3480 ELEM_REMOVE(&end->link);
3481 range->insn_id = BIN(putobject);
3482 OPERAND_AT(range, 0) = lit_range;
3487 if (IS_INSN_ID(iobj, leave)) {
3488 remove_unreachable_chunk(iseq, iobj->link.next);
3500 if (IS_INSN_ID(iobj, duparray)) {
3502 if (IS_INSN(next) && (IS_INSN_ID(next, concatarray) || IS_INSN_ID(next, concattoarray))) {
3503 iobj->insn_id = BIN(putobject);
3513 if (IS_INSN_ID(iobj, duparray)) {
3515 if (IS_INSN(next) && (IS_INSN_ID(next, send))) {
3519 if (vm_ci_simple(ci) && vm_ci_argc(ci) == 0 && blockiseq == NULL && vm_ci_mid(ci) == idFreeze) {
3520 VALUE ary = iobj->operands[0];
3523 insn_replace_with_operands(iseq, iobj, BIN(opt_ary_freeze), 2, ary, (
VALUE)ci);
3535 if (IS_INSN_ID(iobj, duphash)) {
3537 if (IS_INSN(next) && (IS_INSN_ID(next, send))) {
3541 if (vm_ci_simple(ci) && vm_ci_argc(ci) == 0 && blockiseq == NULL && vm_ci_mid(ci) == idFreeze) {
3542 VALUE hash = iobj->operands[0];
3544 RB_OBJ_SET_SHAREABLE(hash);
3546 insn_replace_with_operands(iseq, iobj, BIN(opt_hash_freeze), 2, hash, (
VALUE)ci);
3558 if (IS_INSN_ID(iobj, newarray) && iobj->operands[0] ==
INT2FIX(0)) {
3560 if (IS_INSN(next) && (IS_INSN_ID(next, send))) {
3564 if (vm_ci_simple(ci) && vm_ci_argc(ci) == 0 && blockiseq == NULL && vm_ci_mid(ci) == idFreeze) {
3565 insn_replace_with_operands(iseq, iobj, BIN(opt_ary_freeze), 2, rb_cArray_empty_frozen, (
VALUE)ci);
3577 if (IS_INSN_ID(iobj, newhash) && iobj->operands[0] ==
INT2FIX(0)) {
3579 if (IS_INSN(next) && (IS_INSN_ID(next, send))) {
3583 if (vm_ci_simple(ci) && vm_ci_argc(ci) == 0 && blockiseq == NULL && vm_ci_mid(ci) == idFreeze) {
3584 insn_replace_with_operands(iseq, iobj, BIN(opt_hash_freeze), 2, rb_cHash_empty_frozen, (
VALUE)ci);
3590 if (IS_INSN_ID(iobj, branchif) ||
3591 IS_INSN_ID(iobj, branchnil) ||
3592 IS_INSN_ID(iobj, branchunless)) {
3601 INSN *nobj = (
INSN *)get_destination_insn(iobj);
3623 int stop_optimization =
3624 ISEQ_COVERAGE(iseq) && ISEQ_LINE_COVERAGE(iseq) &&
3625 nobj->link.type == ISEQ_ELEMENT_INSN &&
3626 nobj->insn_info.events;
3627 if (!stop_optimization) {
3628 INSN *pobj = (
INSN *)iobj->link.prev;
3631 if (!IS_INSN(&pobj->link))
3633 else if (IS_INSN_ID(pobj, dup))
3638 if (IS_INSN(&nobj->link) && IS_INSN_ID(nobj, jump)) {
3639 if (!replace_destination(iobj, nobj))
break;
3641 else if (prev_dup && IS_INSN_ID(nobj, dup) &&
3642 !!(nobj = (
INSN *)nobj->link.next) &&
3644 nobj->insn_id == iobj->insn_id) {
3660 if (!replace_destination(iobj, nobj))
break;
3688 if (prev_dup && IS_INSN(pobj->link.prev)) {
3689 pobj = (
INSN *)pobj->link.prev;
3691 if (IS_INSN_ID(pobj, putobject)) {
3692 cond = (IS_INSN_ID(iobj, branchif) ?
3693 OPERAND_AT(pobj, 0) !=
Qfalse :
3694 IS_INSN_ID(iobj, branchunless) ?
3695 OPERAND_AT(pobj, 0) ==
Qfalse :
3698 else if (IS_INSN_ID(pobj, putstring) ||
3699 IS_INSN_ID(pobj, duparray) ||
3700 IS_INSN_ID(pobj, newarray)) {
3701 cond = IS_INSN_ID(iobj, branchif);
3703 else if (IS_INSN_ID(pobj, putnil)) {
3704 cond = !IS_INSN_ID(iobj, branchif);
3707 if (prev_dup || !IS_INSN_ID(pobj, newarray)) {
3708 ELEM_REMOVE(iobj->link.prev);
3710 else if (!iseq_pop_newarray(iseq, pobj)) {
3711 pobj = new_insn_core(iseq, pobj->insn_info.line_no, pobj->insn_info.node_id, BIN(pop), 0, NULL);
3712 ELEM_INSERT_PREV(&iobj->link, &pobj->link);
3716 pobj = new_insn_core(iseq, pobj->insn_info.line_no, pobj->insn_info.node_id, BIN(putnil), 0, NULL);
3717 ELEM_INSERT_NEXT(&iobj->link, &pobj->link);
3719 iobj->insn_id = BIN(jump);
3723 unref_destination(iobj, 0);
3724 ELEM_REMOVE(&iobj->link);
3729 nobj = (
INSN *)get_destination_insn(nobj);
3734 if (IS_INSN_ID(iobj, pop)) {
3742 if (IS_INSN(prev)) {
3743 enum ruby_vminsn_type previ = ((
INSN *)prev)->insn_id;
3744 if (previ == BIN(putobject) || previ == BIN(putnil) ||
3745 previ == BIN(putself) || previ == BIN(putstring) ||
3746 previ == BIN(putchilledstring) ||
3747 previ == BIN(dup) ||
3748 previ == BIN(getlocal) ||
3749 previ == BIN(getblockparam) ||
3750 previ == BIN(getblockparamproxy) ||
3751 previ == BIN(getinstancevariable) ||
3752 previ == BIN(duparray)) {
3756 ELEM_REMOVE(&iobj->link);
3758 else if (previ == BIN(newarray) && iseq_pop_newarray(iseq, (
INSN*)prev)) {
3759 ELEM_REMOVE(&iobj->link);
3761 else if (previ == BIN(concatarray)) {
3763 INSERT_BEFORE_INSN1(piobj, piobj->insn_info.line_no, piobj->insn_info.node_id, splatarray,
Qfalse);
3764 INSN_OF(piobj) = BIN(pop);
3766 else if (previ == BIN(concatstrings)) {
3767 if (OPERAND_AT(prev, 0) ==
INT2FIX(1)) {
3771 ELEM_REMOVE(&iobj->link);
3772 INSN_OF(prev) = BIN(adjuststack);
3778 if (IS_INSN_ID(iobj, newarray) ||
3779 IS_INSN_ID(iobj, duparray) ||
3780 IS_INSN_ID(iobj, concatarray) ||
3781 IS_INSN_ID(iobj, splatarray) ||
3791 if (IS_INSN(next) && IS_INSN_ID(next, splatarray)) {
3797 if (IS_INSN_ID(iobj, newarray)) {
3799 if (IS_INSN(next) && IS_INSN_ID(next, expandarray) &&
3800 OPERAND_AT(next, 1) ==
INT2FIX(0)) {
3802 op1 = OPERAND_AT(iobj, 0);
3803 op2 = OPERAND_AT(next, 0);
3814 INSN_OF(iobj) = BIN(swap);
3815 iobj->operand_size = 0;
3824 INSN_OF(iobj) = BIN(opt_reverse);
3829 INSN_OF(iobj) = BIN(opt_reverse);
3830 OPERAND_AT(iobj, 0) = OPERAND_AT(next, 0);
3840 for (; diff > 0; diff--) {
3841 INSERT_BEFORE_INSN(iobj, iobj->insn_info.line_no, iobj->insn_info.node_id, pop);
3852 for (; diff < 0; diff++) {
3853 INSERT_BEFORE_INSN(iobj, iobj->insn_info.line_no, iobj->insn_info.node_id, putnil);
3860 if (IS_INSN_ID(iobj, duparray)) {
3869 if (IS_INSN(next) && IS_INSN_ID(next, expandarray)) {
3870 INSN_OF(iobj) = BIN(putobject);
3874 if (IS_INSN_ID(iobj, anytostring)) {
3882 if (IS_INSN(next) && IS_INSN_ID(next, concatstrings) &&
3883 OPERAND_AT(next, 0) ==
INT2FIX(1)) {
3888 if (IS_INSN_ID(iobj, putstring) || IS_INSN_ID(iobj, putchilledstring) ||
3896 if (IS_NEXT_INSN_ID(&iobj->link, concatstrings) &&
3897 RSTRING_LEN(OPERAND_AT(iobj, 0)) == 0) {
3898 INSN *next = (
INSN *)iobj->link.next;
3899 if ((OPERAND_AT(next, 0) = FIXNUM_INC(OPERAND_AT(next, 0), -1)) ==
INT2FIX(1)) {
3900 ELEM_REMOVE(&next->link);
3902 ELEM_REMOVE(&iobj->link);
3904 if (IS_NEXT_INSN_ID(&iobj->link, toregexp)) {
3905 INSN *next = (
INSN *)iobj->link.next;
3906 if (OPERAND_AT(next, 1) ==
INT2FIX(1)) {
3907 VALUE src = OPERAND_AT(iobj, 0);
3908 int opt = (int)
FIX2LONG(OPERAND_AT(next, 0));
3909 VALUE path = rb_iseq_path(iseq);
3910 int line = iobj->insn_info.line_no;
3911 VALUE errinfo = rb_errinfo();
3912 VALUE re = rb_reg_compile(src, opt, RSTRING_PTR(path), line);
3914 VALUE message = rb_attr_get(rb_errinfo(), idMesg);
3915 rb_set_errinfo(errinfo);
3916 COMPILE_ERROR(iseq, line,
"%" PRIsVALUE, message);
3919 RB_OBJ_SET_SHAREABLE(re);
3922 ELEM_REMOVE(iobj->link.next);
3927 if (IS_INSN_ID(iobj, concatstrings)) {
3936 if (IS_INSN(next) && IS_INSN_ID(next, jump))
3937 next = get_destination_insn(jump = (
INSN *)next);
3938 if (IS_INSN(next) && IS_INSN_ID(next, concatstrings)) {
3939 int n =
FIX2INT(OPERAND_AT(iobj, 0)) +
FIX2INT(OPERAND_AT(next, 0)) - 1;
3940 OPERAND_AT(iobj, 0) =
INT2FIX(n);
3942 LABEL *label = ((
LABEL *)OPERAND_AT(jump, 0));
3943 if (!--label->refcnt) {
3944 ELEM_REMOVE(&label->link);
3947 label = NEW_LABEL(0);
3948 OPERAND_AT(jump, 0) = (
VALUE)label;
3951 ELEM_INSERT_NEXT(next, &label->link);
3952 CHECK(iseq_peephole_optimize(iseq, get_next_insn(jump), do_tailcallopt));
3960 if (do_tailcallopt &&
3961 (IS_INSN_ID(iobj, send) ||
3962 IS_INSN_ID(iobj, invokesuper))) {
3971 if (iobj->link.next) {
3974 if (!IS_INSN(next)) {
3978 switch (INSN_OF(next)) {
3987 next = get_destination_insn((
INSN *)next);
4001 if (IS_INSN_ID(piobj, send) ||
4002 IS_INSN_ID(piobj, invokesuper)) {
4003 if (OPERAND_AT(piobj, 1) == 0) {
4004 ci = ci_flag_set(iseq, ci, VM_CALL_TAILCALL);
4005 OPERAND_AT(piobj, 0) = (
VALUE)ci;
4010 ci = ci_flag_set(iseq, ci, VM_CALL_TAILCALL);
4011 OPERAND_AT(piobj, 0) = (
VALUE)ci;
4017 if (IS_INSN_ID(iobj, dup)) {
4018 if (IS_NEXT_INSN_ID(&iobj->link, setlocal)) {
4029 if (IS_NEXT_INSN_ID(set1, setlocal)) {
4031 if (OPERAND_AT(set1, 0) == OPERAND_AT(set2, 0) &&
4032 OPERAND_AT(set1, 1) == OPERAND_AT(set2, 1)) {
4034 ELEM_REMOVE(&iobj->link);
4047 else if (IS_NEXT_INSN_ID(set1, dup) &&
4048 IS_NEXT_INSN_ID(set1->next, setlocal)) {
4049 set2 = set1->next->next;
4050 if (OPERAND_AT(set1, 0) == OPERAND_AT(set2, 0) &&
4051 OPERAND_AT(set1, 1) == OPERAND_AT(set2, 1)) {
4052 ELEM_REMOVE(set1->next);
4066 if (IS_INSN_ID(iobj, getlocal)) {
4068 if (IS_NEXT_INSN_ID(niobj, dup)) {
4069 niobj = niobj->next;
4071 if (IS_NEXT_INSN_ID(niobj, setlocal)) {
4073 if (OPERAND_AT(iobj, 0) == OPERAND_AT(set1, 0) &&
4074 OPERAND_AT(iobj, 1) == OPERAND_AT(set1, 1)) {
4090 if (IS_INSN_ID(iobj, opt_invokebuiltin_delegate)) {
4091 if (IS_TRACE(iobj->link.next)) {
4092 if (IS_NEXT_INSN_ID(iobj->link.next, leave)) {
4093 iobj->insn_id = BIN(opt_invokebuiltin_delegate_leave);
4095 if (iobj == (
INSN *)list && bf->argc == 0 && (ISEQ_BODY(iseq)->builtin_attrs & BUILTIN_ATTR_LEAF)) {
4096 ISEQ_BODY(iseq)->builtin_attrs |= BUILTIN_ATTR_SINGLE_NOARG_LEAF;
4109 if (IS_INSN_ID(iobj, getblockparam)) {
4110 if (IS_NEXT_INSN_ID(&iobj->link, branchif) || IS_NEXT_INSN_ID(&iobj->link, branchunless)) {
4111 iobj->insn_id = BIN(getblockparamproxy);
4115 if (IS_INSN_ID(iobj, splatarray) && OPERAND_AT(iobj, 0) ==
false) {
4117 if (IS_NEXT_INSN_ID(niobj, duphash)) {
4118 niobj = niobj->next;
4120 unsigned int set_flags = 0, unset_flags = 0;
4133 if (IS_NEXT_INSN_ID(niobj, send)) {
4134 siobj = niobj->next;
4135 set_flags = VM_CALL_ARGS_SPLAT|VM_CALL_KW_SPLAT|VM_CALL_KW_SPLAT_MUT;
4136 unset_flags = VM_CALL_ARGS_BLOCKARG;
4151 else if ((IS_NEXT_INSN_ID(niobj, getlocal) || IS_NEXT_INSN_ID(niobj, getinstancevariable) ||
4152 IS_NEXT_INSN_ID(niobj, getblockparamproxy)) && (IS_NEXT_INSN_ID(niobj->next, send))) {
4153 siobj = niobj->next->next;
4154 set_flags = VM_CALL_ARGS_SPLAT|VM_CALL_KW_SPLAT|VM_CALL_KW_SPLAT_MUT|VM_CALL_ARGS_BLOCKARG;
4159 unsigned int flags = vm_ci_flag(ci);
4160 if ((flags & set_flags) == set_flags && !(flags & unset_flags)) {
4161 ((
INSN*)niobj)->insn_id = BIN(putobject);
4162 RB_OBJ_WRITE(iseq, &OPERAND_AT(niobj, 0), RB_OBJ_SET_SHAREABLE(rb_hash_freeze(rb_hash_resurrect(OPERAND_AT(niobj, 0)))));
4164 const struct rb_callinfo *nci = vm_ci_new(vm_ci_mid(ci),
4165 flags & ~VM_CALL_KW_SPLAT_MUT, vm_ci_argc(ci), vm_ci_kwarg(ci));
4167 OPERAND_AT(siobj, 0) = (
VALUE)nci;
4177insn_set_specialized_instruction(
rb_iseq_t *iseq,
INSN *iobj,
int insn_id)
4179 if (insn_id == BIN(opt_neq)) {
4180 VALUE original_ci = iobj->operands[0];
4181 VALUE new_ci = (
VALUE)new_callinfo(iseq, idEq, 1, 0, NULL, FALSE);
4182 insn_replace_with_operands(iseq, iobj, insn_id, 2, new_ci, original_ci);
4185 iobj->insn_id = insn_id;
4186 iobj->operand_size = insn_len(insn_id) - 1;
4196 if (IS_INSN_ID(iobj, newarray) && iobj->link.next &&
4197 IS_INSN(iobj->link.next)) {
4201 INSN *niobj = (
INSN *)iobj->link.next;
4202 if (IS_INSN_ID(niobj, send)) {
4204 if (vm_ci_simple(ci) && vm_ci_argc(ci) == 0) {
4206 switch (vm_ci_mid(ci)) {
4208 method =
INT2FIX(VM_OPT_NEWARRAY_SEND_MAX);
4211 method =
INT2FIX(VM_OPT_NEWARRAY_SEND_MIN);
4214 method =
INT2FIX(VM_OPT_NEWARRAY_SEND_HASH);
4219 VALUE num = iobj->operands[0];
4220 insn_replace_with_operands(iseq, iobj, BIN(opt_newarray_send), 2, num, method);
4221 ELEM_REMOVE(&niobj->link);
4226 else if ((IS_INSN_ID(niobj, putstring) || IS_INSN_ID(niobj, putchilledstring) ||
4228 IS_NEXT_INSN_ID(&niobj->link, send)) {
4230 if (vm_ci_simple(ci) && vm_ci_argc(ci) == 1 && vm_ci_mid(ci) == idPack) {
4231 VALUE num = iobj->operands[0];
4232 insn_replace_with_operands(iseq, iobj, BIN(opt_newarray_send), 2, FIXNUM_INC(num, 1),
INT2FIX(VM_OPT_NEWARRAY_SEND_PACK));
4233 ELEM_REMOVE(&iobj->link);
4234 ELEM_REMOVE(niobj->link.next);
4235 ELEM_INSERT_NEXT(&niobj->link, &iobj->link);
4241 else if ((IS_INSN_ID(niobj, putstring) || IS_INSN_ID(niobj, putchilledstring) ||
4243 IS_NEXT_INSN_ID(&niobj->link, getlocal) &&
4244 (niobj->link.next && IS_NEXT_INSN_ID(niobj->link.next, send))) {
4247 if (vm_ci_mid(ci) == idPack && vm_ci_argc(ci) == 2 &&
4248 (kwarg && kwarg->keyword_len == 1 && kwarg->keywords[0] ==
rb_id2sym(idBuffer))) {
4249 VALUE num = iobj->operands[0];
4250 insn_replace_with_operands(iseq, iobj, BIN(opt_newarray_send), 2, FIXNUM_INC(num, 2),
INT2FIX(VM_OPT_NEWARRAY_SEND_PACK_BUFFER));
4252 ELEM_REMOVE((niobj->link.next)->next);
4254 ELEM_REMOVE(&iobj->link);
4256 ELEM_INSERT_NEXT(niobj->link.next, &iobj->link);
4264 if ((IS_INSN_ID(niobj, putstring) || IS_INSN_ID(niobj, putchilledstring) ||
4265 IS_INSN_ID(niobj, putobject) ||
4266 IS_INSN_ID(niobj, putself) ||
4267 IS_INSN_ID(niobj, getlocal) ||
4268 IS_INSN_ID(niobj, getinstancevariable)) &&
4269 IS_NEXT_INSN_ID(&niobj->link, send)) {
4276 sendobj = sendobj->next;
4277 ci = (
struct rb_callinfo *)OPERAND_AT(sendobj, 0);
4278 }
while (vm_ci_simple(ci) && vm_ci_argc(ci) == 0 && IS_NEXT_INSN_ID(sendobj, send));
4281 if (vm_ci_simple(ci) && vm_ci_argc(ci) == 1 && vm_ci_mid(ci) == idIncludeP) {
4282 VALUE num = iobj->operands[0];
4284 insn_replace_with_operands(iseq, sendins, BIN(opt_newarray_send), 2, FIXNUM_INC(num, 1),
INT2FIX(VM_OPT_NEWARRAY_SEND_INCLUDE_P));
4286 ELEM_REMOVE(&iobj->link);
4300 if (IS_INSN_ID(iobj, duparray) && iobj->link.next && IS_INSN(iobj->link.next)) {
4301 INSN *niobj = (
INSN *)iobj->link.next;
4302 if ((IS_INSN_ID(niobj, getlocal) ||
4303 IS_INSN_ID(niobj, getinstancevariable) ||
4304 IS_INSN_ID(niobj, putself)) &&
4305 IS_NEXT_INSN_ID(&niobj->link, send)) {
4312 sendobj = sendobj->next;
4313 ci = (
struct rb_callinfo *)OPERAND_AT(sendobj, 0);
4314 }
while (vm_ci_simple(ci) && vm_ci_argc(ci) == 0 && IS_NEXT_INSN_ID(sendobj, send));
4316 if (vm_ci_simple(ci) && vm_ci_argc(ci) == 1 && vm_ci_mid(ci) == idIncludeP) {
4318 VALUE ary = iobj->operands[0];
4322 insn_replace_with_operands(iseq, sendins, BIN(opt_duparray_send), 3, ary,
rb_id2sym(idIncludeP),
INT2FIX(1));
4325 ELEM_REMOVE(&iobj->link);
4332 if (IS_INSN_ID(iobj, send)) {
4336#define SP_INSN(opt) insn_set_specialized_instruction(iseq, iobj, BIN(opt_##opt))
4337 if (vm_ci_simple(ci)) {
4338 switch (vm_ci_argc(ci)) {
4340 switch (vm_ci_mid(ci)) {
4341 case idLength: SP_INSN(length);
return COMPILE_OK;
4342 case idSize: SP_INSN(size);
return COMPILE_OK;
4343 case idEmptyP: SP_INSN(empty_p);
return COMPILE_OK;
4344 case idNilP: SP_INSN(nil_p);
return COMPILE_OK;
4345 case idSucc: SP_INSN(succ);
return COMPILE_OK;
4346 case idNot: SP_INSN(not);
return COMPILE_OK;
4350 switch (vm_ci_mid(ci)) {
4351 case idPLUS: SP_INSN(plus);
return COMPILE_OK;
4352 case idMINUS: SP_INSN(minus);
return COMPILE_OK;
4353 case idMULT: SP_INSN(mult);
return COMPILE_OK;
4354 case idDIV: SP_INSN(div);
return COMPILE_OK;
4355 case idMOD: SP_INSN(mod);
return COMPILE_OK;
4356 case idEq: SP_INSN(eq);
return COMPILE_OK;
4357 case idNeq: SP_INSN(neq);
return COMPILE_OK;
4358 case idEqTilde:SP_INSN(regexpmatch2);
return COMPILE_OK;
4359 case idLT: SP_INSN(lt);
return COMPILE_OK;
4360 case idLE: SP_INSN(le);
return COMPILE_OK;
4361 case idGT: SP_INSN(gt);
return COMPILE_OK;
4362 case idGE: SP_INSN(ge);
return COMPILE_OK;
4363 case idLTLT: SP_INSN(ltlt);
return COMPILE_OK;
4364 case idAREF: SP_INSN(aref);
return COMPILE_OK;
4365 case idAnd: SP_INSN(and);
return COMPILE_OK;
4366 case idOr: SP_INSN(or);
return COMPILE_OK;
4370 switch (vm_ci_mid(ci)) {
4371 case idASET: SP_INSN(aset);
return COMPILE_OK;
4377 if ((vm_ci_flag(ci) & (VM_CALL_ARGS_BLOCKARG | VM_CALL_FORWARDING)) == 0 && blockiseq == NULL) {
4378 iobj->insn_id = BIN(opt_send_without_block);
4379 iobj->operand_size = insn_len(iobj->insn_id) - 1;
4390 switch (ISEQ_BODY(iseq)->
type) {
4392 case ISEQ_TYPE_EVAL:
4393 case ISEQ_TYPE_MAIN:
4395 case ISEQ_TYPE_RESCUE:
4396 case ISEQ_TYPE_ENSURE:
4408 const int do_peepholeopt = ISEQ_COMPILE_DATA(iseq)->option->peephole_optimization;
4409 const int do_tailcallopt = tailcallable_p(iseq) &&
4410 ISEQ_COMPILE_DATA(iseq)->option->tailcall_optimization;
4411 const int do_si = ISEQ_COMPILE_DATA(iseq)->option->specialized_instruction;
4412 const int do_ou = ISEQ_COMPILE_DATA(iseq)->option->operands_unification;
4413 const int do_without_ints = ISEQ_BODY(iseq)->builtin_attrs & BUILTIN_ATTR_WITHOUT_INTERRUPTS;
4414 int rescue_level = 0;
4415 int tailcallopt = do_tailcallopt;
4417 list = FIRST_ELEMENT(anchor);
4419 int do_block_optimization = 0;
4420 LABEL * block_loop_label = NULL;
4423 if (ISEQ_BODY(iseq)->
type == ISEQ_TYPE_BLOCK) {
4424 do_block_optimization = 1;
4429 if (IS_INSN(le) && IS_INSN_ID((
INSN *)le, nop) && IS_LABEL(le->next)) {
4430 block_loop_label = (
LABEL *)le->next;
4435 if (IS_INSN(list)) {
4436 if (do_peepholeopt) {
4437 iseq_peephole_optimize(iseq, list, tailcallopt);
4440 iseq_specialized_instruction(iseq, (
INSN *)list);
4443 insn_operands_unification((
INSN *)list);
4446 if (do_without_ints) {
4448 if (IS_INSN_ID(item, jump)) {
4449 item->insn_id = BIN(jump_without_ints);
4451 else if (IS_INSN_ID(item, branchif)) {
4452 item->insn_id = BIN(branchif_without_ints);
4454 else if (IS_INSN_ID(item, branchunless)) {
4455 item->insn_id = BIN(branchunless_without_ints);
4457 else if (IS_INSN_ID(item, branchnil)) {
4458 item->insn_id = BIN(branchnil_without_ints);
4462 if (do_block_optimization) {
4465 if (IS_INSN_ID(item,
throw)) {
4466 do_block_optimization = 0;
4471 const char *types = insn_op_types(item->insn_id);
4472 for (
int j = 0; types[j]; j++) {
4473 if (types[j] == TS_OFFSET) {
4478 LABEL * target = (
LABEL *)OPERAND_AT(item, j);
4479 if (target == block_loop_label) {
4480 do_block_optimization = 0;
4487 if (IS_LABEL(list)) {
4488 switch (((
LABEL *)list)->rescued) {
4489 case LABEL_RESCUE_BEG:
4491 tailcallopt = FALSE;
4493 case LABEL_RESCUE_END:
4494 if (!--rescue_level) tailcallopt = do_tailcallopt;
4501 if (do_block_optimization) {
4503 if (IS_INSN(le) && IS_INSN_ID((
INSN *)le, nop)) {
4510#if OPT_INSTRUCTIONS_UNIFICATION
4518 VALUE *operands = 0, *ptr = 0;
4522 for (i = 0; i < size; i++) {
4523 iobj = (
INSN *)list;
4524 argc += iobj->operand_size;
4529 ptr = operands = compile_data_alloc2_type(iseq,
VALUE, argc);
4534 for (i = 0; i < size; i++) {
4535 iobj = (
INSN *)list;
4536 MEMCPY(ptr, iobj->operands,
VALUE, iobj->operand_size);
4537 ptr += iobj->operand_size;
4541 return new_insn_core(iseq, iobj->insn_info.line_no, iobj->insn_info.node_id, insn_id, argc, operands);
4553#if OPT_INSTRUCTIONS_UNIFICATION
4559 list = FIRST_ELEMENT(anchor);
4561 if (IS_INSN(list)) {
4562 iobj = (
INSN *)list;
4564 if (unified_insns_data[
id] != 0) {
4565 const int *
const *entry = unified_insns_data[id];
4566 for (j = 1; j < (intptr_t)entry[0]; j++) {
4567 const int *unified = entry[j];
4569 for (k = 2; k < unified[1]; k++) {
4571 ((
INSN *)li)->insn_id != unified[k]) {
4578 new_unified_insn(iseq, unified[0], unified[1] - 1,
4583 niobj->link.next = li;
4602all_string_result_p(
const NODE *node)
4604 if (!node)
return FALSE;
4605 switch (nd_type(node)) {
4606 case NODE_STR:
case NODE_DSTR:
case NODE_FILE:
4608 case NODE_IF:
case NODE_UNLESS:
4609 if (!RNODE_IF(node)->nd_body || !RNODE_IF(node)->nd_else)
return FALSE;
4610 if (all_string_result_p(RNODE_IF(node)->nd_body))
4611 return all_string_result_p(RNODE_IF(node)->nd_else);
4613 case NODE_AND:
case NODE_OR:
4614 if (!RNODE_AND(node)->nd_2nd)
4615 return all_string_result_p(RNODE_AND(node)->nd_1st);
4616 if (!all_string_result_p(RNODE_AND(node)->nd_1st))
4618 return all_string_result_p(RNODE_AND(node)->nd_2nd);
4628 const NODE *lit_node;
4636 VALUE s = rb_str_new_mutable_parser_string(str);
4638 VALUE error = rb_reg_check_preprocess(s);
4639 if (!
NIL_P(error)) {
4640 COMPILE_ERROR(args->iseq, nd_line(node),
"%" PRIsVALUE, error);
4644 if (
NIL_P(args->lit)) {
4646 args->lit_node = node;
/*
 * flush_dstr_fragment: emit the literal string fragment buffered in
 * args->lit, if any, as a single `putobject` into args->ret.
 * NOTE(review): this extraction is elided — lines are missing between the
 * numbered fragments (e.g. the reset of the buffered state is not visible
 * here); do not assume the lines below are contiguous.
 */
4655flush_dstr_fragment(
struct dstr_ctxt *args)
/* Nothing buffered (args->lit is nil) -> nothing to emit. */
4657 if (!
NIL_P(args->lit)) {
4659 VALUE lit = args->lit;
/* Intern: literals embedded in an iseq are deduplicated and frozen. */
4661 lit = rb_fstring(lit);
4662 ADD_INSN1(args->ret, args->lit_node, putobject, lit);
/*
 * compile_dstr_fragments_0: walk a NODE_DSTR's fragment list, merging
 * adjacent literal strings into the pending buffer (append_dstr_fragment)
 * and flattening nested dstr nodes by recursion; any dynamic piece first
 * flushes the buffer and is then compiled as an ordinary expression.
 * NOTE(review): elided extraction — the loop framing and error paths are
 * missing between the numbered fragments below.
 */
4669compile_dstr_fragments_0(
struct dstr_ctxt *args,
const NODE *
const node)
4671 const struct RNode_LIST *list = RNODE_DSTR(node)->nd_next;
/* Leading literal carried by the dstr node itself. */
4675 CHECK(append_dstr_fragment(args, node, str));
4679 const NODE *
const head = list->nd_head;
/* Literal fragment: fold into the buffer instead of emitting code. */
4680 if (nd_type_p(head, NODE_STR)) {
4681 CHECK(append_dstr_fragment(args, node, RNODE_STR(head)->
string));
/* Nested dstr: flatten into the same fragment stream. */
4683 else if (nd_type_p(head, NODE_DSTR)) {
4684 CHECK(compile_dstr_fragments_0(args, head));
/* Dynamic piece: flush buffered literals, then compile the expression. */
4687 flush_dstr_fragment(args);
4689 CHECK(COMPILE(args->ret,
"each string", head));
4701 .iseq = iseq, .ret = ret,
4702 .lit =
Qnil, .lit_node = NULL,
4703 .cnt = 0, .dregx = dregx,
4705 CHECK(compile_dstr_fragments_0(&args, node));
4706 flush_dstr_fragment(&args);
4716 while (node && nd_type_p(node, NODE_BLOCK)) {
4717 CHECK(COMPILE_(ret,
"BLOCK body", RNODE_BLOCK(node)->nd_head,
4718 (RNODE_BLOCK(node)->nd_next ? 1 : popped)));
4719 node = RNODE_BLOCK(node)->nd_next;
4722 CHECK(COMPILE_(ret,
"BLOCK next", RNODE_BLOCK(node)->nd_next, popped));
4731 if (!RNODE_DSTR(node)->nd_next) {
4732 VALUE lit = rb_node_dstr_string_val(node);
4733 ADD_INSN1(ret, node, putstring, lit);
4734 RB_OBJ_SET_SHAREABLE(lit);
4738 CHECK(compile_dstr_fragments(iseq, ret, node, &cnt, FALSE));
4739 ADD_INSN1(ret, node, concatstrings,
INT2FIX(cnt));
4748 int cflag = (int)RNODE_DREGX(node)->as.nd_cflag;
4750 if (!RNODE_DREGX(node)->nd_next) {
4752 VALUE src = rb_node_dregx_string_val(node);
4753 VALUE match = rb_reg_compile(src, cflag, NULL, 0);
4754 RB_OBJ_SET_SHAREABLE(match);
4755 ADD_INSN1(ret, node, putobject, match);
4761 CHECK(compile_dstr_fragments(iseq, ret, node, &cnt, TRUE));
4765 ADD_INSN(ret, node, pop);
4775 const int line = nd_line(node);
4776 LABEL *lend = NEW_LABEL(line);
4777 rb_num_t cnt = ISEQ_FLIP_CNT_INCREMENT(ISEQ_BODY(iseq)->local_iseq)
4778 + VM_SVAR_FLIPFLOP_START;
4781 ADD_INSN2(ret, node, getspecial, key,
INT2FIX(0));
4782 ADD_INSNL(ret, node, branchif, lend);
4785 CHECK(COMPILE(ret,
"flip2 beg", RNODE_FLIP2(node)->nd_beg));
4786 ADD_INSNL(ret, node, branchunless, else_label);
4787 ADD_INSN1(ret, node, putobject,
Qtrue);
4788 ADD_INSN1(ret, node, setspecial, key);
4790 ADD_INSNL(ret, node, jump, then_label);
4794 ADD_LABEL(ret, lend);
4795 CHECK(COMPILE(ret,
"flip2 end", RNODE_FLIP2(node)->nd_end));
4796 ADD_INSNL(ret, node, branchunless, then_label);
4797 ADD_INSN1(ret, node, putobject,
Qfalse);
4798 ADD_INSN1(ret, node, setspecial, key);
4799 ADD_INSNL(ret, node, jump, then_label);
4808#define COMPILE_SINGLE 2
4815 LABEL *label = NEW_LABEL(nd_line(cond));
4816 if (!then_label) then_label = label;
4817 else if (!else_label) else_label = label;
4819 CHECK(compile_branch_condition(iseq, seq, cond, then_label, else_label));
4821 if (LIST_INSN_SIZE_ONE(seq)) {
4822 INSN *insn = (
INSN *)ELEM_FIRST_INSN(FIRST_ELEMENT(seq));
4823 if (insn->insn_id == BIN(jump) && (
LABEL *)(insn->operands[0]) == label)
4826 if (!label->refcnt) {
4827 return COMPILE_SINGLE;
4829 ADD_LABEL(seq, label);
4839 DECL_ANCHOR(ignore);
4842 switch (nd_type(cond)) {
4844 CHECK(ok = compile_logical(iseq, ret, RNODE_AND(cond)->nd_1st, NULL, else_label));
4845 cond = RNODE_AND(cond)->nd_2nd;
4846 if (ok == COMPILE_SINGLE) {
4847 INIT_ANCHOR(ignore);
4849 then_label = NEW_LABEL(nd_line(cond));
4853 CHECK(ok = compile_logical(iseq, ret, RNODE_OR(cond)->nd_1st, then_label, NULL));
4854 cond = RNODE_OR(cond)->nd_2nd;
4855 if (ok == COMPILE_SINGLE) {
4856 INIT_ANCHOR(ignore);
4858 else_label = NEW_LABEL(nd_line(cond));
4868 case NODE_IMAGINARY:
4875 ADD_INSNL(ret, cond, jump, then_label);
4880 ADD_INSNL(ret, cond, jump, else_label);
4886 CHECK(COMPILE_POPPED(ret,
"branch condition", cond));
4887 ADD_INSNL(ret, cond, jump, then_label);
4890 CHECK(compile_flip_flop(iseq, ret, cond, TRUE, then_label, else_label));
4893 CHECK(compile_flip_flop(iseq, ret, cond, FALSE, then_label, else_label));
4896 CHECK(compile_defined_expr(iseq, ret, cond,
Qfalse, ret == ignore));
4900 DECL_ANCHOR(cond_seq);
4901 INIT_ANCHOR(cond_seq);
4903 CHECK(COMPILE(cond_seq,
"branch condition", cond));
4905 if (LIST_INSN_SIZE_ONE(cond_seq)) {
4906 INSN *insn = (
INSN *)ELEM_FIRST_INSN(FIRST_ELEMENT(cond_seq));
4907 if (insn->insn_id == BIN(putobject)) {
4908 if (
RTEST(insn->operands[0])) {
4909 ADD_INSNL(ret, cond, jump, then_label);
4914 ADD_INSNL(ret, cond, jump, else_label);
4919 ADD_SEQ(ret, cond_seq);
4924 ADD_INSNL(ret, cond, branchunless, else_label);
4925 ADD_INSNL(ret, cond, jump, then_label);
4932keyword_node_p(
const NODE *
const node)
4934 return nd_type_p(node, NODE_HASH) && (RNODE_HASH(node)->nd_brace & HASH_BRACE) != HASH_BRACE;
4940 switch (nd_type(node)) {
4942 return rb_node_sym_string_val(node);
4944 UNKNOWN_NODE(
"get_symbol_value", node,
Qnil);
4951 NODE *node = node_hash->nd_head;
4952 VALUE hash = rb_hash_new();
4955 for (
int i = 0; node != NULL; i++, node = RNODE_LIST(RNODE_LIST(node)->nd_next)->nd_next) {
4956 VALUE key = get_symbol_value(iseq, RNODE_LIST(node)->nd_head);
4957 VALUE idx = rb_hash_aref(hash, key);
4962 rb_hash_aset(hash, key,
INT2FIX(i));
4972 const NODE *
const root_node,
4980 if (RNODE_HASH(root_node)->nd_head && nd_type_p(RNODE_HASH(root_node)->nd_head, NODE_LIST)) {
4981 const NODE *node = RNODE_HASH(root_node)->nd_head;
4985 const NODE *key_node = RNODE_LIST(node)->nd_head;
4989 if (key_node && nd_type_p(key_node, NODE_SYM)) {
4994 *flag |= VM_CALL_KW_SPLAT;
4995 if (seen_nodes > 1 || RNODE_LIST(RNODE_LIST(node)->nd_next)->nd_next) {
5000 *flag |= VM_CALL_KW_SPLAT_MUT;
5005 node = RNODE_LIST(node)->nd_next;
5006 node = RNODE_LIST(node)->nd_next;
5010 node = RNODE_HASH(root_node)->nd_head;
5013 VALUE key_index = node_hash_unique_key_index(iseq, RNODE_HASH(root_node), &
len);
5016 VALUE *keywords = kw_arg->keywords;
5019 kw_arg->references = 0;
5020 kw_arg->keyword_len =
len;
5022 *kw_arg_ptr = kw_arg;
5024 for (i=0; node != NULL; i++, node = RNODE_LIST(RNODE_LIST(node)->nd_next)->nd_next) {
5025 const NODE *key_node = RNODE_LIST(node)->nd_head;
5026 const NODE *val_node = RNODE_LIST(RNODE_LIST(node)->nd_next)->nd_head;
5029 keywords[j] = get_symbol_value(iseq, key_node);
5033 NO_CHECK(COMPILE_(ret,
"keyword values", val_node, popped));
5047 for (; node;
len++, node = RNODE_LIST(node)->nd_next) {
5049 EXPECT_NODE(
"compile_args", node, NODE_LIST, -1);
5052 if (RNODE_LIST(node)->nd_next == NULL && keyword_node_p(RNODE_LIST(node)->nd_head)) {
5053 *kwnode_ptr = RNODE_LIST(node)->nd_head;
5056 RUBY_ASSERT(!keyword_node_p(RNODE_LIST(node)->nd_head));
5057 NO_CHECK(COMPILE_(ret,
"array element", RNODE_LIST(node)->nd_head, FALSE));
5065frozen_string_literal_p(
const rb_iseq_t *iseq)
5067 return ISEQ_COMPILE_DATA(iseq)->option->frozen_string_literal > 0;
/*
 * static_literal_node_p: can `node` be evaluated at compile time to a
 * frozen literal VALUE?  `hash_key` relaxes the string case: a bare
 * string literal is acceptable as a hash key even when the file is not
 * compiled with frozen_string_literal.
 * NOTE(review): elided extraction — most case labels (integer, float,
 * symbol, nil/true/false, ...) are missing between the fragments below.
 */
5071static_literal_node_p(
const NODE *node,
const rb_iseq_t *iseq,
bool hash_key)
5073 switch (nd_type(node)) {
5081 case NODE_IMAGINARY:
/* Strings are static only as hash keys or under frozen-string-literal. */
5088 return hash_key || frozen_string_literal_p(iseq);
5097 switch (nd_type(node)) {
5100 VALUE lit = rb_node_integer_literal_val(node);
5106 VALUE lit = rb_node_float_literal_val(node);
5112 case NODE_IMAGINARY:
5121 return rb_node_sym_string_val(node);
5123 return RB_OBJ_SET_SHAREABLE(rb_node_regx_string_val(node));
5125 return rb_node_line_lineno_val(node);
5127 return rb_node_encoding_val(node);
5130 if (ISEQ_COMPILE_DATA(iseq)->option->debug_frozen_string_literal ||
RTEST(
ruby_debug)) {
5131 VALUE lit = get_string_value(node);
5132 VALUE str = rb_str_with_debug_created_info(lit, rb_iseq_path(iseq), (
int)nd_line(node));
5133 RB_OBJ_SET_SHAREABLE(str);
5137 return get_string_value(node);
5140 rb_bug(
"unexpected node: %s", ruby_node_name(nd_type(node)));
5147 const NODE *line_node = node;
5149 if (nd_type_p(node, NODE_ZLIST)) {
5151 ADD_INSN1(ret, line_node, newarray,
INT2FIX(0));
5156 EXPECT_NODE(
"compile_array", node, NODE_LIST, -1);
5159 for (; node; node = RNODE_LIST(node)->nd_next) {
5160 NO_CHECK(COMPILE_(ret,
"array element", RNODE_LIST(node)->nd_head, popped));
5202 const int max_stack_len = 0x100;
5203 const int min_tmp_ary_len = 0x40;
5207#define FLUSH_CHUNK \
5209 if (first_chunk) ADD_INSN1(ret, line_node, newarray, INT2FIX(stack_len)); \
5210 else ADD_INSN1(ret, line_node, pushtoarray, INT2FIX(stack_len)); \
5211 first_chunk = FALSE; \
5219 if (static_literal_node_p(RNODE_LIST(node)->nd_head, iseq,
false)) {
5221 const NODE *node_tmp = RNODE_LIST(node)->nd_next;
5222 for (; node_tmp && static_literal_node_p(RNODE_LIST(node_tmp)->nd_head, iseq,
false); node_tmp = RNODE_LIST(node_tmp)->nd_next)
5225 if ((first_chunk && stack_len == 0 && !node_tmp) || count >= min_tmp_ary_len) {
5230 for (; count; count--, node = RNODE_LIST(node)->nd_next)
5231 rb_ary_push(ary, static_literal_value(RNODE_LIST(node)->nd_head, iseq));
5232 RB_OBJ_SET_FROZEN_SHAREABLE(ary);
5237 ADD_INSN1(ret, line_node, duparray, ary);
5238 first_chunk = FALSE;
5241 ADD_INSN1(ret, line_node, putobject, ary);
5242 ADD_INSN(ret, line_node, concattoarray);
5244 RB_OBJ_SET_SHAREABLE(ary);
5250 for (; count; count--, node = RNODE_LIST(node)->nd_next) {
5252 EXPECT_NODE(
"compile_array", node, NODE_LIST, -1);
5255 if (!RNODE_LIST(node)->nd_next && keyword_node_p(RNODE_LIST(node)->nd_head)) {
5257 if (stack_len == 0 && first_chunk) {
5258 ADD_INSN1(ret, line_node, newarray,
INT2FIX(0));
5263 NO_CHECK(COMPILE_(ret,
"array element", RNODE_LIST(node)->nd_head, 0));
5264 ADD_INSN(ret, line_node, pushtoarraykwsplat);
5268 NO_CHECK(COMPILE_(ret,
"array element", RNODE_LIST(node)->nd_head, 0));
5273 if (stack_len >= max_stack_len) FLUSH_CHUNK;
5283static_literal_node_pair_p(
const NODE *node,
const rb_iseq_t *iseq)
5285 return RNODE_LIST(node)->nd_head && static_literal_node_p(RNODE_LIST(node)->nd_head, iseq,
true) && static_literal_node_p(RNODE_LIST(RNODE_LIST(node)->nd_next)->nd_head, iseq,
false);
5291 const NODE *line_node = node;
5293 node = RNODE_HASH(node)->nd_head;
5295 if (!node || nd_type_p(node, NODE_ZLIST)) {
5297 ADD_INSN1(ret, line_node, newhash,
INT2FIX(0));
5302 EXPECT_NODE(
"compile_hash", node, NODE_LIST, -1);
5305 for (; node; node = RNODE_LIST(node)->nd_next) {
5306 NO_CHECK(COMPILE_(ret,
"hash element", RNODE_LIST(node)->nd_head, popped));
5329 const int max_stack_len = 0x100;
5330 const int min_tmp_hash_len = 0x800;
5332 int first_chunk = 1;
5333 DECL_ANCHOR(anchor);
5334 INIT_ANCHOR(anchor);
5337#define FLUSH_CHUNK() \
5339 if (first_chunk) { \
5340 APPEND_LIST(ret, anchor); \
5341 ADD_INSN1(ret, line_node, newhash, INT2FIX(stack_len)); \
5344 ADD_INSN1(ret, line_node, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE)); \
5345 ADD_INSN(ret, line_node, swap); \
5346 APPEND_LIST(ret, anchor); \
5347 ADD_SEND(ret, line_node, id_core_hash_merge_ptr, INT2FIX(stack_len + 1)); \
5349 INIT_ANCHOR(anchor); \
5350 first_chunk = stack_len = 0; \
5357 if (static_literal_node_pair_p(node, iseq)) {
5359 const NODE *node_tmp = RNODE_LIST(RNODE_LIST(node)->nd_next)->nd_next;
5360 for (; node_tmp && static_literal_node_pair_p(node_tmp, iseq); node_tmp = RNODE_LIST(RNODE_LIST(node_tmp)->nd_next)->nd_next)
5363 if ((first_chunk && stack_len == 0 && !node_tmp) || count >= min_tmp_hash_len) {
5368 for (; count; count--, node = RNODE_LIST(RNODE_LIST(node)->nd_next)->nd_next) {
5370 elem[0] = static_literal_value(RNODE_LIST(node)->nd_head, iseq);
5372 elem[1] = static_literal_value(RNODE_LIST(RNODE_LIST(node)->nd_next)->nd_head, iseq);
5379 hash = RB_OBJ_SET_FROZEN_SHAREABLE(
rb_obj_hide(hash));
5384 ADD_INSN1(ret, line_node, duphash, hash);
5388 ADD_INSN1(ret, line_node, putspecialobject,
INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
5389 ADD_INSN(ret, line_node, swap);
5391 ADD_INSN1(ret, line_node, putobject, hash);
5393 ADD_SEND(ret, line_node, id_core_hash_merge_kwd,
INT2FIX(2));
5400 for (; count; count--, node = RNODE_LIST(RNODE_LIST(node)->nd_next)->nd_next) {
5403 EXPECT_NODE(
"compile_hash", node, NODE_LIST, -1);
5406 if (RNODE_LIST(node)->nd_head) {
5408 NO_CHECK(COMPILE_(anchor,
"hash key element", RNODE_LIST(node)->nd_head, 0));
5409 NO_CHECK(COMPILE_(anchor,
"hash value element", RNODE_LIST(RNODE_LIST(node)->nd_next)->nd_head, 0));
5413 if (stack_len >= max_stack_len) FLUSH_CHUNK();
5419 const NODE *kw = RNODE_LIST(RNODE_LIST(node)->nd_next)->nd_head;
5420 int empty_kw = nd_type_p(kw, NODE_HASH) && (!RNODE_HASH(kw)->nd_head);
5421 int first_kw = first_chunk && stack_len == 0;
5422 int last_kw = !RNODE_LIST(RNODE_LIST(node)->nd_next)->nd_next;
5423 int only_kw = last_kw && first_kw;
5425 empty_kw = empty_kw || nd_type_p(kw, NODE_NIL);
5427 if (only_kw && method_call_keywords) {
5435 NO_CHECK(COMPILE(ret,
"keyword splat", kw));
5437 else if (first_kw) {
5441 ADD_INSN1(ret, line_node, newhash,
INT2FIX(0));
5448 if (only_kw && method_call_keywords) {
5454 NO_CHECK(COMPILE(ret,
"keyword splat", kw));
5461 ADD_INSN1(ret, line_node, putspecialobject,
INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
5462 if (first_kw) ADD_INSN1(ret, line_node, newhash,
INT2FIX(0));
5463 else ADD_INSN(ret, line_node, swap);
5465 NO_CHECK(COMPILE(ret,
"keyword splat", kw));
5467 ADD_SEND(ret, line_node, id_core_hash_merge_kwd,
INT2FIX(2));
/*
 * rb_node_case_when_optimizable_literal: if a `when` clause value is a
 * literal usable by the case-dispatch hash optimization, return that
 * VALUE; the non-optimizable fallback is not visible in this extraction.
 * NOTE(review): elided extraction — the case labels pairing with each
 * return below, and the default path, are missing between the fragments.
 */
5482rb_node_case_when_optimizable_literal(
const NODE *
const node)
5484 switch (nd_type(node)) {
5486 return rb_node_integer_literal_val(node);
/* Float literals: value extracted here; the exactness check is elided. */
5488 VALUE v = rb_node_float_literal_val(node);
5497 case NODE_IMAGINARY:
5506 return rb_node_sym_string_val(node);
5508 return rb_node_line_lineno_val(node);
5510 return rb_node_str_string_val(node);
5512 return rb_node_file_path_val(node);
/* NOTE(review): garbled fragment of when_vals (signature head elided).
 * Emits the condition sequence for one `when` clause's value list: each
 * literal is registered in the `literals` CDHASH (keyed to label l1) when
 * optimizable; otherwise the value is compiled and compared with #=== via
 * idEqq, branching to l1 on match.  Returns the updated
 * only_special_literals flag, or -1 on compile failure.  Left
 * byte-identical; interior lines are elided. */
5519 LABEL *l1,
int only_special_literals,
VALUE literals)
5522 const NODE *val = RNODE_LIST(vals)->nd_head;
5523 VALUE lit = rb_node_case_when_optimizable_literal(val);
/* non-optimizable value found: whole case cannot use opt_case_dispatch */
5526 only_special_literals = 0;
5528 else if (
NIL_P(rb_hash_lookup(literals, lit))) {
/* tag the label pointer with low bit 1 -- presumably to keep it off the
 * GC heap as a Fixnum-like value; TODO confirm */
5529 rb_hash_aset(literals, lit, (
VALUE)(l1) | 1);
5532 if (nd_type_p(val, NODE_STR) || nd_type_p(val, NODE_FILE)) {
5533 debugp_param(
"nd_lit", get_string_value(val));
5534 lit = get_string_value(val);
5535 ADD_INSN1(cond_seq, val, putobject, lit);
5539 if (!COMPILE(cond_seq,
"when cond", val))
return -1;
/* duplicate the case target and call `lit === target` */
5543 ADD_INSN1(cond_seq, vals, topn,
INT2FIX(1));
5544 ADD_CALL(cond_seq, vals, idEqq,
INT2FIX(1));
5545 ADD_INSNL(cond_seq, val, branchif, l1);
5546 vals = RNODE_LIST(vals)->nd_next;
5548 return only_special_literals;
/* NOTE(review): garbled fragment of when_splat_vals (signature head elided).
 * Handles `when` clauses whose value list contains splats: plain lists are
 * delegated to when_vals; NODE_SPLAT / NODE_ARGSCAT / NODE_ARGSPUSH are
 * compiled with checkmatch (VM_CHECKMATCH_TYPE_CASE, with the ARRAY flag
 * when matching against a spread array), branching to l1 on a hit.
 * Interior case labels are elided; left byte-identical. */
5553 LABEL *l1,
int only_special_literals,
VALUE literals)
5555 const NODE *line_node = vals;
5557 switch (nd_type(vals)) {
5559 if (when_vals(iseq, cond_seq, vals, l1, only_special_literals, literals) < 0)
5563 ADD_INSN (cond_seq, line_node, dup);
5564 CHECK(COMPILE(cond_seq,
"when splat", RNODE_SPLAT(vals)->nd_head));
5565 ADD_INSN1(cond_seq, line_node, splatarray,
Qfalse);
5566 ADD_INSN1(cond_seq, line_node, checkmatch,
INT2FIX(VM_CHECKMATCH_TYPE_CASE | VM_CHECKMATCH_ARRAY));
5567 ADD_INSNL(cond_seq, line_node, branchif, l1);
/* argscat: recurse into both halves */
5570 CHECK(when_splat_vals(iseq, cond_seq, RNODE_ARGSCAT(vals)->nd_head, l1, only_special_literals, literals));
5571 CHECK(when_splat_vals(iseq, cond_seq, RNODE_ARGSCAT(vals)->nd_body, l1, only_special_literals, literals));
5574 CHECK(when_splat_vals(iseq, cond_seq, RNODE_ARGSPUSH(vals)->nd_head, l1, only_special_literals, literals));
5575 ADD_INSN (cond_seq, line_node, dup);
5576 CHECK(COMPILE(cond_seq,
"when argspush body", RNODE_ARGSPUSH(vals)->nd_body));
5577 ADD_INSN1(cond_seq, line_node, checkmatch,
INT2FIX(VM_CHECKMATCH_TYPE_CASE));
5578 ADD_INSNL(cond_seq, line_node, branchif, l1);
5581 ADD_INSN (cond_seq, line_node, dup);
5582 CHECK(COMPILE(cond_seq,
"when val", vals));
5583 ADD_INSN1(cond_seq, line_node, splatarray,
Qfalse);
5584 ADD_INSN1(cond_seq, line_node, checkmatch,
INT2FIX(VM_CHECKMATCH_TYPE_CASE | VM_CHECKMATCH_ARRAY));
5585 ADD_INSNL(cond_seq, line_node, branchif, l1);
5678 const NODE *line_node;
/* NOTE(review): garbled fragment of add_masgn_lhs_node (return type and some
 * interior lines elided).  Appends a memo record describing one LHS target
 * of a multiple assignment to the singly linked list in `state`
 * (first_memo/last_memo), accumulating state->num_args; before_insn marks
 * where topn fixups will later be inserted.  Left byte-identical. */
5693add_masgn_lhs_node(
struct masgn_state *state,
int lhs_pos,
const NODE *line_node,
int argc,
INSN *before_insn)
/* state must be present; a missing masgn_state is a compiler invariant
 * violation, hence rb_bug rather than a user-visible error */
5696 rb_bug(
"no masgn_state");
5705 memo->before_insn = before_insn;
5706 memo->line_node = line_node;
5707 memo->argn = state->num_args + 1;
5708 memo->num_args = argc;
5709 state->num_args += argc;
5710 memo->lhs_pos = lhs_pos;
5712 if (!state->first_memo) {
5713 state->first_memo = memo;
5716 state->last_memo->next = memo;
5718 state->last_memo = memo;
/* NOTE(review): garbled fragment of compile_massign_lhs (function header and
 * many interior lines elided).  Dispatches on the LHS node type of a
 * multiple assignment: NODE_ATTRASGN rewrites the compiled attribute-
 * assignment send to take the assigned value as an extra argument
 * (handling &. safe navigation and splatted argument lists); nested
 * NODE_MASGN recurses via compile_massign0; NODE_CDECL registers a memo;
 * the default case compiles the LHS popped and drops its leading putnil.
 * Left byte-identical -- the elided lines make a rewrite unsafe. */
5728 switch (nd_type(node)) {
5729 case NODE_ATTRASGN: {
5731 const NODE *line_node = node;
5733 CHECK(COMPILE_POPPED(pre,
"masgn lhs (NODE_ATTRASGN)", node));
5735 bool safenav_call =
false;
5737 iobj = (
INSN *)get_prev_insn((
INSN *)insn_element);
5739 ELEM_REMOVE(insn_element);
/* not a plain send: presumably the &. branchnil pattern -- step back one
 * more instruction to find the actual send; TODO confirm */
5740 if (!IS_INSN_ID(iobj, send)) {
5741 safenav_call =
true;
5742 iobj = (
INSN *)get_prev_insn(iobj);
5743 ELEM_INSERT_NEXT(&iobj->link, insn_element);
5745 (pre->last = iobj->link.prev)->next = 0;
/* widen the call's argc by one so the RHS value rides along */
5748 int argc = vm_ci_argc(ci) + 1;
5749 ci = ci_argc_set(iseq, ci, argc);
5750 OPERAND_AT(iobj, 0) = (
VALUE)ci;
5754 ADD_INSN(lhs, line_node, swap);
5757 ADD_INSN1(lhs, line_node, topn,
INT2FIX(argc));
5760 if (!add_masgn_lhs_node(state, lhs_pos, line_node, argc, (
INSN *)LAST_ELEMENT(lhs))) {
5764 iobj->link.prev = lhs->last;
5765 lhs->last->next = &iobj->link;
5766 for (lhs->last = &iobj->link; lhs->last->next; lhs->last = lhs->last->next);
5767 if (vm_ci_flag(ci) & VM_CALL_ARGS_SPLAT) {
5768 int argc = vm_ci_argc(ci);
5769 bool dupsplat =
false;
5770 ci = ci_argc_set(iseq, ci, argc - 1);
5771 if (!(vm_ci_flag(ci) & VM_CALL_ARGS_SPLAT_MUT)) {
5778 ci = ci_flag_set(iseq, ci, VM_CALL_ARGS_SPLAT_MUT);
5780 OPERAND_AT(iobj, 0) = (
VALUE)ci;
5789 int line_no = nd_line(line_node);
5790 int node_id = nd_node_id(line_node);
/* push the assigned value onto the splat array instead of the stack */
5793 INSERT_BEFORE_INSN(iobj, line_no, node_id, swap);
5794 INSERT_BEFORE_INSN1(iobj, line_no, node_id, splatarray,
Qtrue);
5795 INSERT_BEFORE_INSN(iobj, line_no, node_id, swap);
5797 INSERT_BEFORE_INSN1(iobj, line_no, node_id, pushtoarray,
INT2FIX(1));
5799 if (!safenav_call) {
5800 ADD_INSN(lhs, line_node, pop);
5802 ADD_INSN(lhs, line_node, pop);
5805 for (
int i=0; i < argc; i++) {
5806 ADD_INSN(post, line_node, pop);
/* nested masgn: compile into private anchors at a deeper lhs level */
5811 DECL_ANCHOR(nest_rhs);
5812 INIT_ANCHOR(nest_rhs);
5813 DECL_ANCHOR(nest_lhs);
5814 INIT_ANCHOR(nest_lhs);
5816 int prev_level = state->lhs_level;
5817 bool prev_nested = state->nested;
5819 state->lhs_level = lhs_pos - 1;
5820 CHECK(compile_massign0(iseq, pre, nest_rhs, nest_lhs, post, node, state, 1));
5821 state->lhs_level = prev_level;
5822 state->nested = prev_nested;
5824 ADD_SEQ(lhs, nest_rhs);
5825 ADD_SEQ(lhs, nest_lhs);
5829 if (!RNODE_CDECL(node)->nd_vid) {
5833 CHECK(COMPILE_POPPED(pre,
"masgn lhs (NODE_CDECL)", node));
5836 iobj = (
INSN *)insn_element;
5839 ELEM_REMOVE(insn_element);
5840 pre->last = iobj->link.prev;
5843 if (!add_masgn_lhs_node(state, lhs_pos, node, 1, (
INSN *)LAST_ELEMENT(lhs))) {
5847 ADD_INSN(post, node, pop);
/* default: compile the LHS popped and strip its first element */
5852 DECL_ANCHOR(anchor);
5853 INIT_ANCHOR(anchor);
5854 CHECK(COMPILE_POPPED(anchor,
"masgn lhs", node));
5855 ELEM_REMOVE(FIRST_ELEMENT(anchor));
5856 ADD_SEQ(lhs, anchor);
/* NOTE(review): fragment of compile_massign_opt_lhs -- recurses down the LHS
 * list (tail first) then compiles each head with the no-state fast path.
 * Surrounding lines are elided; left byte-identical. */
5867 CHECK(compile_massign_opt_lhs(iseq, ret, RNODE_LIST(lhsn)->nd_next));
5868 CHECK(compile_massign_lhs(iseq, ret, ret, ret, ret, RNODE_LIST(lhsn)->nd_head, NULL, 0));
/* NOTE(review): garbled fragment of compile_massign_opt (signature head and
 * interior lines elided).  Fast path for simple multiple assignment: bails
 * out (returns 0, presumably) unless both sides are plain lists with
 * distinct, non-aliasing variable targets (tracked via the MEMORY macro's
 * small fixed `mem` table), then compiles RHS values directly and pads with
 * putnil when the LHS is longer.  Left byte-identical. */
5875 const NODE *rhsn,
const NODE *orig_lhsn)
5878 const int memsize = numberof(mem);
5880 int llen = 0, rlen = 0;
5882 const NODE *lhsn = orig_lhsn;
5884#define MEMORY(v) { \
5886 if (memindex == memsize) return 0; \
5887 for (i=0; i<memindex; i++) { \
5888 if (mem[i] == (v)) return 0; \
5890 mem[memindex++] = (v); \
5893 if (rhsn == 0 || !nd_type_p(rhsn, NODE_LIST)) {
5898 const NODE *ln = RNODE_LIST(lhsn)->nd_head;
5899 switch (nd_type(ln)) {
5904 MEMORY(get_nd_vid(ln));
5909 lhsn = RNODE_LIST(lhsn)->nd_next;
5915 NO_CHECK(COMPILE_POPPED(ret,
"masgn val (popped)", RNODE_LIST(rhsn)->nd_head));
5918 NO_CHECK(COMPILE(ret,
"masgn val", RNODE_LIST(rhsn)->nd_head));
5920 rhsn = RNODE_LIST(rhsn)->nd_next;
/* LHS longer than RHS: missing values become nil */
5925 for (i=0; i<llen-rlen; i++) {
5926 ADD_INSN(ret, orig_lhsn, putnil);
5930 compile_massign_opt_lhs(iseq, ret, orig_lhsn);
/* NOTE(review): garbled fragment of compile_massign0 (header elided).
 * General multiple-assignment compiler: counts LHS targets, compiles each
 * via compile_massign_lhs, handles the splat target (including NODE_POSTARG
 * rest+post targets via expandarray flags 0x01/0x02), then compiles the RHS
 * and expands it to match.  Left byte-identical; interior lines elided. */
5937 const NODE *rhsn = RNODE_MASGN(node)->nd_value;
5938 const NODE *splatn = RNODE_MASGN(node)->nd_args;
5939 const NODE *lhsn = RNODE_MASGN(node)->nd_head;
5940 const NODE *lhsn_count = lhsn;
5941 int lhs_splat = (splatn && NODE_NAMED_REST_P(splatn)) ? 1 : 0;
5946 while (lhsn_count) {
5948 lhsn_count = RNODE_LIST(lhsn_count)->nd_next;
5951 CHECK(compile_massign_lhs(iseq, pre, rhs, lhs, post, RNODE_LIST(lhsn)->nd_head, state, (llen - lpos) + lhs_splat + state->lhs_level));
5953 lhsn = RNODE_LIST(lhsn)->nd_next;
5957 if (nd_type_p(splatn, NODE_POSTARG)) {
5959 const NODE *postn = RNODE_POSTARG(splatn)->nd_2nd;
5960 const NODE *restn = RNODE_POSTARG(splatn)->nd_1st;
5961 int plen = (int)RNODE_LIST(postn)->as.nd_alen;
/* 0x02 = postarg flag, 0x01 = named rest present */
5963 int flag = 0x02 | (NODE_NAMED_REST_P(restn) ? 0x01 : 0x00);
5965 ADD_INSN2(lhs, splatn, expandarray,
INT2FIX(plen),
INT2FIX(flag));
5967 if (NODE_NAMED_REST_P(restn)) {
5968 CHECK(compile_massign_lhs(iseq, pre, rhs, lhs, post, restn, state, 1 + plen + state->lhs_level));
5971 CHECK(compile_massign_lhs(iseq, pre, rhs, lhs, post, RNODE_LIST(postn)->nd_head, state, (plen - ppos) + state->lhs_level));
5973 postn = RNODE_LIST(postn)->nd_next;
5978 CHECK(compile_massign_lhs(iseq, pre, rhs, lhs, post, splatn, state, 1 + state->lhs_level));
5982 if (!state->nested) {
5983 NO_CHECK(COMPILE(rhs,
"normal masgn rhs", rhsn));
5987 ADD_INSN(rhs, node, dup);
5989 ADD_INSN2(rhs, node, expandarray,
INT2FIX(llen),
INT2FIX(lhs_splat));
/* NOTE(review): garbled fragment of compile_massign (header elided).  Tries
 * the compile_massign_opt fast path first; otherwise runs compile_massign0
 * with a fresh masgn_state, then walks the memo list inserting the deferred
 * topn fixups and finally setn to leave the RHS as the expression value
 * when not popped.  Left byte-identical; interior lines elided. */
5996 if (!popped || RNODE_MASGN(node)->nd_args || !compile_massign_opt(iseq, ret, RNODE_MASGN(node)->nd_value, RNODE_MASGN(node)->nd_head)) {
5998 state.lhs_level = popped ? 0 : 1;
6001 state.first_memo = NULL;
6002 state.last_memo = NULL;
6012 int ok = compile_massign0(iseq, pre, rhs, lhs, post, node, &state, popped);
6016 VALUE topn_arg =
INT2FIX((state.num_args - memo->argn) + memo->lhs_pos);
6017 for (
int i = 0; i < memo->num_args; i++) {
6018 INSERT_BEFORE_INSN1(memo->before_insn, nd_line(memo->line_node), nd_node_id(memo->line_node), topn, topn_arg);
6020 tmp_memo = memo->next;
6029 if (!popped && state.num_args >= 1) {
6031 ADD_INSN1(ret, node, setn,
INT2FIX(state.num_args));
/* NOTE(review): garbled fragment -- presumably collect_const_segments or
 * similar: walks a COLON2/COLON3/CONST chain from the leaf upward,
 * unshifting each segment's symbol onto `arr` (idNULL marking a toplevel
 * `::` root) and marking the array shareable.  Header and case labels are
 * elided; left byte-identical. */
6043 switch (nd_type(node)) {
6045 rb_ary_unshift(arr,
ID2SYM(RNODE_CONST(node)->nd_vid));
6046 RB_OBJ_SET_SHAREABLE(arr);
6049 rb_ary_unshift(arr,
ID2SYM(RNODE_COLON3(node)->nd_mid));
6050 rb_ary_unshift(arr,
ID2SYM(idNULL));
6051 RB_OBJ_SET_SHAREABLE(arr);
6054 rb_ary_unshift(arr,
ID2SYM(RNODE_COLON2(node)->nd_mid));
6055 node = RNODE_COLON2(node)->nd_head;
/* NOTE(review): garbled fragment of compile_const_prefix (return type and
 * interior lines elided).  Emits the constant-lookup prefix for a scoped
 * constant reference: CONST looks up with allow_nil=Qtrue, COLON3 anchors
 * at Object, COLON2 recurses into its head then resolves its mid with
 * allow_nil=Qfalse; anything else is compiled as an ordinary expression
 * into `pref`.  Left byte-identical. */
6064compile_const_prefix(
rb_iseq_t *iseq,
const NODE *
const node,
6067 switch (nd_type(node)) {
6069 debugi(
"compile_const_prefix - colon", RNODE_CONST(node)->nd_vid);
6070 ADD_INSN1(body, node, putobject,
Qtrue);
6071 ADD_INSN1(body, node, getconstant,
ID2SYM(RNODE_CONST(node)->nd_vid));
6074 debugi(
"compile_const_prefix - colon3", RNODE_COLON3(node)->nd_mid);
6075 ADD_INSN(body, node, pop);
6076 ADD_INSN1(body, node, putobject,
rb_cObject);
6077 ADD_INSN1(body, node, putobject,
Qtrue);
6078 ADD_INSN1(body, node, getconstant,
ID2SYM(RNODE_COLON3(node)->nd_mid));
6081 CHECK(compile_const_prefix(iseq, RNODE_COLON2(node)->nd_head, pref, body));
6082 debugi(
"compile_const_prefix - colon2", RNODE_COLON2(node)->nd_mid);
6083 ADD_INSN1(body, node, putobject,
Qfalse);
6084 ADD_INSN1(body, node, getconstant,
ID2SYM(RNODE_COLON2(node)->nd_mid));
6087 CHECK(COMPILE(pref,
"const colon2 prefix", node));
/* NOTE(review): fragment of cpath_const_p (return type and some case labels
 * elided).  Predicate: is this class-path node a purely constant reference
 * (so no dynamic CREF is needed)?  COLON2 recurses into its head. */
6094cpath_const_p(
const NODE *node)
6096 switch (nd_type(node)) {
6101 if (RNODE_COLON2(node)->nd_head) {
6102 return cpath_const_p(RNODE_COLON2(node)->nd_head);
/* NOTE(review): garbled fragment of compile_cpath (header elided).  Pushes
 * the class/module definition base onto the stack and returns the
 * VM_DEFINECLASS flag bits: COLON3 anchors at Object, a scoped COLON2
 * compiles its head (adding DYNAMIC_CREF when the head is not constant),
 * and the fallback uses the VM's CONST_BASE special object. */
6113 if (nd_type_p(cpath, NODE_COLON3)) {
6115 ADD_INSN1(ret, cpath, putobject,
rb_cObject);
6116 return VM_DEFINECLASS_FLAG_SCOPED;
6118 else if (nd_type_p(cpath, NODE_COLON2) && RNODE_COLON2(cpath)->nd_head) {
6120 NO_CHECK(COMPILE(ret,
"nd_else->nd_head", RNODE_COLON2(cpath)->nd_head));
6121 int flags = VM_DEFINECLASS_FLAG_SCOPED;
6122 if (!cpath_const_p(RNODE_COLON2(cpath)->nd_head)) {
6123 flags |= VM_DEFINECLASS_FLAG_DYNAMIC_CREF;
6129 ADD_INSN1(ret, cpath, putspecialobject,
6130 INT2FIX(VM_SPECIAL_OBJECT_CONST_BASE));
/* NOTE(review): fragment of private_recv_p (return type elided).  True when
 * the node's receiver is an implicit/explicit `self` marked as a private
 * call target (nd_state != 0). */
6136private_recv_p(
const NODE *node)
6138 NODE *recv = get_nd_recv(node);
6139 if (recv && nd_type_p(recv, NODE_SELF)) {
6140 return RNODE_SELF(recv)->nd_state != 0;
/* NOTE(review): forward declarations -- the tail of the defined_expr0
 * prototype (its head is elided) followed by the compile_call prototype.
 * Left byte-identical. */
6147 const NODE *
const node,
LABEL **lfinish,
VALUE needstr,
bool ignore);
6150compile_call(
rb_iseq_t *iseq,
LINK_ANCHOR *
const ret,
const NODE *
const node,
const enum node_type
type,
const NODE *
const line_node,
int popped,
bool assume_receiver);
/* NOTE(review): garbled fragment of defined_expr0 (header and many case
 * labels elided).  Core of `defined?` compilation: dispatches on the node
 * type, either recording a static answer in expr_type (nil/self/true/false/
 * expression/assignment/local-variable) or emitting `defined`/`definedivar`
 * VM instructions for dynamic checks (globals, cvars, constants, method
 * calls, yield, $~ back-references, super).  lfinish[1]/lfinish[2] are
 * lazily created bail-out labels for subexpressions that may themselves be
 * undefined.  Left byte-identical -- a rewrite is unsafe with the elided
 * lines. */
6157 enum defined_type expr_type = DEFINED_NOT_DEFINED;
6158 enum node_type
type;
6159 const int line = nd_line(node);
6160 const NODE *line_node = node;
6162 switch (
type = nd_type(node)) {
6166 expr_type = DEFINED_NIL;
6169 expr_type = DEFINED_SELF;
6172 expr_type = DEFINED_TRUE;
6175 expr_type = DEFINED_FALSE;
/* array/hash literal: every element must itself be defined */
6180 const NODE *vals = (nd_type(node) == NODE_HASH) ? RNODE_HASH(node)->nd_head : node;
6184 if (RNODE_LIST(vals)->nd_head) {
6185 defined_expr0(iseq, ret, RNODE_LIST(vals)->nd_head, lfinish,
Qfalse,
false);
6188 lfinish[1] = NEW_LABEL(line);
6190 ADD_INSNL(ret, line_node, branchunless, lfinish[1]);
6192 }
while ((vals = RNODE_LIST(vals)->nd_next) != NULL);
6205 case NODE_IMAGINARY:
6210 expr_type = DEFINED_EXPR;
6214 defined_expr0(iseq, ret, RNODE_LIST(node)->nd_head, lfinish,
Qfalse,
false);
6216 lfinish[1] = NEW_LABEL(line);
6218 ADD_INSNL(ret, line_node, branchunless, lfinish[1]);
6219 expr_type = DEFINED_EXPR;
6225 expr_type = DEFINED_LVAR;
/* when needstr is false only a boolean is needed, else the result string */
6228#define PUSH_VAL(type) (needstr == Qfalse ? Qtrue : rb_iseq_defined_string(type))
6230 ADD_INSN3(ret, line_node, definedivar,
6231 ID2SYM(RNODE_IVAR(node)->nd_vid), get_ivar_ic_value(iseq,RNODE_IVAR(node)->nd_vid), PUSH_VAL(DEFINED_IVAR));
6235 ADD_INSN(ret, line_node, putnil);
6236 ADD_INSN3(ret, line_node, defined,
INT2FIX(DEFINED_GVAR),
6237 ID2SYM(RNODE_GVAR(node)->nd_vid), PUSH_VAL(DEFINED_GVAR));
6241 ADD_INSN(ret, line_node, putnil);
6242 ADD_INSN3(ret, line_node, defined,
INT2FIX(DEFINED_CVAR),
6243 ID2SYM(RNODE_CVAR(node)->nd_vid), PUSH_VAL(DEFINED_CVAR));
6247 ADD_INSN(ret, line_node, putnil);
6248 ADD_INSN3(ret, line_node, defined,
INT2FIX(DEFINED_CONST),
6249 ID2SYM(RNODE_CONST(node)->nd_vid), PUSH_VAL(DEFINED_CONST));
6253 lfinish[1] = NEW_LABEL(line);
6255 defined_expr0(iseq, ret, RNODE_COLON2(node)->nd_head, lfinish,
Qfalse,
false);
6256 ADD_INSNL(ret, line_node, branchunless, lfinish[1]);
6257 NO_CHECK(COMPILE(ret,
"defined/colon2#nd_head", RNODE_COLON2(node)->nd_head));
6260 ADD_INSN3(ret, line_node, defined,
INT2FIX(DEFINED_CONST_FROM),
6261 ID2SYM(RNODE_COLON2(node)->nd_mid), PUSH_VAL(DEFINED_CONST));
6264 ADD_INSN3(ret, line_node, defined,
INT2FIX(DEFINED_METHOD),
6265 ID2SYM(RNODE_COLON2(node)->nd_mid), PUSH_VAL(DEFINED_METHOD));
6269 ADD_INSN1(ret, line_node, putobject,
rb_cObject);
6270 ADD_INSN3(ret, line_node, defined,
6271 INT2FIX(DEFINED_CONST_FROM),
ID2SYM(RNODE_COLON3(node)->nd_mid), PUSH_VAL(DEFINED_CONST));
6279 case NODE_ATTRASGN:{
6280 const int explicit_receiver =
6281 (
type == NODE_CALL ||
type == NODE_OPCALL ||
6282 (
type == NODE_ATTRASGN && !private_recv_p(node)));
6284 if (get_nd_args(node) || explicit_receiver) {
6286 lfinish[1] = NEW_LABEL(line);
6289 lfinish[2] = NEW_LABEL(line);
6292 if (get_nd_args(node)) {
6293 defined_expr0(iseq, ret, get_nd_args(node), lfinish,
Qfalse,
false);
6294 ADD_INSNL(ret, line_node, branchunless, lfinish[1]);
6296 if (explicit_receiver) {
6297 defined_expr0(iseq, ret, get_nd_recv(node), lfinish,
Qfalse,
true);
6298 switch (nd_type(get_nd_recv(node))) {
6304 ADD_INSNL(ret, line_node, branchunless, lfinish[2]);
6305 compile_call(iseq, ret, get_nd_recv(node), nd_type(get_nd_recv(node)), line_node, 0,
true);
6308 ADD_INSNL(ret, line_node, branchunless, lfinish[1]);
6309 NO_CHECK(COMPILE(ret,
"defined/recv", get_nd_recv(node)));
6313 ADD_INSN(ret, line_node, dup);
6315 ADD_INSN3(ret, line_node, defined,
INT2FIX(DEFINED_METHOD),
6316 ID2SYM(get_node_call_nd_mid(node)), PUSH_VAL(DEFINED_METHOD));
6319 ADD_INSN(ret, line_node, putself);
6321 ADD_INSN(ret, line_node, dup);
6323 ADD_INSN3(ret, line_node, defined,
INT2FIX(DEFINED_FUNC),
6324 ID2SYM(get_node_call_nd_mid(node)), PUSH_VAL(DEFINED_METHOD));
6330 ADD_INSN(ret, line_node, putnil);
6331 ADD_INSN3(ret, line_node, defined,
INT2FIX(DEFINED_YIELD), 0,
6332 PUSH_VAL(DEFINED_YIELD));
6333 iseq_set_use_block(ISEQ_BODY(iseq)->local_iseq);
6338 ADD_INSN(ret, line_node, putnil);
6339 ADD_INSN3(ret, line_node, defined,
INT2FIX(DEFINED_REF),
6340 INT2FIX((RNODE_BACK_REF(node)->nd_nth << 1) | (
type == NODE_BACK_REF)),
6341 PUSH_VAL(DEFINED_GVAR));
6346 ADD_INSN(ret, line_node, putnil);
6347 ADD_INSN3(ret, line_node, defined,
INT2FIX(DEFINED_ZSUPER), 0,
6348 PUSH_VAL(DEFINED_ZSUPER));
6354 case NODE_OP_ASGN_OR:
6355 case NODE_OP_ASGN_AND:
6364 expr_type = DEFINED_ASGN;
/* statically known answer: push the result string or Qtrue */
6371 VALUE str = rb_iseq_defined_string(expr_type);
6372 ADD_INSN1(ret, line_node, putobject, str);
6375 ADD_INSN1(ret, line_node, putobject,
Qtrue);
/* NOTE(review): fragment of build_defined_rescue_iseq -- the rescue body
 * used by defined? simply returns nil and shares the exception local table.
 * Surrounding lines elided; left byte-identical. */
6382 ADD_SYNTHETIC_INSN(ret, 0, -1, putnil);
6383 iseq_set_exception_local_table(iseq);
/* NOTE(review): garbled fragment of defined_expr (signature head elided).
 * Wraps defined_expr0 and, when the expression may raise (presumably when
 * it needs a rescue -- the guarding condition is elided), installs a
 * catch-table RESCUE entry around the emitted range using a callback-built
 * rescue iseq that yields nil.  Left byte-identical. */
6388 const NODE *
const node,
LABEL **lfinish,
VALUE needstr,
bool ignore)
6391 defined_expr0(iseq, ret, node, lfinish, needstr,
false);
6393 int line = nd_line(node);
6394 LABEL *lstart = NEW_LABEL(line);
6395 LABEL *lend = NEW_LABEL(line);
6398 rb_iseq_new_with_callback_new_callback(build_defined_rescue_iseq, NULL);
6399 rescue = NEW_CHILD_ISEQ_WITH_CALLBACK(ifunc,
6401 ISEQ_BODY(iseq)->location.label),
6402 ISEQ_TYPE_RESCUE, 0);
6403 lstart->rescued = LABEL_RESCUE_BEG;
6404 lend->rescued = LABEL_RESCUE_END;
6405 APPEND_LABEL(ret, lcur, lstart);
6406 ADD_LABEL(ret, lend);
6408 ADD_CATCH_ENTRY(CATCH_TYPE_RESCUE, lstart, lend, rescue, lfinish[1]);
/* NOTE(review): garbled fragment of compile_defined_expr (header elided).
 * Entry point for `defined?(expr)`: an empty operand yields the constant
 * "expression" string; otherwise defined_expr runs with the lfinish label
 * array and the bail-out paths insert putnil/swap/pop fixups so all exits
 * leave one result on the stack.  Left byte-identical. */
6416 const int line = nd_line(node);
6417 const NODE *line_node = node;
6418 if (!RNODE_DEFINED(node)->nd_head) {
6419 VALUE str = rb_iseq_defined_string(DEFINED_NIL);
6420 ADD_INSN1(ret, line_node, putobject, str);
6425 lfinish[0] = NEW_LABEL(line);
6428 defined_expr(iseq, ret, RNODE_DEFINED(node)->nd_head, lfinish, needstr, ignore);
6430 ELEM_INSERT_NEXT(last, &new_insn_body(iseq, nd_line(line_node), nd_node_id(line_node), BIN(putnil), 0)->link);
6431 ADD_INSN(ret, line_node, swap);
6433 ADD_LABEL(ret, lfinish[2]);
6435 ADD_INSN(ret, line_node, pop);
6436 ADD_LABEL(ret, lfinish[1]);
6438 ADD_LABEL(ret, lfinish[0]);
/* NOTE(review): garbled fragment of make_name_for_block (return type and
 * level counting elided).  Builds the human-readable iseq label
 * "block in NAME" or "block (N levels) in NAME" by walking parent iseqs up
 * to the enclosing local iseq.  Left byte-identical. */
6444make_name_for_block(
const rb_iseq_t *orig_iseq)
6449 if (ISEQ_BODY(orig_iseq)->parent_iseq != 0) {
6450 while (ISEQ_BODY(orig_iseq)->local_iseq != iseq) {
6451 if (ISEQ_BODY(iseq)->
type == ISEQ_TYPE_BLOCK) {
6454 iseq = ISEQ_BODY(iseq)->parent_iseq;
6459 return rb_sprintf(
"block in %"PRIsVALUE, ISEQ_BODY(iseq)->location.label);
6462 return rb_sprintf(
"block (%d levels) in %"PRIsVALUE, level, ISEQ_BODY(iseq)->location.label);
/* NOTE(review): fragments of two ensure helpers (headers elided).
 * First: push_ensure_entry -- pushes an ensure-list entry onto the
 * per-iseq ensure_node_stack.  Second: add_ensure_range -- appends a new
 * label range to the entry's range list, splitting the tail range at
 * lstart.  Left byte-identical. */
6471 enl->ensure_node = node;
6472 enl->prev = ISEQ_COMPILE_DATA(iseq)->ensure_node_stack;
6474 ISEQ_COMPILE_DATA(iseq)->ensure_node_stack = enl;
6484 while (erange->next != 0) {
6485 erange = erange->next;
6489 ne->end = erange->end;
6490 erange->end = lstart;
/* NOTE(review): fragment of can_add_ensure_iseq (return type elided).
 * Inside a rescue clause, inlining ensure bodies is disallowed if any
 * pending ensure entry has an actual node to run. */
6496can_add_ensure_iseq(
const rb_iseq_t *iseq)
6499 if (ISEQ_COMPILE_DATA(iseq)->in_rescue && (e = ISEQ_COMPILE_DATA(iseq)->ensure_node_stack) != NULL) {
6501 if (e->ensure_node)
return false;
/* NOTE(review): garbled fragment of add_ensure_iseq (header and loop
 * structure elided).  Inlines all pending ensure bodies (popped) into `ret`
 * -- used before break/next/return style jumps -- wrapping each in fresh
 * begin/end labels registered with add_ensure_range, and temporarily popping
 * the ensure stack so an ensure body does not re-trigger itself.  The stack
 * is restored afterwards.  Left byte-identical. */
6514 ISEQ_COMPILE_DATA(iseq)->ensure_node_stack;
6516 DECL_ANCHOR(ensure);
6518 INIT_ANCHOR(ensure);
6520 if (enlp->erange != NULL) {
6521 DECL_ANCHOR(ensure_part);
6522 LABEL *lstart = NEW_LABEL(0);
6523 LABEL *lend = NEW_LABEL(0);
6524 INIT_ANCHOR(ensure_part);
6526 add_ensure_range(iseq, enlp->erange, lstart, lend);
6528 ISEQ_COMPILE_DATA(iseq)->ensure_node_stack = enlp->prev;
6529 ADD_LABEL(ensure_part, lstart);
6530 NO_CHECK(COMPILE_POPPED(ensure_part,
"ensure part", enlp->ensure_node));
6531 ADD_LABEL(ensure_part, lend);
6532 ADD_SEQ(ensure, ensure_part);
6541 ISEQ_COMPILE_DATA(iseq)->ensure_node_stack = prev_enlp;
6542 ADD_SEQ(ret, ensure);
/* NOTE(review): fragment of check_keyword (return type elided).  Walks to
 * the last element of an argument NODE_LIST and reports whether it is a
 * keyword-hash node (keyword_node_p). */
6547check_keyword(
const NODE *node)
6551 if (nd_type_p(node, NODE_LIST)) {
6552 while (RNODE_LIST(node)->nd_next) {
6553 node = RNODE_LIST(node)->nd_next;
6555 node = RNODE_LIST(node)->nd_head;
6558 return keyword_node_p(node);
/* NOTE(review): fragment of keyword_node_single_splat_p (return type
 * elided).  True when the keyword hash node is exactly one `**splat`
 * element: a nil key with a single value and no further pairs. */
6563keyword_node_single_splat_p(
NODE *kwnode)
6567 NODE *node = RNODE_HASH(kwnode)->nd_head;
6568 return RNODE_LIST(node)->nd_head == NULL &&
6569 RNODE_LIST(RNODE_LIST(node)->nd_next)->nd_next == NULL;
/* NOTE(review): garbled fragment of compile_single_keyword_splat_mutable
 * (signature head elided).  Emits core#hash_merge_kwd over a fresh empty
 * hash so a lone `**splat` argument becomes a mutable copy, and marks the
 * call KW_SPLAT_MUT.  Left byte-identical. */
6574 NODE *kwnode,
unsigned int *flag_ptr)
6576 *flag_ptr |= VM_CALL_KW_SPLAT_MUT;
6577 ADD_INSN1(args, argn, putspecialobject,
INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
6578 ADD_INSN1(args, argn, newhash,
INT2FIX(0));
6579 compile_hash(iseq, args, kwnode, TRUE, FALSE);
6580 ADD_SEND(args, argn, id_core_hash_merge_kwd,
INT2FIX(2));
/* Bit flags for the setup_args `dup_rest` state: whether splatarray must
 * duplicate its operand, plus a marker for duplicating a lone **kw splat. */
6583#define SPLATARRAY_FALSE 0
6584#define SPLATARRAY_TRUE 1
6585#define DUP_SINGLE_KW_SPLAT 2
/* NOTE(review): garbled fragment of setup_args_core (signature head and
 * case labels elided).  Compiles a call's argument node into `args` and
 * returns the argument count, setting VM_CALL_ARGS_SPLAT / VM_CALL_KW_SPLAT
 * flags as needed.  NODE_LIST goes through compile_args with trailing
 * keywords split off; NODE_SPLAT emits splatarray (duplicating only once,
 * per *dup_rest); ARGSCAT/ARGSPUSH recurse on the head then concatenate or
 * push the tail.  Left byte-identical; elided lines make a rewrite unsafe. */
6589 unsigned int *dup_rest,
unsigned int *flag_ptr,
struct rb_callinfo_kwarg **kwarg_ptr)
6591 if (!argn)
return 0;
6593 NODE *kwnode = NULL;
6595 switch (nd_type(argn)) {
6598 int len = compile_args(iseq, args, argn, &kwnode);
6599 RUBY_ASSERT(flag_ptr == NULL || (*flag_ptr & VM_CALL_ARGS_SPLAT) == 0);
6602 if (compile_keyword_arg(iseq, args, kwnode, kwarg_ptr, flag_ptr)) {
6606 if (keyword_node_single_splat_p(kwnode) && (*dup_rest & DUP_SINGLE_KW_SPLAT)) {
6607 compile_single_keyword_splat_mutable(iseq, args, argn, kwnode, flag_ptr);
6610 compile_hash(iseq, args, kwnode, TRUE, FALSE);
6619 NO_CHECK(COMPILE(args,
"args (splat)", RNODE_SPLAT(argn)->nd_head));
/* duplicate the splatted array at most once per call site */
6620 ADD_INSN1(args, argn, splatarray, RBOOL(*dup_rest & SPLATARRAY_TRUE));
6621 if (*dup_rest & SPLATARRAY_TRUE) *dup_rest &= ~SPLATARRAY_TRUE;
6622 if (flag_ptr) *flag_ptr |= VM_CALL_ARGS_SPLAT;
6623 RUBY_ASSERT(flag_ptr == NULL || (*flag_ptr & VM_CALL_KW_SPLAT) == 0);
6626 case NODE_ARGSCAT: {
6627 if (flag_ptr) *flag_ptr |= VM_CALL_ARGS_SPLAT;
6628 int argc = setup_args_core(iseq, args, RNODE_ARGSCAT(argn)->nd_head, dup_rest, NULL, NULL);
6629 bool args_pushed =
false;
6631 if (nd_type_p(RNODE_ARGSCAT(argn)->nd_body, NODE_LIST)) {
6632 int rest_len = compile_args(iseq, args, RNODE_ARGSCAT(argn)->nd_body, &kwnode);
6633 if (kwnode) rest_len--;
6634 ADD_INSN1(args, argn, pushtoarray,
INT2FIX(rest_len));
6638 RUBY_ASSERT(!check_keyword(RNODE_ARGSCAT(argn)->nd_body));
6639 NO_CHECK(COMPILE(args,
"args (cat: splat)", RNODE_ARGSCAT(argn)->nd_body));
6642 if (nd_type_p(RNODE_ARGSCAT(argn)->nd_head, NODE_LIST)) {
6643 ADD_INSN1(args, argn, splatarray, RBOOL(*dup_rest & SPLATARRAY_TRUE));
6644 if (*dup_rest & SPLATARRAY_TRUE) *dup_rest &= ~SPLATARRAY_TRUE;
6647 else if (!args_pushed) {
6648 ADD_INSN(args, argn, concattoarray);
6654 *flag_ptr |= VM_CALL_KW_SPLAT;
6655 compile_hash(iseq, args, kwnode, TRUE, FALSE);
6661 case NODE_ARGSPUSH: {
6662 if (flag_ptr) *flag_ptr |= VM_CALL_ARGS_SPLAT;
6663 int argc = setup_args_core(iseq, args, RNODE_ARGSPUSH(argn)->nd_head, dup_rest, NULL, NULL);
6665 if (nd_type_p(RNODE_ARGSPUSH(argn)->nd_body, NODE_LIST)) {
6666 int rest_len = compile_args(iseq, args, RNODE_ARGSPUSH(argn)->nd_body, &kwnode);
6667 if (kwnode) rest_len--;
6668 ADD_INSN1(args, argn, newarray,
INT2FIX(rest_len));
6669 ADD_INSN1(args, argn, pushtoarray,
INT2FIX(1));
6672 if (keyword_node_p(RNODE_ARGSPUSH(argn)->nd_body)) {
6673 kwnode = RNODE_ARGSPUSH(argn)->nd_body;
6676 NO_CHECK(COMPILE(args,
"args (cat: splat)", RNODE_ARGSPUSH(argn)->nd_body));
6677 ADD_INSN1(args, argn, pushtoarray,
INT2FIX(1));
6683 *flag_ptr |= VM_CALL_KW_SPLAT;
6684 if (!keyword_node_single_splat_p(kwnode)) {
6685 *flag_ptr |= VM_CALL_KW_SPLAT_MUT;
6686 compile_hash(iseq, args, kwnode, TRUE, FALSE);
6688 else if (*dup_rest & DUP_SINGLE_KW_SPLAT) {
6689 compile_single_keyword_splat_mutable(iseq, args, argn, kwnode, flag_ptr);
6692 compile_hash(iseq, args, kwnode, TRUE, FALSE);
6700 UNKNOWN_NODE(
"setup_arg", argn,
Qnil);
/* NOTE(review): fragment of setup_args_splat_mut (return type elided).
 * If a splat was emitted and the dup_rest state changed (i.e. the array was
 * duplicated), mark the call's splat argument as mutable. */
6706setup_args_splat_mut(
unsigned int *flag,
int dup_rest,
int initial_dup_rest)
6708 if ((*flag & VM_CALL_ARGS_SPLAT) && dup_rest != initial_dup_rest) {
6709 *flag |= VM_CALL_ARGS_SPLAT_MUT;
/* NOTE(review): garbled fragment of setup_args_dup_rest_p (case labels and
 * return statements elided).  Predicate deciding whether an argument
 * subexpression could mutate a splatted array (forcing duplication);
 * literal-like nodes presumably return false -- TODO confirm against the
 * unelided source.  Left byte-identical. */
6714setup_args_dup_rest_p(
const NODE *argn)
6716 switch(nd_type(argn)) {
6727 case NODE_IMAGINARY:
6740 return setup_args_dup_rest_p(RNODE_COLON2(argn)->nd_head);
6743 if (setup_args_dup_rest_p(RNODE_LIST(argn)->nd_head)) {
6746 argn = RNODE_LIST(argn)->nd_next;
/* NOTE(review): garbled fragment of setup_args (header elided).  Top-level
 * argument setup for a call: decides the initial dup_rest policy by
 * inspecting the argument shape (splat-only, argscat, argspush with a
 * non-brace trailing hash whose keys/values may mutate the splat), handles
 * `...` argument forwarding and block-pass (&blk) arguments -- rewriting a
 * lone getblockparam into getblockparamproxy -- then delegates to
 * setup_args_core and finalizes splat mutability.  Left byte-identical. */
6759 unsigned int dup_rest = SPLATARRAY_TRUE, initial_dup_rest;
6762 const NODE *check_arg = nd_type_p(argn, NODE_BLOCK_PASS) ?
6763 RNODE_BLOCK_PASS(argn)->nd_head : argn;
6766 switch(nd_type(check_arg)) {
/* splat-only call: no later use of the array, no need to copy it */
6769 dup_rest = SPLATARRAY_FALSE;
6773 dup_rest = !nd_type_p(RNODE_ARGSCAT(check_arg)->nd_head, NODE_LIST);
6775 case(NODE_ARGSPUSH):
6777 dup_rest = !((nd_type_p(RNODE_ARGSPUSH(check_arg)->nd_head, NODE_SPLAT) ||
6778 (nd_type_p(RNODE_ARGSPUSH(check_arg)->nd_head, NODE_ARGSCAT) &&
6779 nd_type_p(RNODE_ARGSCAT(RNODE_ARGSPUSH(check_arg)->nd_head)->nd_head, NODE_LIST))) &&
6780 nd_type_p(RNODE_ARGSPUSH(check_arg)->nd_body, NODE_HASH) &&
6781 !RNODE_HASH(RNODE_ARGSPUSH(check_arg)->nd_body)->nd_brace);
6783 if (dup_rest == SPLATARRAY_FALSE) {
/* re-check: a keyword key/value that can run code may alias the splat */
6785 NODE *node = RNODE_HASH(RNODE_ARGSPUSH(check_arg)->nd_body)->nd_head;
6787 NODE *key_node = RNODE_LIST(node)->nd_head;
6788 if (key_node && setup_args_dup_rest_p(key_node)) {
6789 dup_rest = SPLATARRAY_TRUE;
6793 node = RNODE_LIST(node)->nd_next;
6794 NODE *value_node = RNODE_LIST(node)->nd_head;
6795 if (setup_args_dup_rest_p(value_node)) {
6796 dup_rest = SPLATARRAY_TRUE;
6800 node = RNODE_LIST(node)->nd_next;
6809 if (check_arg != argn && setup_args_dup_rest_p(RNODE_BLOCK_PASS(argn)->nd_body)) {
6811 dup_rest = SPLATARRAY_TRUE | DUP_SINGLE_KW_SPLAT;
6814 initial_dup_rest = dup_rest;
6816 if (argn && nd_type_p(argn, NODE_BLOCK_PASS)) {
6817 DECL_ANCHOR(arg_block);
6818 INIT_ANCHOR(arg_block);
/* `...` forwarding when the enclosing method is forwardable */
6820 if (RNODE_BLOCK_PASS(argn)->forwarding && ISEQ_BODY(ISEQ_BODY(iseq)->local_iseq)->param.flags.forwardable) {
6821 int idx = ISEQ_BODY(ISEQ_BODY(iseq)->local_iseq)->local_table_size;
6823 RUBY_ASSERT(nd_type_p(RNODE_BLOCK_PASS(argn)->nd_head, NODE_ARGSPUSH));
6824 const NODE * arg_node =
6825 RNODE_ARGSPUSH(RNODE_BLOCK_PASS(argn)->nd_head)->nd_head;
6832 if (nd_type_p(arg_node, NODE_ARGSCAT)) {
6833 argc += setup_args_core(iseq, args, RNODE_ARGSCAT(arg_node)->nd_head, &dup_rest, flag, keywords);
6836 *flag |= VM_CALL_FORWARDING;
6838 ADD_GETLOCAL(args, argn, idx, get_lvar_level(iseq));
6839 setup_args_splat_mut(flag, dup_rest, initial_dup_rest);
6843 *flag |= VM_CALL_ARGS_BLOCKARG;
6845 NO_CHECK(COMPILE(arg_block,
"block", RNODE_BLOCK_PASS(argn)->nd_body));
6848 if (LIST_INSN_SIZE_ONE(arg_block)) {
6850 if (IS_INSN(elem)) {
/* passing a bare block parameter: avoid materializing the Proc */
6852 if (iobj->insn_id == BIN(getblockparam)) {
6853 iobj->insn_id = BIN(getblockparamproxy);
6857 ret =
INT2FIX(setup_args_core(iseq, args, RNODE_BLOCK_PASS(argn)->nd_head, &dup_rest, flag, keywords));
6858 ADD_SEQ(args, arg_block);
6861 ret =
INT2FIX(setup_args_core(iseq, args, argn, &dup_rest, flag, keywords));
6863 setup_args_splat_mut(flag, dup_rest, initial_dup_rest);
/* NOTE(review): fragment of the END{}-block builder (header elided).
 * Wraps the body in a child block iseq and emits a core#set_postexe call
 * with that block; the wrapper iseq gets an empty local table.
 * Left byte-identical. */
6870 const NODE *body = ptr;
6871 int line = nd_line(body);
6873 const rb_iseq_t *block = NEW_CHILD_ISEQ(body, make_name_for_block(ISEQ_BODY(iseq)->parent_iseq), ISEQ_TYPE_BLOCK, line);
6875 ADD_INSN1(ret, body, putspecialobject,
INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
6876 ADD_CALL_WITH_BLOCK(ret, body, id_core_set_postexe, argc, block);
6878 iseq_set_local_table(iseq, 0, 0);
/* NOTE(review): garbled fragment of compile_named_capture_assign (header
 * elided).  After a regexp match with named captures, assigns each capture
 * group to its local: reads $~ (idBACKREF), branches to the nil path when
 * the match failed, and for each variable emits `md[:name]` via idAREF into
 * the popped assignment; on failure every target is assigned nil by
 * rewriting the fetch into putnil.  A single-capture fast path splices the
 * jump/fail/nil sequence around the AREF send.  Left byte-identical. */
6886 int line = nd_line(node);
6887 const NODE *line_node = node;
6888 LABEL *fail_label = NEW_LABEL(line), *end_label = NEW_LABEL(line);
6890#if !(defined(NAMED_CAPTURE_BY_SVAR) && NAMED_CAPTURE_BY_SVAR-0)
6891 ADD_INSN1(ret, line_node, getglobal,
ID2SYM(idBACKREF));
6895 ADD_INSN(ret, line_node, dup);
6896 ADD_INSNL(ret, line_node, branchunless, fail_label);
6898 for (vars = node; vars; vars = RNODE_BLOCK(vars)->nd_next) {
6900 if (RNODE_BLOCK(vars)->nd_next) {
6901 ADD_INSN(ret, line_node, dup);
6904 NO_CHECK(COMPILE_POPPED(ret,
"capture", RNODE_BLOCK(vars)->nd_head));
6906 cap = new_insn_send(iseq, nd_line(line_node), nd_node_id(line_node), idAREF,
INT2FIX(1),
6909#if !defined(NAMED_CAPTURE_SINGLE_OPT) || NAMED_CAPTURE_SINGLE_OPT-0
6910 if (!RNODE_BLOCK(vars)->nd_next && vars == node) {
/* single capture: splice jump/fail/nil handling inline around the send */
6915 ADD_INSNL(nom, line_node, jump, end_label);
6916 ADD_LABEL(nom, fail_label);
6918 ADD_INSN(nom, line_node, pop);
6919 ADD_INSN(nom, line_node, putnil);
6921 ADD_LABEL(nom, end_label);
6922 (nom->last->next = cap->link.next)->prev = nom->last;
6923 (cap->link.next = nom->anchor.next)->prev = &cap->link;
6928 ADD_INSNL(ret, line_node, jump, end_label);
6929 ADD_LABEL(ret, fail_label);
6930 ADD_INSN(ret, line_node, pop);
6931 for (vars = node; vars; vars = RNODE_BLOCK(vars)->nd_next) {
6933 NO_CHECK(COMPILE_POPPED(ret,
"capture", RNODE_BLOCK(vars)->nd_head));
/* failure path: replace the value fetch with putnil */
6935 ((
INSN*)last)->insn_id = BIN(putnil);
6936 ((
INSN*)last)->operand_size = 0;
6938 ADD_LABEL(ret, end_label);
/* NOTE(review): fragment of optimizable_range_item_p (return type and case
 * labels elided).  Predicate: can this range endpoint be folded into a
 * frozen literal Range?  nil endpoints are rejected here. */
6942optimizable_range_item_p(
const NODE *n)
6944 if (!n)
return FALSE;
6945 switch (nd_type(n)) {
/* NOTE(review): fragment of optimized_range_item (return type and case
 * labels elided).  Converts a literal range-endpoint node into its VALUE;
 * any other node type is a compiler bug. */
6958optimized_range_item(
const NODE *n)
6960 switch (nd_type(n)) {
6962 return rb_node_line_lineno_val(n);
6964 return rb_node_integer_literal_val(n);
6966 return rb_node_float_literal_val(n);
6968 return rb_node_rational_literal_val(n);
6969 case NODE_IMAGINARY:
6970 return rb_node_imaginary_literal_val(n);
6974 rb_bug(
"unexpected node: %s", ruby_node_name(nd_type(n)));
/* NOTE(review): garbled fragment of compile_if (header elided).  Compiles
 * if/unless: body/else are swapped for `unless`, the condition is compiled
 * via compile_branch_condition into then/else labels, branch coverage is
 * recorded per arm, and an end label + jump are emitted only when both arms
 * are reachable (refcnt checks).  Left byte-identical; interior lines
 * elided. */
6981 const NODE *
const node_body =
type == NODE_IF ? RNODE_IF(node)->nd_body : RNODE_UNLESS(node)->nd_else;
6982 const NODE *
const node_else =
type == NODE_IF ? RNODE_IF(node)->nd_else : RNODE_UNLESS(node)->nd_body;
6984 const int line = nd_line(node);
6985 const NODE *line_node = node;
6986 DECL_ANCHOR(cond_seq);
6987 LABEL *then_label, *else_label, *end_label;
6990 INIT_ANCHOR(cond_seq);
6991 then_label = NEW_LABEL(line);
6992 else_label = NEW_LABEL(line);
6995 NODE *cond = RNODE_IF(node)->nd_cond;
6996 if (nd_type(cond) == NODE_BLOCK) {
6997 cond = RNODE_BLOCK(cond)->nd_head;
7000 CHECK(compile_branch_condition(iseq, cond_seq, cond, then_label, else_label));
7001 ADD_SEQ(ret, cond_seq);
/* only a two-armed branch gets branch-coverage bookkeeping */
7003 if (then_label->refcnt && else_label->refcnt) {
7004 branches = decl_branch_base(iseq, PTR2NUM(node), nd_code_loc(node),
type == NODE_IF ?
"if" :
"unless");
7007 if (then_label->refcnt) {
7008 ADD_LABEL(ret, then_label);
7010 DECL_ANCHOR(then_seq);
7011 INIT_ANCHOR(then_seq);
7012 CHECK(COMPILE_(then_seq,
"then", node_body, popped));
7014 if (else_label->refcnt) {
7015 const NODE *
const coverage_node = node_body ? node_body : node;
7016 add_trace_branch_coverage(
7019 nd_code_loc(coverage_node),
7020 nd_node_id(coverage_node),
7022 type == NODE_IF ?
"then" :
"else",
7024 end_label = NEW_LABEL(line);
7025 ADD_INSNL(then_seq, line_node, jump, end_label);
7027 ADD_INSN(then_seq, line_node, pop);
7030 ADD_SEQ(ret, then_seq);
7033 if (else_label->refcnt) {
7034 ADD_LABEL(ret, else_label);
7036 DECL_ANCHOR(else_seq);
7037 INIT_ANCHOR(else_seq);
7038 CHECK(COMPILE_(else_seq,
"else", node_else, popped));
7040 if (then_label->refcnt) {
7041 const NODE *
const coverage_node = node_else ? node_else : node;
7042 add_trace_branch_coverage(
7045 nd_code_loc(coverage_node),
7046 nd_node_id(coverage_node),
7048 type == NODE_IF ?
"else" :
"then",
7051 ADD_SEQ(ret, else_seq);
7055 ADD_LABEL(ret, end_label);
/* NOTE(review): garbled fragment of compile_case (header elided).  Compiles
 * `case expr ... when` with a target value: builds cond_seq (comparisons)
 * and body_seq (clause bodies) in parallel, collecting optimizable literals
 * into a CDHASH so the whole dispatch can become a single
 * opt_case_dispatch when every `when` value is a special literal and the
 * option is enabled; otherwise falls through the compare chain.  An
 * explicit or implicit else pops the target and yields nil.  Left
 * byte-identical; interior lines elided. */
7065 const NODE *node = orig_node;
7066 LABEL *endlabel, *elselabel;
7068 DECL_ANCHOR(body_seq);
7069 DECL_ANCHOR(cond_seq);
7070 int only_special_literals = 1;
7071 VALUE literals = rb_hash_new();
7073 enum node_type
type;
7074 const NODE *line_node;
7079 INIT_ANCHOR(body_seq);
7080 INIT_ANCHOR(cond_seq);
/* CDHASH comparison semantics (eql? for the dispatch table) */
7082 RHASH_TBL_RAW(literals)->type = &cdhash_type;
7084 CHECK(COMPILE(head,
"case base", RNODE_CASE(node)->nd_head));
7086 branches = decl_branch_base(iseq, PTR2NUM(node), nd_code_loc(node),
"case");
7088 node = RNODE_CASE(node)->nd_body;
7089 EXPECT_NODE(
"NODE_CASE", node, NODE_WHEN, COMPILE_NG);
7090 type = nd_type(node);
7091 line = nd_line(node);
7094 endlabel = NEW_LABEL(line);
7095 elselabel = NEW_LABEL(line);
7099 while (
type == NODE_WHEN) {
7102 l1 = NEW_LABEL(line);
7103 ADD_LABEL(body_seq, l1);
7104 ADD_INSN(body_seq, line_node, pop);
7106 const NODE *
const coverage_node = RNODE_WHEN(node)->nd_body ? RNODE_WHEN(node)->nd_body : node;
7107 add_trace_branch_coverage(
7110 nd_code_loc(coverage_node),
7111 nd_node_id(coverage_node),
7116 CHECK(COMPILE_(body_seq,
"when body", RNODE_WHEN(node)->nd_body, popped));
7117 ADD_INSNL(body_seq, line_node, jump, endlabel);
7119 vals = RNODE_WHEN(node)->nd_head;
7121 switch (nd_type(vals)) {
7123 only_special_literals = when_vals(iseq, cond_seq, vals, l1, only_special_literals, literals);
7124 if (only_special_literals < 0)
return COMPILE_NG;
/* splats defeat the static dispatch table */
7129 only_special_literals = 0;
7130 CHECK(when_splat_vals(iseq, cond_seq, vals, l1, only_special_literals, literals));
7133 UNKNOWN_NODE(
"NODE_CASE", vals, COMPILE_NG);
7137 EXPECT_NODE_NONULL(
"NODE_CASE", node, NODE_LIST, COMPILE_NG);
7140 node = RNODE_WHEN(node)->nd_next;
7144 type = nd_type(node);
7145 line = nd_line(node);
7150 ADD_LABEL(cond_seq, elselabel);
7151 ADD_INSN(cond_seq, line_node, pop);
7152 add_trace_branch_coverage(iseq, cond_seq, nd_code_loc(node), nd_node_id(node), branch_id,
"else", branches);
7153 CHECK(COMPILE_(cond_seq,
"else", node, popped));
7154 ADD_INSNL(cond_seq, line_node, jump, endlabel);
7157 debugs(
"== else (implicit)\n");
7158 ADD_LABEL(cond_seq, elselabel);
7159 ADD_INSN(cond_seq, orig_node, pop);
7160 add_trace_branch_coverage(iseq, cond_seq, nd_code_loc(orig_node), nd_node_id(orig_node), branch_id,
"else", branches);
7162 ADD_INSN(cond_seq, orig_node, putnil);
7164 ADD_INSNL(cond_seq, orig_node, jump, endlabel);
7167 if (only_special_literals && ISEQ_COMPILE_DATA(iseq)->option->specialized_instruction) {
7168 ADD_INSN(ret, orig_node, dup);
7170 ADD_INSN2(ret, orig_node, opt_case_dispatch, literals, elselabel);
7172 LABEL_REF(elselabel);
7175 ADD_SEQ(ret, cond_seq);
7176 ADD_SEQ(ret, body_seq);
7177 ADD_LABEL(ret, endlabel);
/* NOTE(review): garbled fragment of compile_case2 (header elided).
 * Compiles a condition-less `case; when cond ...`: each `when` value is
 * evaluated as a branch condition (splats via checkmatch with
 * VM_CHECKMATCH_TYPE_WHEN), jumping to the clause body on truth; records
 * branch coverage per clause and for the (possibly absent) else.  Left
 * byte-identical; interior lines elided. */
7186 const NODE *node = RNODE_CASE2(orig_node)->nd_body;
7188 DECL_ANCHOR(body_seq);
7192 branches = decl_branch_base(iseq, PTR2NUM(orig_node), nd_code_loc(orig_node),
"case");
7194 INIT_ANCHOR(body_seq);
7195 endlabel = NEW_LABEL(nd_line(node));
7197 while (node && nd_type_p(node, NODE_WHEN)) {
7198 const int line = nd_line(node);
7199 LABEL *l1 = NEW_LABEL(line);
7200 ADD_LABEL(body_seq, l1);
7202 const NODE *
const coverage_node = RNODE_WHEN(node)->nd_body ? RNODE_WHEN(node)->nd_body : node;
7203 add_trace_branch_coverage(
7206 nd_code_loc(coverage_node),
7207 nd_node_id(coverage_node),
7212 CHECK(COMPILE_(body_seq,
"when", RNODE_WHEN(node)->nd_body, popped));
7213 ADD_INSNL(body_seq, node, jump, endlabel);
7215 vals = RNODE_WHEN(node)->nd_head;
7217 EXPECT_NODE_NONULL(
"NODE_WHEN", node, NODE_LIST, COMPILE_NG);
7219 switch (nd_type(vals)) {
7223 val = RNODE_LIST(vals)->nd_head;
7224 lnext = NEW_LABEL(nd_line(val));
7225 debug_compile(
"== when2\n", (
void)0);
7226 CHECK(compile_branch_condition(iseq, ret, val, l1, lnext));
7227 ADD_LABEL(ret, lnext);
7228 vals = RNODE_LIST(vals)->nd_next;
7234 ADD_INSN(ret, vals, putnil);
7235 CHECK(COMPILE(ret,
"when2/cond splat", vals));
7236 ADD_INSN1(ret, vals, checkmatch,
INT2FIX(VM_CHECKMATCH_TYPE_WHEN | VM_CHECKMATCH_ARRAY));
7237 ADD_INSNL(ret, vals, branchif, l1);
7240 UNKNOWN_NODE(
"NODE_WHEN", vals, COMPILE_NG);
7242 node = RNODE_WHEN(node)->nd_next;
7245 const NODE *
const coverage_node = node ? node : orig_node;
7246 add_trace_branch_coverage(
7249 nd_code_loc(coverage_node),
7250 nd_node_id(coverage_node),
7254 CHECK(COMPILE_(ret,
"else", node, popped));
7255 ADD_INSNL(ret, orig_node, jump, endlabel);
7257 ADD_SEQ(ret, body_seq);
7258 ADD_LABEL(ret, endlabel);
/*
 * Forward declarations for the `case ... in` (pattern matching) compile
 * helpers.  They all append instructions to `ret` and follow the usual
 * COMPILE_OK / COMPILE_NG convention (callers wrap them in CHECK()).
 * `base_index` is the distance from the current stack top to the
 * per-pattern bookkeeping slots described by the CASE3_BI_OFFSET_*
 * macros below.
 */
/* Compile `node` as one pattern: fall through on match, jump to
 * `unmatched` otherwise. */
7262static int iseq_compile_pattern_match(
rb_iseq_t *iseq,
LINK_ANCHOR *
const ret,
const NODE *
const node,
LABEL *unmatched,
bool in_single_pattern,
bool in_alt_pattern,
int base_index,
bool use_deconstructed_cache);
/* Emit the `Const === value` check for a pattern with a constant prefix
 * (e.g. `in Point[x, y]`); jumps to `match_failed` when the check is
 * falsy, does nothing when the pattern has no constant. */
7264static int iseq_compile_pattern_constant(
rb_iseq_t *iseq,
LINK_ANCHOR *
const ret,
const NODE *
const node,
LABEL *match_failed,
bool in_single_pattern,
int base_index);
/* Emit the #deconstruct call (optionally memoized in a stack slot) used
 * by array and find patterns; branches to `match_failed` when the value
 * does not respond to #deconstruct and to `type_error` when the result
 * fails the array check. */
7265static int iseq_compile_array_deconstruct(
rb_iseq_t *iseq,
LINK_ANCHOR *
const ret,
const NODE *
const node,
LABEL *deconstruct,
LABEL *deconstructed,
LABEL *match_failed,
LABEL *type_error,
bool in_single_pattern,
int base_index,
bool use_deconstructed_cache);
/* Store a sprintf-formatted failure message into the error-string slot
 * of a single-line pattern (general case). */
7266static int iseq_compile_pattern_set_general_errmsg(
rb_iseq_t *iseq,
LINK_ANCHOR *
const ret,
const NODE *
const node,
VALUE errmsg,
int base_index);
/* As above, but for length mismatches: the format also receives the
 * actual length and the expected `pattern_length`. */
7267static int iseq_compile_pattern_set_length_errmsg(
rb_iseq_t *iseq,
LINK_ANCHOR *
const ret,
const NODE *
const node,
VALUE errmsg,
VALUE pattern_length,
int base_index);
/* As above, for a failed `===` (checkmatch) test. */
7268static int iseq_compile_pattern_set_eqq_errmsg(
rb_iseq_t *iseq,
LINK_ANCHOR *
const ret,
const NODE *
const node,
int base_index);
/*
 * Offsets (added to base_index) of the stack slots that `case/in`
 * compilation reserves per pattern: a cache slot for the #deconstruct
 * result, the formatted error string, a "was it a key error?" flag, and
 * the matchee/key operands used to build a NoMatchingPatternKeyError.
 */
7270#define CASE3_BI_OFFSET_DECONSTRUCTED_CACHE 0
7271#define CASE3_BI_OFFSET_ERROR_STRING 1
7272#define CASE3_BI_OFFSET_KEY_ERROR_P 2
7273#define CASE3_BI_OFFSET_KEY_ERROR_MATCHEE 3
7274#define CASE3_BI_OFFSET_KEY_ERROR_KEY 4
7277iseq_compile_pattern_each(
rb_iseq_t *iseq,
LINK_ANCHOR *
const ret,
const NODE *
const node,
LABEL *matched,
LABEL *unmatched,
bool in_single_pattern,
bool in_alt_pattern,
int base_index,
bool use_deconstructed_cache)
7279 const int line = nd_line(node);
7280 const NODE *line_node = node;
7282 switch (nd_type(node)) {
7336 const NODE *args = RNODE_ARYPTN(node)->pre_args;
7337 const int pre_args_num = RNODE_ARYPTN(node)->pre_args ?
rb_long2int(RNODE_LIST(RNODE_ARYPTN(node)->pre_args)->as.nd_alen) : 0;
7338 const int post_args_num = RNODE_ARYPTN(node)->post_args ?
rb_long2int(RNODE_LIST(RNODE_ARYPTN(node)->post_args)->as.nd_alen) : 0;
7340 const int min_argc = pre_args_num + post_args_num;
7341 const int use_rest_num = RNODE_ARYPTN(node)->rest_arg && (NODE_NAMED_REST_P(RNODE_ARYPTN(node)->rest_arg) ||
7342 (!NODE_NAMED_REST_P(RNODE_ARYPTN(node)->rest_arg) && post_args_num > 0));
7344 LABEL *match_failed, *type_error, *deconstruct, *deconstructed;
7346 match_failed = NEW_LABEL(line);
7347 type_error = NEW_LABEL(line);
7348 deconstruct = NEW_LABEL(line);
7349 deconstructed = NEW_LABEL(line);
7352 ADD_INSN1(ret, line_node, putobject,
INT2FIX(0));
7353 ADD_INSN(ret, line_node, swap);
7359 CHECK(iseq_compile_pattern_constant(iseq, ret, node, match_failed, in_single_pattern, base_index));
7361 CHECK(iseq_compile_array_deconstruct(iseq, ret, node, deconstruct, deconstructed, match_failed, type_error, in_single_pattern, base_index, use_deconstructed_cache));
7363 ADD_INSN(ret, line_node, dup);
7364 ADD_SEND(ret, line_node, idLength,
INT2FIX(0));
7365 ADD_INSN1(ret, line_node, putobject,
INT2FIX(min_argc));
7366 ADD_SEND(ret, line_node, RNODE_ARYPTN(node)->rest_arg ? idGE : idEq,
INT2FIX(1));
7367 if (in_single_pattern) {
7368 CHECK(iseq_compile_pattern_set_length_errmsg(iseq, ret, node,
7369 RNODE_ARYPTN(node)->rest_arg ? rb_fstring_lit(
"%p length mismatch (given %p, expected %p+)") :
7370 rb_fstring_lit(
"%p length mismatch (given %p, expected %p)"),
7371 INT2FIX(min_argc), base_index + 1 ));
7373 ADD_INSNL(ret, line_node, branchunless, match_failed);
7375 for (i = 0; i < pre_args_num; i++) {
7376 ADD_INSN(ret, line_node, dup);
7377 ADD_INSN1(ret, line_node, putobject,
INT2FIX(i));
7378 ADD_SEND(ret, line_node, idAREF,
INT2FIX(1));
7379 CHECK(iseq_compile_pattern_match(iseq, ret, RNODE_LIST(args)->nd_head, match_failed, in_single_pattern, in_alt_pattern, base_index + 1 ,
false));
7380 args = RNODE_LIST(args)->nd_next;
7383 if (RNODE_ARYPTN(node)->rest_arg) {
7384 if (NODE_NAMED_REST_P(RNODE_ARYPTN(node)->rest_arg)) {
7385 ADD_INSN(ret, line_node, dup);
7386 ADD_INSN1(ret, line_node, putobject,
INT2FIX(pre_args_num));
7387 ADD_INSN1(ret, line_node, topn,
INT2FIX(1));
7388 ADD_SEND(ret, line_node, idLength,
INT2FIX(0));
7389 ADD_INSN1(ret, line_node, putobject,
INT2FIX(min_argc));
7390 ADD_SEND(ret, line_node, idMINUS,
INT2FIX(1));
7391 ADD_INSN1(ret, line_node, setn,
INT2FIX(4));
7392 ADD_SEND(ret, line_node, idAREF,
INT2FIX(2));
7394 CHECK(iseq_compile_pattern_match(iseq, ret, RNODE_ARYPTN(node)->rest_arg, match_failed, in_single_pattern, in_alt_pattern, base_index + 1 ,
false));
7397 if (post_args_num > 0) {
7398 ADD_INSN(ret, line_node, dup);
7399 ADD_SEND(ret, line_node, idLength,
INT2FIX(0));
7400 ADD_INSN1(ret, line_node, putobject,
INT2FIX(min_argc));
7401 ADD_SEND(ret, line_node, idMINUS,
INT2FIX(1));
7402 ADD_INSN1(ret, line_node, setn,
INT2FIX(2));
7403 ADD_INSN(ret, line_node, pop);
7408 args = RNODE_ARYPTN(node)->post_args;
7409 for (i = 0; i < post_args_num; i++) {
7410 ADD_INSN(ret, line_node, dup);
7412 ADD_INSN1(ret, line_node, putobject,
INT2FIX(pre_args_num + i));
7413 ADD_INSN1(ret, line_node, topn,
INT2FIX(3));
7414 ADD_SEND(ret, line_node, idPLUS,
INT2FIX(1));
7416 ADD_SEND(ret, line_node, idAREF,
INT2FIX(1));
7417 CHECK(iseq_compile_pattern_match(iseq, ret, RNODE_LIST(args)->nd_head, match_failed, in_single_pattern, in_alt_pattern, base_index + 1 ,
false));
7418 args = RNODE_LIST(args)->nd_next;
7421 ADD_INSN(ret, line_node, pop);
7423 ADD_INSN(ret, line_node, pop);
7425 ADD_INSNL(ret, line_node, jump, matched);
7426 ADD_INSN(ret, line_node, putnil);
7428 ADD_INSN(ret, line_node, putnil);
7431 ADD_LABEL(ret, type_error);
7432 ADD_INSN1(ret, line_node, putspecialobject,
INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
7434 ADD_INSN1(ret, line_node, putobject, rb_fstring_lit(
"deconstruct must return Array"));
7435 ADD_SEND(ret, line_node, id_core_raise,
INT2FIX(2));
7436 ADD_INSN(ret, line_node, pop);
7438 ADD_LABEL(ret, match_failed);
7439 ADD_INSN(ret, line_node, pop);
7441 ADD_INSN(ret, line_node, pop);
7443 ADD_INSNL(ret, line_node, jump, unmatched);
7496 const NODE *args = RNODE_FNDPTN(node)->args;
7497 const int args_num = RNODE_FNDPTN(node)->args ?
rb_long2int(RNODE_LIST(RNODE_FNDPTN(node)->args)->as.nd_alen) : 0;
7499 LABEL *match_failed, *type_error, *deconstruct, *deconstructed;
7500 match_failed = NEW_LABEL(line);
7501 type_error = NEW_LABEL(line);
7502 deconstruct = NEW_LABEL(line);
7503 deconstructed = NEW_LABEL(line);
7505 CHECK(iseq_compile_pattern_constant(iseq, ret, node, match_failed, in_single_pattern, base_index));
7507 CHECK(iseq_compile_array_deconstruct(iseq, ret, node, deconstruct, deconstructed, match_failed, type_error, in_single_pattern, base_index, use_deconstructed_cache));
7509 ADD_INSN(ret, line_node, dup);
7510 ADD_SEND(ret, line_node, idLength,
INT2FIX(0));
7511 ADD_INSN1(ret, line_node, putobject,
INT2FIX(args_num));
7512 ADD_SEND(ret, line_node, idGE,
INT2FIX(1));
7513 if (in_single_pattern) {
7514 CHECK(iseq_compile_pattern_set_length_errmsg(iseq, ret, node, rb_fstring_lit(
"%p length mismatch (given %p, expected %p+)"),
INT2FIX(args_num), base_index + 1 ));
7516 ADD_INSNL(ret, line_node, branchunless, match_failed);
7519 LABEL *while_begin = NEW_LABEL(nd_line(node));
7520 LABEL *next_loop = NEW_LABEL(nd_line(node));
7521 LABEL *find_succeeded = NEW_LABEL(line);
7522 LABEL *find_failed = NEW_LABEL(nd_line(node));
7525 ADD_INSN(ret, line_node, dup);
7526 ADD_SEND(ret, line_node, idLength,
INT2FIX(0));
7528 ADD_INSN(ret, line_node, dup);
7529 ADD_INSN1(ret, line_node, putobject,
INT2FIX(args_num));
7530 ADD_SEND(ret, line_node, idMINUS,
INT2FIX(1));
7532 ADD_INSN1(ret, line_node, putobject,
INT2FIX(0));
7534 ADD_LABEL(ret, while_begin);
7536 ADD_INSN(ret, line_node, dup);
7537 ADD_INSN1(ret, line_node, topn,
INT2FIX(2));
7538 ADD_SEND(ret, line_node, idLE,
INT2FIX(1));
7539 ADD_INSNL(ret, line_node, branchunless, find_failed);
7541 for (j = 0; j < args_num; j++) {
7542 ADD_INSN1(ret, line_node, topn,
INT2FIX(3));
7543 ADD_INSN1(ret, line_node, topn,
INT2FIX(1));
7545 ADD_INSN1(ret, line_node, putobject,
INT2FIX(j));
7546 ADD_SEND(ret, line_node, idPLUS,
INT2FIX(1));
7548 ADD_SEND(ret, line_node, idAREF,
INT2FIX(1));
7550 CHECK(iseq_compile_pattern_match(iseq, ret, RNODE_LIST(args)->nd_head, next_loop, in_single_pattern, in_alt_pattern, base_index + 4 ,
false));
7551 args = RNODE_LIST(args)->nd_next;
7554 if (NODE_NAMED_REST_P(RNODE_FNDPTN(node)->pre_rest_arg)) {
7555 ADD_INSN1(ret, line_node, topn,
INT2FIX(3));
7556 ADD_INSN1(ret, line_node, putobject,
INT2FIX(0));
7557 ADD_INSN1(ret, line_node, topn,
INT2FIX(2));
7558 ADD_SEND(ret, line_node, idAREF,
INT2FIX(2));
7559 CHECK(iseq_compile_pattern_match(iseq, ret, RNODE_FNDPTN(node)->pre_rest_arg, find_failed, in_single_pattern, in_alt_pattern, base_index + 4 ,
false));
7561 if (NODE_NAMED_REST_P(RNODE_FNDPTN(node)->post_rest_arg)) {
7562 ADD_INSN1(ret, line_node, topn,
INT2FIX(3));
7563 ADD_INSN1(ret, line_node, topn,
INT2FIX(1));
7564 ADD_INSN1(ret, line_node, putobject,
INT2FIX(args_num));
7565 ADD_SEND(ret, line_node, idPLUS,
INT2FIX(1));
7566 ADD_INSN1(ret, line_node, topn,
INT2FIX(3));
7567 ADD_SEND(ret, line_node, idAREF,
INT2FIX(2));
7568 CHECK(iseq_compile_pattern_match(iseq, ret, RNODE_FNDPTN(node)->post_rest_arg, find_failed, in_single_pattern, in_alt_pattern, base_index + 4 ,
false));
7570 ADD_INSNL(ret, line_node, jump, find_succeeded);
7572 ADD_LABEL(ret, next_loop);
7573 ADD_INSN1(ret, line_node, putobject,
INT2FIX(1));
7574 ADD_SEND(ret, line_node, idPLUS,
INT2FIX(1));
7575 ADD_INSNL(ret, line_node, jump, while_begin);
7577 ADD_LABEL(ret, find_failed);
7578 ADD_INSN1(ret, line_node, adjuststack,
INT2FIX(3));
7579 if (in_single_pattern) {
7580 ADD_INSN1(ret, line_node, putspecialobject,
INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
7581 ADD_INSN1(ret, line_node, putobject, rb_fstring_lit(
"%p does not match to find pattern"));
7582 ADD_INSN1(ret, line_node, topn,
INT2FIX(2));
7583 ADD_SEND(ret, line_node, id_core_sprintf,
INT2FIX(2));
7584 ADD_INSN1(ret, line_node, setn,
INT2FIX(base_index + CASE3_BI_OFFSET_ERROR_STRING + 1 ));
7586 ADD_INSN1(ret, line_node, putobject,
Qfalse);
7587 ADD_INSN1(ret, line_node, setn,
INT2FIX(base_index + CASE3_BI_OFFSET_KEY_ERROR_P + 2 ));
7589 ADD_INSN(ret, line_node, pop);
7590 ADD_INSN(ret, line_node, pop);
7592 ADD_INSNL(ret, line_node, jump, match_failed);
7593 ADD_INSN1(ret, line_node, dupn,
INT2FIX(3));
7595 ADD_LABEL(ret, find_succeeded);
7596 ADD_INSN1(ret, line_node, adjuststack,
INT2FIX(3));
7599 ADD_INSN(ret, line_node, pop);
7600 ADD_INSNL(ret, line_node, jump, matched);
7601 ADD_INSN(ret, line_node, putnil);
7603 ADD_LABEL(ret, type_error);
7604 ADD_INSN1(ret, line_node, putspecialobject,
INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
7606 ADD_INSN1(ret, line_node, putobject, rb_fstring_lit(
"deconstruct must return Array"));
7607 ADD_SEND(ret, line_node, id_core_raise,
INT2FIX(2));
7608 ADD_INSN(ret, line_node, pop);
7610 ADD_LABEL(ret, match_failed);
7611 ADD_INSN(ret, line_node, pop);
7612 ADD_INSNL(ret, line_node, jump, unmatched);
7676 LABEL *match_failed, *type_error;
7679 match_failed = NEW_LABEL(line);
7680 type_error = NEW_LABEL(line);
7682 if (RNODE_HSHPTN(node)->nd_pkwargs && !RNODE_HSHPTN(node)->nd_pkwrestarg) {
7683 const NODE *kw_args = RNODE_HASH(RNODE_HSHPTN(node)->nd_pkwargs)->nd_head;
7684 keys =
rb_ary_new_capa(kw_args ? RNODE_LIST(kw_args)->as.nd_alen/2 : 0);
7686 rb_ary_push(keys, get_symbol_value(iseq, RNODE_LIST(kw_args)->nd_head));
7687 kw_args = RNODE_LIST(RNODE_LIST(kw_args)->nd_next)->nd_next;
7691 CHECK(iseq_compile_pattern_constant(iseq, ret, node, match_failed, in_single_pattern, base_index));
7693 ADD_INSN(ret, line_node, dup);
7694 ADD_INSN1(ret, line_node, putobject,
ID2SYM(rb_intern(
"deconstruct_keys")));
7695 ADD_SEND(ret, line_node, idRespond_to,
INT2FIX(1));
7696 if (in_single_pattern) {
7697 CHECK(iseq_compile_pattern_set_general_errmsg(iseq, ret, node, rb_fstring_lit(
"%p does not respond to #deconstruct_keys"), base_index + 1 ));
7699 ADD_INSNL(ret, line_node, branchunless, match_failed);
7702 ADD_INSN(ret, line_node, putnil);
7705 RB_OBJ_SET_FROZEN_SHAREABLE(keys);
7706 ADD_INSN1(ret, line_node, duparray, keys);
7709 ADD_SEND(ret, line_node, rb_intern(
"deconstruct_keys"),
INT2FIX(1));
7711 ADD_INSN(ret, line_node, dup);
7713 ADD_INSNL(ret, line_node, branchunless, type_error);
7715 if (RNODE_HSHPTN(node)->nd_pkwrestarg) {
7716 ADD_SEND(ret, line_node, rb_intern(
"dup"),
INT2FIX(0));
7719 if (RNODE_HSHPTN(node)->nd_pkwargs) {
7723 args = RNODE_HASH(RNODE_HSHPTN(node)->nd_pkwargs)->nd_head;
7725 DECL_ANCHOR(match_values);
7726 INIT_ANCHOR(match_values);
7727 keys_num =
rb_long2int(RNODE_LIST(args)->as.nd_alen) / 2;
7728 for (i = 0; i < keys_num; i++) {
7729 NODE *key_node = RNODE_LIST(args)->nd_head;
7730 NODE *value_node = RNODE_LIST(RNODE_LIST(args)->nd_next)->nd_head;
7731 VALUE key = get_symbol_value(iseq, key_node);
7733 ADD_INSN(ret, line_node, dup);
7734 ADD_INSN1(ret, line_node, putobject, key);
7735 ADD_SEND(ret, line_node, rb_intern(
"key?"),
INT2FIX(1));
7736 if (in_single_pattern) {
7737 LABEL *match_succeeded;
7738 match_succeeded = NEW_LABEL(line);
7740 ADD_INSN(ret, line_node, dup);
7741 ADD_INSNL(ret, line_node, branchif, match_succeeded);
7744 ADD_INSN1(ret, line_node, putobject, RB_OBJ_SET_SHAREABLE(str));
7745 ADD_INSN1(ret, line_node, setn,
INT2FIX(base_index + CASE3_BI_OFFSET_ERROR_STRING + 2 ));
7746 ADD_INSN1(ret, line_node, putobject,
Qtrue);
7747 ADD_INSN1(ret, line_node, setn,
INT2FIX(base_index + CASE3_BI_OFFSET_KEY_ERROR_P + 3 ));
7748 ADD_INSN1(ret, line_node, topn,
INT2FIX(3));
7749 ADD_INSN1(ret, line_node, setn,
INT2FIX(base_index + CASE3_BI_OFFSET_KEY_ERROR_MATCHEE + 4 ));
7750 ADD_INSN1(ret, line_node, putobject, key);
7751 ADD_INSN1(ret, line_node, setn,
INT2FIX(base_index + CASE3_BI_OFFSET_KEY_ERROR_KEY + 5 ));
7753 ADD_INSN1(ret, line_node, adjuststack,
INT2FIX(4));
7755 ADD_LABEL(ret, match_succeeded);
7757 ADD_INSNL(ret, line_node, branchunless, match_failed);
7759 ADD_INSN(match_values, line_node, dup);
7760 ADD_INSN1(match_values, line_node, putobject, key);
7761 ADD_SEND(match_values, line_node, RNODE_HSHPTN(node)->nd_pkwrestarg ? rb_intern(
"delete") : idAREF,
INT2FIX(1));
7762 CHECK(iseq_compile_pattern_match(iseq, match_values, value_node, match_failed, in_single_pattern, in_alt_pattern, base_index + 1 ,
false));
7763 args = RNODE_LIST(RNODE_LIST(args)->nd_next)->nd_next;
7765 ADD_SEQ(ret, match_values);
7769 ADD_INSN(ret, line_node, dup);
7770 ADD_SEND(ret, line_node, idEmptyP,
INT2FIX(0));
7771 if (in_single_pattern) {
7772 CHECK(iseq_compile_pattern_set_general_errmsg(iseq, ret, node, rb_fstring_lit(
"%p is not empty"), base_index + 1 ));
7774 ADD_INSNL(ret, line_node, branchunless, match_failed);
7777 if (RNODE_HSHPTN(node)->nd_pkwrestarg) {
7778 if (RNODE_HSHPTN(node)->nd_pkwrestarg == NODE_SPECIAL_NO_REST_KEYWORD) {
7779 ADD_INSN(ret, line_node, dup);
7780 ADD_SEND(ret, line_node, idEmptyP,
INT2FIX(0));
7781 if (in_single_pattern) {
7782 CHECK(iseq_compile_pattern_set_general_errmsg(iseq, ret, node, rb_fstring_lit(
"rest of %p is not empty"), base_index + 1 ));
7784 ADD_INSNL(ret, line_node, branchunless, match_failed);
7787 ADD_INSN(ret, line_node, dup);
7788 CHECK(iseq_compile_pattern_match(iseq, ret, RNODE_HSHPTN(node)->nd_pkwrestarg, match_failed, in_single_pattern, in_alt_pattern, base_index + 1 ,
false));
7792 ADD_INSN(ret, line_node, pop);
7793 ADD_INSNL(ret, line_node, jump, matched);
7794 ADD_INSN(ret, line_node, putnil);
7796 ADD_LABEL(ret, type_error);
7797 ADD_INSN1(ret, line_node, putspecialobject,
INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
7799 ADD_INSN1(ret, line_node, putobject, rb_fstring_lit(
"deconstruct_keys must return Hash"));
7800 ADD_SEND(ret, line_node, id_core_raise,
INT2FIX(2));
7801 ADD_INSN(ret, line_node, pop);
7803 ADD_LABEL(ret, match_failed);
7804 ADD_INSN(ret, line_node, pop);
7805 ADD_INSNL(ret, line_node, jump, unmatched);
7814 case NODE_IMAGINARY:
7842 CHECK(COMPILE(ret,
"case in literal", node));
7843 if (in_single_pattern) {
7844 ADD_INSN1(ret, line_node, dupn,
INT2FIX(2));
7846 ADD_INSN1(ret, line_node, checkmatch,
INT2FIX(VM_CHECKMATCH_TYPE_CASE));
7847 if (in_single_pattern) {
7848 CHECK(iseq_compile_pattern_set_eqq_errmsg(iseq, ret, node, base_index + 2 ));
7850 ADD_INSNL(ret, line_node, branchif, matched);
7851 ADD_INSNL(ret, line_node, jump, unmatched);
7855 ID id = RNODE_LASGN(node)->nd_vid;
7856 int idx = ISEQ_BODY(body->local_iseq)->local_table_size - get_local_var_idx(iseq,
id);
7858 if (in_alt_pattern) {
7859 const char *name = rb_id2name(
id);
7860 if (name && strlen(name) > 0 && name[0] !=
'_') {
7861 COMPILE_ERROR(ERROR_ARGS
"illegal variable in alternative pattern (%"PRIsVALUE
")",
7867 ADD_SETLOCAL(ret, line_node, idx, get_lvar_level(iseq));
7868 ADD_INSNL(ret, line_node, jump, matched);
7873 ID id = RNODE_DASGN(node)->nd_vid;
7875 idx = get_dyna_var_idx(iseq,
id, &lv, &ls);
7877 if (in_alt_pattern) {
7878 const char *name = rb_id2name(
id);
7879 if (name && strlen(name) > 0 && name[0] !=
'_') {
7880 COMPILE_ERROR(ERROR_ARGS
"illegal variable in alternative pattern (%"PRIsVALUE
")",
7887 COMPILE_ERROR(ERROR_ARGS
"NODE_DASGN: unknown id (%"PRIsVALUE
")",
7891 ADD_SETLOCAL(ret, line_node, ls - idx, lv);
7892 ADD_INSNL(ret, line_node, jump, matched);
7897 LABEL *match_failed;
7898 match_failed = unmatched;
7899 CHECK(iseq_compile_pattern_match(iseq, ret, RNODE_IF(node)->nd_body, unmatched, in_single_pattern, in_alt_pattern, base_index, use_deconstructed_cache));
7900 CHECK(COMPILE(ret,
"case in if", RNODE_IF(node)->nd_cond));
7901 if (in_single_pattern) {
7902 LABEL *match_succeeded;
7903 match_succeeded = NEW_LABEL(line);
7905 ADD_INSN(ret, line_node, dup);
7906 if (nd_type_p(node, NODE_IF)) {
7907 ADD_INSNL(ret, line_node, branchif, match_succeeded);
7910 ADD_INSNL(ret, line_node, branchunless, match_succeeded);
7913 ADD_INSN1(ret, line_node, putobject, rb_fstring_lit(
"guard clause does not return true"));
7914 ADD_INSN1(ret, line_node, setn,
INT2FIX(base_index + CASE3_BI_OFFSET_ERROR_STRING + 1 ));
7915 ADD_INSN1(ret, line_node, putobject,
Qfalse);
7916 ADD_INSN1(ret, line_node, setn,
INT2FIX(base_index + CASE3_BI_OFFSET_KEY_ERROR_P + 2 ));
7918 ADD_INSN(ret, line_node, pop);
7919 ADD_INSN(ret, line_node, pop);
7921 ADD_LABEL(ret, match_succeeded);
7923 if (nd_type_p(node, NODE_IF)) {
7924 ADD_INSNL(ret, line_node, branchunless, match_failed);
7927 ADD_INSNL(ret, line_node, branchif, match_failed);
7929 ADD_INSNL(ret, line_node, jump, matched);
7934 LABEL *match_failed;
7935 match_failed = NEW_LABEL(line);
7937 n = RNODE_HASH(node)->nd_head;
7938 if (! (nd_type_p(n, NODE_LIST) && RNODE_LIST(n)->as.nd_alen == 2)) {
7939 COMPILE_ERROR(ERROR_ARGS
"unexpected node");
7943 ADD_INSN(ret, line_node, dup);
7944 CHECK(iseq_compile_pattern_match(iseq, ret, RNODE_LIST(n)->nd_head, match_failed, in_single_pattern, in_alt_pattern, base_index + 1 , use_deconstructed_cache));
7945 CHECK(iseq_compile_pattern_each(iseq, ret, RNODE_LIST(RNODE_LIST(n)->nd_next)->nd_head, matched, match_failed, in_single_pattern, in_alt_pattern, base_index,
false));
7946 ADD_INSN(ret, line_node, putnil);
7948 ADD_LABEL(ret, match_failed);
7949 ADD_INSN(ret, line_node, pop);
7950 ADD_INSNL(ret, line_node, jump, unmatched);
7954 LABEL *match_succeeded, *fin;
7955 match_succeeded = NEW_LABEL(line);
7956 fin = NEW_LABEL(line);
7958 ADD_INSN(ret, line_node, dup);
7959 CHECK(iseq_compile_pattern_each(iseq, ret, RNODE_OR(node)->nd_1st, match_succeeded, fin, in_single_pattern,
true, base_index + 1 , use_deconstructed_cache));
7960 ADD_LABEL(ret, match_succeeded);
7961 ADD_INSN(ret, line_node, pop);
7962 ADD_INSNL(ret, line_node, jump, matched);
7963 ADD_INSN(ret, line_node, putnil);
7964 ADD_LABEL(ret, fin);
7965 CHECK(iseq_compile_pattern_each(iseq, ret, RNODE_OR(node)->nd_2nd, matched, unmatched, in_single_pattern,
true, base_index, use_deconstructed_cache));
7969 UNKNOWN_NODE(
"NODE_IN", node, COMPILE_NG);
/*
 * Compile `node` as a single pattern.  Delegates to
 * iseq_compile_pattern_each with a fresh `fin` label as the "matched"
 * target, so a successful match simply falls through past `fin`, while
 * a mismatch jumps to `unmatched`.
 * NOTE(review): the `static int` specifier, braces and the trailing
 * `return COMPILE_OK;` of this definition are not visible in this view.
 */
7975iseq_compile_pattern_match(
rb_iseq_t *iseq,
LINK_ANCHOR *
const ret,
const NODE *
const node,
LABEL *unmatched,
bool in_single_pattern,
bool in_alt_pattern,
int base_index,
bool use_deconstructed_cache)
7977 LABEL *fin = NEW_LABEL(nd_line(node));
7978 CHECK(iseq_compile_pattern_each(iseq, ret, node, fin, unmatched, in_single_pattern, in_alt_pattern, base_index, use_deconstructed_cache));
7979 ADD_LABEL(ret, fin);
/*
 * If the pattern carries a constant (`nd_pconst`, e.g. `in Point[...]`),
 * emit `constant === value` via the `checkmatch` instruction and jump to
 * `match_failed` when it is falsy; emit nothing when there is no
 * constant.  In single-pattern mode the extra `dupn 2` keeps both
 * operands available so iseq_compile_pattern_set_eqq_errmsg can format
 * the failure message.
 * NOTE(review): RNODE_ARYPTN is used to read nd_pconst regardless of the
 * pattern node's actual type — presumably the field layout is shared
 * across ARYPTN/FNDPTN/HSHPTN; confirm against the node definitions.
 */
7984iseq_compile_pattern_constant(
rb_iseq_t *iseq,
LINK_ANCHOR *
const ret,
const NODE *
const node,
LABEL *match_failed,
bool in_single_pattern,
int base_index)
7986 const NODE *line_node = node;
7988 if (RNODE_ARYPTN(node)->nd_pconst) {
7989 ADD_INSN(ret, line_node, dup);
7990 CHECK(COMPILE(ret,
"constant", RNODE_ARYPTN(node)->nd_pconst));
7991 if (in_single_pattern) {
7992 ADD_INSN1(ret, line_node, dupn,
INT2FIX(2));
7994 ADD_INSN1(ret, line_node, checkmatch,
INT2FIX(VM_CHECKMATCH_TYPE_CASE));
7995 if (in_single_pattern) {
7996 CHECK(iseq_compile_pattern_set_eqq_errmsg(iseq, ret, node, base_index + 3 ));
7998 ADD_INSNL(ret, line_node, branchunless, match_failed);
/*
 * Emit the #deconstruct call shared by array and find patterns.
 * When `use_deconstructed_cache` is set, the stack slot at
 * base_index + CASE3_BI_OFFSET_DECONSTRUCTED_CACHE memoizes the
 * outcome: nil means "not tried yet" (branchnil -> deconstruct), a
 * falsy non-nil value means "does not respond to #deconstruct"
 * (branchunless -> match_failed), and any other value is the cached
 * array, which is copied over the matchee before jumping to
 * `deconstructed`.  On the slow path the value is probed with
 * respond_to?(:deconstruct) (failure -> match_failed, with an error
 * message in single-pattern mode), #deconstruct is called, and the
 * result is type-checked — a failing check jumps to `type_error`.
 * NOTE(review): the line between `dup` (8052) and the branchunless
 * (8054) is elided in this view — presumably an array type check;
 * confirm against the full source.
 */
8005iseq_compile_array_deconstruct(
rb_iseq_t *iseq,
LINK_ANCHOR *
const ret,
const NODE *
const node,
LABEL *deconstruct,
LABEL *deconstructed,
LABEL *match_failed,
LABEL *type_error,
bool in_single_pattern,
int base_index,
bool use_deconstructed_cache)
8007 const NODE *line_node = node;
8011 if (use_deconstructed_cache) {
8013 ADD_INSN1(ret, line_node, topn,
INT2FIX(base_index + CASE3_BI_OFFSET_DECONSTRUCTED_CACHE));
8014 ADD_INSNL(ret, line_node, branchnil, deconstruct);
8017 ADD_INSN1(ret, line_node, topn,
INT2FIX(base_index + CASE3_BI_OFFSET_DECONSTRUCTED_CACHE));
8018 ADD_INSNL(ret, line_node, branchunless, match_failed);
8021 ADD_INSN(ret, line_node, pop);
8022 ADD_INSN1(ret, line_node, topn,
INT2FIX(base_index + CASE3_BI_OFFSET_DECONSTRUCTED_CACHE - 1 ));
8023 ADD_INSNL(ret, line_node, jump, deconstructed);
8026 ADD_INSNL(ret, line_node, jump, deconstruct);
8029 ADD_LABEL(ret, deconstruct);
8030 ADD_INSN(ret, line_node, dup);
8031 ADD_INSN1(ret, line_node, putobject,
ID2SYM(rb_intern(
"deconstruct")));
8032 ADD_SEND(ret, line_node, idRespond_to,
INT2FIX(1));
8035 if (use_deconstructed_cache) {
8036 ADD_INSN1(ret, line_node, setn,
INT2FIX(base_index + CASE3_BI_OFFSET_DECONSTRUCTED_CACHE + 1 ));
8039 if (in_single_pattern) {
8040 CHECK(iseq_compile_pattern_set_general_errmsg(iseq, ret, node, rb_fstring_lit(
"%p does not respond to #deconstruct"), base_index + 1 ));
8043 ADD_INSNL(ret, line_node, branchunless, match_failed);
8045 ADD_SEND(ret, line_node, rb_intern(
"deconstruct"),
INT2FIX(0));
8048 if (use_deconstructed_cache) {
8049 ADD_INSN1(ret, line_node, setn,
INT2FIX(base_index + CASE3_BI_OFFSET_DECONSTRUCTED_CACHE));
8052 ADD_INSN(ret, line_node, dup);
8054 ADD_INSNL(ret, line_node, branchunless, type_error);
8056 ADD_LABEL(ret, deconstructed);
8072 const int line = nd_line(node);
8073 const NODE *line_node = node;
8074 LABEL *match_succeeded = NEW_LABEL(line);
8076 ADD_INSN(ret, line_node, dup);
8077 ADD_INSNL(ret, line_node, branchif, match_succeeded);
8079 ADD_INSN1(ret, line_node, putspecialobject,
INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
8080 ADD_INSN1(ret, line_node, putobject, errmsg);
8081 ADD_INSN1(ret, line_node, topn,
INT2FIX(3));
8082 ADD_SEND(ret, line_node, id_core_sprintf,
INT2FIX(2));
8083 ADD_INSN1(ret, line_node, setn,
INT2FIX(base_index + CASE3_BI_OFFSET_ERROR_STRING + 1 ));
8085 ADD_INSN1(ret, line_node, putobject,
Qfalse);
8086 ADD_INSN1(ret, line_node, setn,
INT2FIX(base_index + CASE3_BI_OFFSET_KEY_ERROR_P + 2 ));
8088 ADD_INSN(ret, line_node, pop);
8089 ADD_INSN(ret, line_node, pop);
8090 ADD_LABEL(ret, match_succeeded);
8106 const int line = nd_line(node);
8107 const NODE *line_node = node;
8108 LABEL *match_succeeded = NEW_LABEL(line);
8110 ADD_INSN(ret, line_node, dup);
8111 ADD_INSNL(ret, line_node, branchif, match_succeeded);
8113 ADD_INSN1(ret, line_node, putspecialobject,
INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
8114 ADD_INSN1(ret, line_node, putobject, errmsg);
8115 ADD_INSN1(ret, line_node, topn,
INT2FIX(3));
8116 ADD_INSN(ret, line_node, dup);
8117 ADD_SEND(ret, line_node, idLength,
INT2FIX(0));
8118 ADD_INSN1(ret, line_node, putobject, pattern_length);
8119 ADD_SEND(ret, line_node, id_core_sprintf,
INT2FIX(4));
8120 ADD_INSN1(ret, line_node, setn,
INT2FIX(base_index + CASE3_BI_OFFSET_ERROR_STRING + 1 ));
8122 ADD_INSN1(ret, line_node, putobject,
Qfalse);
8123 ADD_INSN1(ret, line_node, setn,
INT2FIX(base_index + CASE3_BI_OFFSET_KEY_ERROR_P + 2));
8125 ADD_INSN(ret, line_node, pop);
8126 ADD_INSN(ret, line_node, pop);
8127 ADD_LABEL(ret, match_succeeded);
/*
 * Single-pattern error reporting for a failed `===` test.  The check
 * result is on top of the stack: when truthy, control jumps straight
 * to `match_succeeded`; when falsy, the two `===` operands (at topn 3
 * and topn 5) are formatted through the VM-core sprintf with
 * "%p === %p does not return true", the result is stored into the
 * error-string slot, and the key-error flag is set to Qfalse before the
 * temporaries are popped.  The trailing setn(2)/pop/pop after the label
 * run on both paths and restore the stack layout expected by the
 * caller (iseq_compile_pattern_constant / the literal-pattern case).
 * NOTE(review): the `static int` line, braces and final return of this
 * definition are not visible in this view.
 */
8133iseq_compile_pattern_set_eqq_errmsg(
rb_iseq_t *iseq,
LINK_ANCHOR *
const ret,
const NODE *
const node,
int base_index)
8143 const int line = nd_line(node);
8144 const NODE *line_node = node;
8145 LABEL *match_succeeded = NEW_LABEL(line);
8147 ADD_INSN(ret, line_node, dup);
8148 ADD_INSNL(ret, line_node, branchif, match_succeeded);
8150 ADD_INSN1(ret, line_node, putspecialobject,
INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
8151 ADD_INSN1(ret, line_node, putobject, rb_fstring_lit(
"%p === %p does not return true"));
8152 ADD_INSN1(ret, line_node, topn,
INT2FIX(3));
8153 ADD_INSN1(ret, line_node, topn,
INT2FIX(5));
8154 ADD_SEND(ret, line_node, id_core_sprintf,
INT2FIX(3));
8155 ADD_INSN1(ret, line_node, setn,
INT2FIX(base_index + CASE3_BI_OFFSET_ERROR_STRING + 1 ));
8157 ADD_INSN1(ret, line_node, putobject,
Qfalse);
8158 ADD_INSN1(ret, line_node, setn,
INT2FIX(base_index + CASE3_BI_OFFSET_KEY_ERROR_P + 2 ));
8160 ADD_INSN(ret, line_node, pop);
8161 ADD_INSN(ret, line_node, pop);
8163 ADD_LABEL(ret, match_succeeded);
8164 ADD_INSN1(ret, line_node, setn,
INT2FIX(2));
8165 ADD_INSN(ret, line_node, pop);
8166 ADD_INSN(ret, line_node, pop);
8174 const NODE *pattern;
8175 const NODE *node = orig_node;
8176 LABEL *endlabel, *elselabel;
8178 DECL_ANCHOR(body_seq);
8179 DECL_ANCHOR(cond_seq);
8181 enum node_type
type;
8182 const NODE *line_node;
8185 bool single_pattern;
8188 INIT_ANCHOR(body_seq);
8189 INIT_ANCHOR(cond_seq);
8191 branches = decl_branch_base(iseq, PTR2NUM(node), nd_code_loc(node),
"case");
8193 node = RNODE_CASE3(node)->nd_body;
8194 EXPECT_NODE(
"NODE_CASE3", node, NODE_IN, COMPILE_NG);
8195 type = nd_type(node);
8196 line = nd_line(node);
8198 single_pattern = !RNODE_IN(node)->nd_next;
8200 endlabel = NEW_LABEL(line);
8201 elselabel = NEW_LABEL(line);
8203 if (single_pattern) {
8205 ADD_INSN(head, line_node, putnil);
8206 ADD_INSN(head, line_node, putnil);
8207 ADD_INSN1(head, line_node, putobject,
Qfalse);
8208 ADD_INSN(head, line_node, putnil);
8210 ADD_INSN(head, line_node, putnil);
8212 CHECK(COMPILE(head,
"case base", RNODE_CASE3(orig_node)->nd_head));
8216 while (
type == NODE_IN) {
8220 ADD_INSN(body_seq, line_node, putnil);
8222 l1 = NEW_LABEL(line);
8223 ADD_LABEL(body_seq, l1);
8224 ADD_INSN1(body_seq, line_node, adjuststack,
INT2FIX(single_pattern ? 6 : 2));
8226 const NODE *
const coverage_node = RNODE_IN(node)->nd_body ? RNODE_IN(node)->nd_body : node;
8227 add_trace_branch_coverage(
8230 nd_code_loc(coverage_node),
8231 nd_node_id(coverage_node),
8236 CHECK(COMPILE_(body_seq,
"in body", RNODE_IN(node)->nd_body, popped));
8237 ADD_INSNL(body_seq, line_node, jump, endlabel);
8239 pattern = RNODE_IN(node)->nd_head;
8241 int pat_line = nd_line(pattern);
8242 LABEL *next_pat = NEW_LABEL(pat_line);
8243 ADD_INSN (cond_seq, pattern, dup);
8245 CHECK(iseq_compile_pattern_each(iseq, cond_seq, pattern, l1, next_pat, single_pattern,
false, 2,
true));
8246 ADD_LABEL(cond_seq, next_pat);
8247 LABEL_UNREMOVABLE(next_pat);
8250 COMPILE_ERROR(ERROR_ARGS
"unexpected node");
8254 node = RNODE_IN(node)->nd_next;
8258 type = nd_type(node);
8259 line = nd_line(node);
8264 ADD_LABEL(cond_seq, elselabel);
8265 ADD_INSN(cond_seq, line_node, pop);
8266 ADD_INSN(cond_seq, line_node, pop);
8267 add_trace_branch_coverage(iseq, cond_seq, nd_code_loc(node), nd_node_id(node), branch_id,
"else", branches);
8268 CHECK(COMPILE_(cond_seq,
"else", node, popped));
8269 ADD_INSNL(cond_seq, line_node, jump, endlabel);
8270 ADD_INSN(cond_seq, line_node, putnil);
8272 ADD_INSN(cond_seq, line_node, putnil);
8276 debugs(
"== else (implicit)\n");
8277 ADD_LABEL(cond_seq, elselabel);
8278 add_trace_branch_coverage(iseq, cond_seq, nd_code_loc(orig_node), nd_node_id(orig_node), branch_id,
"else", branches);
8279 ADD_INSN1(cond_seq, orig_node, putspecialobject,
INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
8281 if (single_pattern) {
8289 LABEL *key_error, *fin;
8292 key_error = NEW_LABEL(line);
8293 fin = NEW_LABEL(line);
8296 kw_arg->references = 0;
8297 kw_arg->keyword_len = 2;
8298 kw_arg->keywords[0] =
ID2SYM(rb_intern(
"matchee"));
8299 kw_arg->keywords[1] =
ID2SYM(rb_intern(
"key"));
8301 ADD_INSN1(cond_seq, orig_node, topn,
INT2FIX(CASE3_BI_OFFSET_KEY_ERROR_P + 2));
8302 ADD_INSNL(cond_seq, orig_node, branchif, key_error);
8304 ADD_INSN1(cond_seq, orig_node, putspecialobject,
INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
8305 ADD_INSN1(cond_seq, orig_node, putobject, rb_fstring_lit(
"%p: %s"));
8306 ADD_INSN1(cond_seq, orig_node, topn,
INT2FIX(4));
8307 ADD_INSN1(cond_seq, orig_node, topn,
INT2FIX(CASE3_BI_OFFSET_ERROR_STRING + 6));
8308 ADD_SEND(cond_seq, orig_node, id_core_sprintf,
INT2FIX(3));
8309 ADD_SEND(cond_seq, orig_node, id_core_raise,
INT2FIX(2));
8310 ADD_INSNL(cond_seq, orig_node, jump, fin);
8312 ADD_LABEL(cond_seq, key_error);
8314 ADD_INSN1(cond_seq, orig_node, putspecialobject,
INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
8315 ADD_INSN1(cond_seq, orig_node, putobject, rb_fstring_lit(
"%p: %s"));
8316 ADD_INSN1(cond_seq, orig_node, topn,
INT2FIX(4));
8317 ADD_INSN1(cond_seq, orig_node, topn,
INT2FIX(CASE3_BI_OFFSET_ERROR_STRING + 6));
8318 ADD_SEND(cond_seq, orig_node, id_core_sprintf,
INT2FIX(3));
8319 ADD_INSN1(cond_seq, orig_node, topn,
INT2FIX(CASE3_BI_OFFSET_KEY_ERROR_MATCHEE + 4));
8320 ADD_INSN1(cond_seq, orig_node, topn,
INT2FIX(CASE3_BI_OFFSET_KEY_ERROR_KEY + 5));
8321 ADD_SEND_R(cond_seq, orig_node, rb_intern(
"new"),
INT2FIX(1), NULL,
INT2FIX(VM_CALL_KWARG), kw_arg);
8322 ADD_SEND(cond_seq, orig_node, id_core_raise,
INT2FIX(1));
8324 ADD_LABEL(cond_seq, fin);
8328 ADD_INSN1(cond_seq, orig_node, topn,
INT2FIX(2));
8329 ADD_SEND(cond_seq, orig_node, id_core_raise,
INT2FIX(2));
8331 ADD_INSN1(cond_seq, orig_node, adjuststack,
INT2FIX(single_pattern ? 7 : 3));
8333 ADD_INSN(cond_seq, orig_node, putnil);
8335 ADD_INSNL(cond_seq, orig_node, jump, endlabel);
8336 ADD_INSN1(cond_seq, orig_node, dupn,
INT2FIX(single_pattern ? 5 : 1));
8338 ADD_INSN(cond_seq, line_node, putnil);
8342 ADD_SEQ(ret, cond_seq);
8343 ADD_SEQ(ret, body_seq);
8344 ADD_LABEL(ret, endlabel);
8348#undef CASE3_BI_OFFSET_DECONSTRUCTED_CACHE
8349#undef CASE3_BI_OFFSET_ERROR_STRING
8350#undef CASE3_BI_OFFSET_KEY_ERROR_P
8351#undef CASE3_BI_OFFSET_KEY_ERROR_MATCHEE
8352#undef CASE3_BI_OFFSET_KEY_ERROR_KEY
/* compile_loop: compile a `while`/`until` loop node into `ret`.
 * Saves the enclosing loop's start/end/redo labels and loopval_popped so
 * nested loops restore them on exit, then emits the loop skeleton plus
 * BREAK/NEXT/REDO catch table entries covering the loop body.
 * NOTE(review): this excerpt elides many original lines (numbering jumps),
 * so comments annotate only the statements visible here. */
8357 const int line = (int)nd_line(node);
8358 const NODE *line_node = node;
/* Save enclosing-loop compile state; restored at the bottom. */
8360 LABEL *prev_start_label = ISEQ_COMPILE_DATA(iseq)->start_label;
8361 LABEL *prev_end_label = ISEQ_COMPILE_DATA(iseq)->end_label;
8362 LABEL *prev_redo_label = ISEQ_COMPILE_DATA(iseq)->redo_label;
8363 int prev_loopval_popped = ISEQ_COMPILE_DATA(iseq)->loopval_popped;
/* Fresh labels for this loop: `next` jumps to the condition check,
 * `redo` re-enters the body, `break` exits with a value. */
8368 LABEL *next_label = ISEQ_COMPILE_DATA(iseq)->start_label = NEW_LABEL(line);
8369 LABEL *redo_label = ISEQ_COMPILE_DATA(iseq)->redo_label = NEW_LABEL(line);
8370 LABEL *break_label = ISEQ_COMPILE_DATA(iseq)->end_label = NEW_LABEL(line);
8371 LABEL *end_label = NEW_LABEL(line);
8372 LABEL *adjust_label = NEW_LABEL(line);
8374 LABEL *next_catch_label = NEW_LABEL(line);
8375 LABEL *tmp_label = NULL;
8377 ISEQ_COMPILE_DATA(iseq)->loopval_popped = 0;
8378 push_ensure_entry(iseq, &enl, NULL, NULL);
/* nd_state == 1 means pre-test loop (`while cond; body; end`): jump to the
 * condition first.  The elided else-branch handles post-test (`begin..end while`). */
8380 if (RNODE_WHILE(node)->nd_state == 1) {
8381 ADD_INSNL(ret, line_node, jump, next_label);
8384 tmp_label = NEW_LABEL(line);
8385 ADD_INSNL(ret, line_node, jump, tmp_label);
/* Landing pads used by the NEXT catch entry: restore stack, then retest. */
8387 ADD_LABEL(ret, adjust_label);
8388 ADD_INSN(ret, line_node, putnil);
8389 ADD_LABEL(ret, next_catch_label);
8390 ADD_INSN(ret, line_node, pop);
8391 ADD_INSNL(ret, line_node, jump, next_label);
8392 if (tmp_label) ADD_LABEL(ret, tmp_label);
8394 ADD_LABEL(ret, redo_label);
/* Branch coverage: record this loop as a "while"/"until" branch site. */
8395 branches = decl_branch_base(iseq, PTR2NUM(node), nd_code_loc(node),
type == NODE_WHILE ?
"while" :
"until");
8397 const NODE *
const coverage_node = RNODE_WHILE(node)->nd_body ? RNODE_WHILE(node)->nd_body : node;
8398 add_trace_branch_coverage(
8401 nd_code_loc(coverage_node),
8402 nd_node_id(coverage_node),
/* Loop body value is always discarded. */
8407 CHECK(COMPILE_POPPED(ret,
"while body", RNODE_WHILE(node)->nd_body));
8408 ADD_LABEL(ret, next_label);
/* while: loop again when cond is truthy; until: loop again when falsy. */
8410 if (
type == NODE_WHILE) {
8411 CHECK(compile_branch_condition(iseq, ret, RNODE_WHILE(node)->nd_cond,
8412 redo_label, end_label));
8416 CHECK(compile_branch_condition(iseq, ret, RNODE_WHILE(node)->nd_cond,
8417 end_label, redo_label));
8420 ADD_LABEL(ret, end_label);
8421 ADD_ADJUST_RESTORE(ret, adjust_label);
8423 if (UNDEF_P(RNODE_WHILE(node)->nd_state)) {
8425 COMPILE_ERROR(ERROR_ARGS
"unsupported: putundef");
/* Normal loop result is nil. */
8429 ADD_INSN(ret, line_node, putnil);
8432 ADD_LABEL(ret, break_label);
8435 ADD_INSN(ret, line_node, pop);
/* Catch table: break/next/redo raised inside [redo_label, break_label)
 * are handled without unwinding out of this iseq. */
8438 ADD_CATCH_ENTRY(CATCH_TYPE_BREAK, redo_label, break_label, NULL,
8440 ADD_CATCH_ENTRY(CATCH_TYPE_NEXT, redo_label, break_label, NULL,
8442 ADD_CATCH_ENTRY(CATCH_TYPE_REDO, redo_label, break_label, NULL,
8443 ISEQ_COMPILE_DATA(iseq)->redo_label);
/* Restore the enclosing loop's state and pop our ensure entry. */
8445 ISEQ_COMPILE_DATA(iseq)->start_label = prev_start_label;
8446 ISEQ_COMPILE_DATA(iseq)->end_label = prev_end_label;
8447 ISEQ_COMPILE_DATA(iseq)->redo_label = prev_redo_label;
8448 ISEQ_COMPILE_DATA(iseq)->loopval_popped = prev_loopval_popped;
8449 ISEQ_COMPILE_DATA(iseq)->ensure_node_stack = ISEQ_COMPILE_DATA(iseq)->ensure_node_stack->prev;
/* compile_iter: compile a method call with a block (NODE_ITER) or a
 * `for` loop (NODE_FOR, which desugars to receiver.each { ... }).
 * Emits a retry label before the call so `retry` can restart it, builds the
 * block as a child ISEQ, then plants retry_end_l right after the call insn
 * and registers a BREAK catch entry spanning the call. */
8456 const int line = nd_line(node);
8457 const NODE *line_node = node;
8458 const rb_iseq_t *prevblock = ISEQ_COMPILE_DATA(iseq)->current_block;
8459 LABEL *retry_label = NEW_LABEL(line);
8460 LABEL *retry_end_l = NEW_LABEL(line);
8463 ADD_LABEL(ret, retry_label);
8464 if (nd_type_p(node, NODE_FOR)) {
/* `for`: compile the iterable, then send :each with the body as block. */
8465 CHECK(COMPILE(ret,
"iter caller (for)", RNODE_FOR(node)->nd_iter));
8467 ISEQ_COMPILE_DATA(iseq)->current_block = child_iseq =
8468 NEW_CHILD_ISEQ(RNODE_FOR(node)->nd_body, make_name_for_block(iseq),
8469 ISEQ_TYPE_BLOCK, line);
8470 ADD_SEND_WITH_BLOCK(ret, line_node, idEach,
INT2FIX(0), child_iseq);
/* Plain block: set current_block, then compile the call itself. */
8473 ISEQ_COMPILE_DATA(iseq)->current_block = child_iseq =
8474 NEW_CHILD_ISEQ(RNODE_ITER(node)->nd_body, make_name_for_block(iseq),
8475 ISEQ_TYPE_BLOCK, line);
8476 CHECK(COMPILE(ret,
"iter caller", RNODE_ITER(node)->nd_iter));
/* Walk back from the anchor's last element to the actual send-family
 * instruction so retry_end_l lands immediately after the call. */
8490 iobj = IS_INSN(last_elem) ? (
INSN*) last_elem : (
INSN*) get_prev_insn((
INSN*) last_elem);
8491 while (!IS_INSN_ID(iobj, send) && !IS_INSN_ID(iobj, invokesuper) && !IS_INSN_ID(iobj, sendforward) && !IS_INSN_ID(iobj, invokesuperforward)) {
8492 iobj = (
INSN*) get_prev_insn(iobj);
8494 ELEM_INSERT_NEXT(&iobj->link, (
LINK_ELEMENT*) retry_end_l);
8498 if (&iobj->link == LAST_ELEMENT(ret)) {
8504 ADD_INSN(ret, line_node, pop);
8507 ISEQ_COMPILE_DATA(iseq)->current_block = prevblock;
/* `break` inside the block unwinds to just after this call. */
8509 ADD_CATCH_ENTRY(CATCH_TYPE_BREAK, retry_label, retry_end_l, child_iseq, retry_end_l);
/* compile_for_masgn: compile the multiple-assignment target of a
 * `for a, b in ...` loop.  Emits runtime code that, when the yielded value
 * is a one-element array, tries Array.try_convert on its single element so
 * it can be destructured like a regular masgn target. */
8519 const NODE *line_node = node;
8520 const NODE *var = RNODE_FOR_MASGN(node)->nd_var;
8521 LABEL *not_single = NEW_LABEL(nd_line(var));
8522 LABEL *not_ary = NEW_LABEL(nd_line(var));
8523 CHECK(COMPILE(ret,
"for var", var));
/* if value.length == 1 ... */
8524 ADD_INSN(ret, line_node, dup);
8525 ADD_CALL(ret, line_node, idLength,
INT2FIX(0));
8526 ADD_INSN1(ret, line_node, putobject,
INT2FIX(1));
8527 ADD_CALL(ret, line_node, idEq,
INT2FIX(1));
8528 ADD_INSNL(ret, line_node, branchunless, not_single);
/* ... take value[0] and attempt Array.try_convert(value[0]). */
8529 ADD_INSN(ret, line_node, dup);
8530 ADD_INSN1(ret, line_node, putobject,
INT2FIX(0));
8531 ADD_CALL(ret, line_node, idAREF,
INT2FIX(1));
8532 ADD_INSN1(ret, line_node, putobject,
rb_cArray);
8533 ADD_INSN(ret, line_node, swap);
8534 ADD_CALL(ret, line_node, rb_intern(
"try_convert"),
INT2FIX(1));
8535 ADD_INSN(ret, line_node, dup);
/* try_convert returned nil -> keep the original value. */
8536 ADD_INSNL(ret, line_node, branchunless, not_ary);
8537 ADD_INSN(ret, line_node, swap);
8538 ADD_LABEL(ret, not_ary);
8539 ADD_INSN(ret, line_node, pop);
8540 ADD_LABEL(ret, not_single);
/* compile_break: compile a `break` node.
 * Inside a while/until loop compiled in this iseq (redo_label set) it is a
 * direct jump to the loop's end label; inside a block it becomes a
 * `throw TAG_BREAK`; anywhere else it is a compile error.
 * NOTE(review): branch-structure lines are elided in this excerpt. */
8547 const NODE *line_node = node;
8548 unsigned long throw_flag = 0;
8550 if (ISEQ_COMPILE_DATA(iseq)->redo_label != 0 && can_add_ensure_iseq(iseq)) {
/* break inside a while/until in the current iseq: adjust stack, run
 * pending ensure bodies inline, jump to the loop end. */
8552 LABEL *splabel = NEW_LABEL(0);
8553 ADD_LABEL(ret, splabel);
8554 ADD_ADJUST(ret, line_node, ISEQ_COMPILE_DATA(iseq)->redo_label);
8555 CHECK(COMPILE_(ret,
"break val (while/until)", RNODE_BREAK(node)->nd_stts,
8556 ISEQ_COMPILE_DATA(iseq)->loopval_popped));
8557 add_ensure_iseq(ret, iseq, 0);
8558 ADD_INSNL(ret, line_node, jump, ISEQ_COMPILE_DATA(iseq)->end_label);
8559 ADD_ADJUST_RESTORE(ret, splabel);
8562 ADD_INSN(ret, line_node, putnil);
/* Otherwise walk up parent iseqs to find something break can escape to. */
8569 if (!ISEQ_COMPILE_DATA(ip)) {
8574 if (ISEQ_COMPILE_DATA(ip)->redo_label != 0) {
/* Target loop lives in an ancestor iseq: throw must not escape the VM frame chain. */
8575 throw_flag = VM_THROW_NO_ESCAPE_FLAG;
8577 else if (ISEQ_BODY(ip)->
type == ISEQ_TYPE_BLOCK) {
8580 else if (ISEQ_BODY(ip)->
type == ISEQ_TYPE_EVAL) {
8581 COMPILE_ERROR(ERROR_ARGS
"Can't escape from eval with break");
8585 ip = ISEQ_BODY(ip)->parent_iseq;
8590 CHECK(COMPILE(ret,
"break val (block)", RNODE_BREAK(node)->nd_stts));
8591 ADD_INSN1(ret, line_node,
throw,
INT2FIX(throw_flag | TAG_BREAK));
8593 ADD_INSN(ret, line_node, pop);
8597 COMPILE_ERROR(ERROR_ARGS
"Invalid break");
/* compile_next: compile a `next` node.
 * In a while/until loop it jumps to the condition-check label; in a block
 * it jumps to the block's end label (returning the value); otherwise it
 * becomes `throw TAG_NEXT` after searching ancestor iseqs, or errors out. */
8606 const NODE *line_node = node;
8607 unsigned long throw_flag = 0;
8609 if (ISEQ_COMPILE_DATA(iseq)->redo_label != 0 && can_add_ensure_iseq(iseq)) {
/* next inside while/until: value compiled first, then ensure bodies,
 * then jump to the loop's start (condition) label. */
8610 LABEL *splabel = NEW_LABEL(0);
8611 debugs(
"next in while loop\n");
8612 ADD_LABEL(ret, splabel);
8613 CHECK(COMPILE(ret,
"next val/valid syntax?", RNODE_NEXT(node)->nd_stts));
8614 add_ensure_iseq(ret, iseq, 0);
8615 ADD_ADJUST(ret, line_node, ISEQ_COMPILE_DATA(iseq)->redo_label);
8616 ADD_INSNL(ret, line_node, jump, ISEQ_COMPILE_DATA(iseq)->start_label);
8617 ADD_ADJUST_RESTORE(ret, splabel);
8619 ADD_INSN(ret, line_node, putnil);
8622 else if (ISEQ_COMPILE_DATA(iseq)->end_label && can_add_ensure_iseq(iseq)) {
/* next inside a block body: jump to the block's end label. */
8623 LABEL *splabel = NEW_LABEL(0);
8624 debugs(
"next in block\n");
8625 ADD_LABEL(ret, splabel);
8626 ADD_ADJUST(ret, line_node, ISEQ_COMPILE_DATA(iseq)->start_label);
8627 CHECK(COMPILE(ret,
"next val", RNODE_NEXT(node)->nd_stts));
8628 add_ensure_iseq(ret, iseq, 0);
8629 ADD_INSNL(ret, line_node, jump, ISEQ_COMPILE_DATA(iseq)->end_label);
8630 ADD_ADJUST_RESTORE(ret, splabel);
8633 ADD_INSN(ret, line_node, putnil);
/* Fallback: search ancestor iseqs for a frame that can handle next. */
8640 if (!ISEQ_COMPILE_DATA(ip)) {
8645 throw_flag = VM_THROW_NO_ESCAPE_FLAG;
8646 if (ISEQ_COMPILE_DATA(ip)->redo_label != 0) {
8650 else if (ISEQ_BODY(ip)->
type == ISEQ_TYPE_BLOCK) {
8653 else if (ISEQ_BODY(ip)->
type == ISEQ_TYPE_EVAL) {
8654 COMPILE_ERROR(ERROR_ARGS
"Can't escape from eval with next");
8658 ip = ISEQ_BODY(ip)->parent_iseq;
8661 CHECK(COMPILE(ret,
"next val", RNODE_NEXT(node)->nd_stts));
8662 ADD_INSN1(ret, line_node,
throw,
INT2FIX(throw_flag | TAG_NEXT));
8665 ADD_INSN(ret, line_node, pop);
8669 COMPILE_ERROR(ERROR_ARGS
"Invalid next");
/* compile_redo: compile a `redo` node.
 * In a while/until loop it jumps back to the redo label (body start);
 * in a block it jumps to the block's start label; otherwise it becomes
 * `throw TAG_REDO` (never escaping the frame), or a compile error. */
8679 const NODE *line_node = node;
8681 if (ISEQ_COMPILE_DATA(iseq)->redo_label && can_add_ensure_iseq(iseq)) {
8682 LABEL *splabel = NEW_LABEL(0);
8683 debugs(
"redo in while");
8684 ADD_LABEL(ret, splabel);
8685 ADD_ADJUST(ret, line_node, ISEQ_COMPILE_DATA(iseq)->redo_label);
8686 add_ensure_iseq(ret, iseq, 0);
8687 ADD_INSNL(ret, line_node, jump, ISEQ_COMPILE_DATA(iseq)->redo_label);
8688 ADD_ADJUST_RESTORE(ret, splabel);
8690 ADD_INSN(ret, line_node, putnil);
/* redo in a block body (but not inside eval, which has no stable frame). */
8693 else if (ISEQ_BODY(iseq)->
type != ISEQ_TYPE_EVAL && ISEQ_COMPILE_DATA(iseq)->start_label && can_add_ensure_iseq(iseq)) {
8694 LABEL *splabel = NEW_LABEL(0);
8696 debugs(
"redo in block");
8697 ADD_LABEL(ret, splabel);
8698 add_ensure_iseq(ret, iseq, 0);
8699 ADD_ADJUST(ret, line_node, ISEQ_COMPILE_DATA(iseq)->start_label);
8700 ADD_INSNL(ret, line_node, jump, ISEQ_COMPILE_DATA(iseq)->start_label);
8701 ADD_ADJUST_RESTORE(ret, splabel);
8704 ADD_INSN(ret, line_node, putnil);
/* Fallback: walk ancestor iseqs looking for a loop/block frame. */
8711 if (!ISEQ_COMPILE_DATA(ip)) {
8716 if (ISEQ_COMPILE_DATA(ip)->redo_label != 0) {
8719 else if (ISEQ_BODY(ip)->
type == ISEQ_TYPE_BLOCK) {
8722 else if (ISEQ_BODY(ip)->
type == ISEQ_TYPE_EVAL) {
8723 COMPILE_ERROR(ERROR_ARGS
"Can't escape from eval with redo");
8727 ip = ISEQ_BODY(ip)->parent_iseq;
/* redo carries no value: push nil and throw TAG_REDO within the frame chain. */
8730 ADD_INSN(ret, line_node, putnil);
8731 ADD_INSN1(ret, line_node,
throw,
INT2FIX(VM_THROW_NO_ESCAPE_FLAG | TAG_REDO));
8734 ADD_INSN(ret, line_node, pop);
8738 COMPILE_ERROR(ERROR_ARGS
"Invalid redo");
/* compile_retry: compile a `retry` node.
 * Only valid inside a rescue clause (ISEQ_TYPE_RESCUE), where it emits
 * `putnil; throw TAG_RETRY` — the RETRY catch entry installed by
 * compile_rescue restarts the protected region.  Elsewhere: compile error. */
8748 const NODE *line_node = node;
8750 if (ISEQ_BODY(iseq)->
type == ISEQ_TYPE_RESCUE) {
8751 ADD_INSN(ret, line_node, putnil);
8752 ADD_INSN1(ret, line_node,
throw,
INT2FIX(TAG_RETRY));
8755 ADD_INSN(ret, line_node, pop);
8759 COMPILE_ERROR(ERROR_ARGS
"Invalid retry");
/* compile_rescue: compile a begin/rescue[/else] construct.
 * The rescue clauses are compiled into a separate ISEQ_TYPE_RESCUE child
 * iseq; a RESCUE catch entry covers [lstart, lend) and a RETRY entry lets
 * `retry` jump back to lstart. */
8768 const int line = nd_line(node);
8769 const NODE *line_node = node;
8770 LABEL *lstart = NEW_LABEL(line);
8771 LABEL *lend = NEW_LABEL(line);
8772 LABEL *lcont = NEW_LABEL(line);
8773 const rb_iseq_t *rescue = NEW_CHILD_ISEQ(RNODE_RESCUE(node)->nd_resq,
8775 ISEQ_BODY(iseq)->location.label),
8776 ISEQ_TYPE_RESCUE, line);
/* Mark labels so the optimizer knows this is a rescued region boundary. */
8778 lstart->rescued = LABEL_RESCUE_BEG;
8779 lend->rescued = LABEL_RESCUE_END;
8780 ADD_LABEL(ret, lstart);
/* in_rescue is set while compiling the protected body (restored after). */
8782 bool prev_in_rescue = ISEQ_COMPILE_DATA(iseq)->in_rescue;
8783 ISEQ_COMPILE_DATA(iseq)->in_rescue =
true;
8785 CHECK(COMPILE(ret,
"rescue head", RNODE_RESCUE(node)->nd_head));
8787 ISEQ_COMPILE_DATA(iseq)->in_rescue = prev_in_rescue;
8789 ADD_LABEL(ret, lend);
8790 if (RNODE_RESCUE(node)->nd_else) {
/* else clause replaces the body's value when no exception was raised. */
8791 ADD_INSN(ret, line_node, pop);
8792 CHECK(COMPILE(ret,
"rescue else", RNODE_RESCUE(node)->nd_else));
8794 ADD_INSN(ret, line_node, nop);
8795 ADD_LABEL(ret, lcont);
8798 ADD_INSN(ret, line_node, pop);
/* Register the rescue handler and the retry re-entry point. */
8802 ADD_CATCH_ENTRY(CATCH_TYPE_RESCUE, lstart, lend, rescue, lcont);
8803 ADD_CATCH_ENTRY(CATCH_TYPE_RETRY, lend, lcont, NULL, lstart);
/* compile_resbody: compile the chain of rescue clauses (NODE_RESBODY) that
 * forms the body of an ISEQ_TYPE_RESCUE iseq.  For each clause: match the
 * current exception ($! at LVAR_ERRINFO) against the clause's exception
 * classes via `checkmatch`, bind the exception variable if any, run the
 * clause body, then fall through to the next clause on mismatch. */
8810 const int line = nd_line(node);
8811 const NODE *line_node = node;
8812 const NODE *resq = node;
8814 LABEL *label_miss, *label_hit;
8817 label_miss = NEW_LABEL(line);
8818 label_hit = NEW_LABEL(line);
8820 narg = RNODE_RESBODY(resq)->nd_args;
8822 switch (nd_type(narg)) {
/* Literal class list: test each entry with VM_CHECKMATCH_TYPE_RESCUE. */
8825 ADD_GETLOCAL(ret, line_node, LVAR_ERRINFO, 0);
8826 CHECK(COMPILE(ret,
"rescue arg", RNODE_LIST(narg)->nd_head));
8827 ADD_INSN1(ret, line_node, checkmatch,
INT2FIX(VM_CHECKMATCH_TYPE_RESCUE));
8828 ADD_INSNL(ret, line_node, branchif, label_hit);
8829 narg = RNODE_LIST(narg)->nd_next;
/* Splat argument: match against each element of the array (ARRAY flag). */
8835 ADD_GETLOCAL(ret, line_node, LVAR_ERRINFO, 0);
8836 CHECK(COMPILE(ret,
"rescue/cond splat", narg));
8837 ADD_INSN1(ret, line_node, checkmatch,
INT2FIX(VM_CHECKMATCH_TYPE_RESCUE | VM_CHECKMATCH_ARRAY));
8838 ADD_INSNL(ret, line_node, branchif, label_hit);
8841 UNKNOWN_NODE(
"NODE_RESBODY", narg, COMPILE_NG);
/* No explicit class list: match against StandardError (default). */
8845 ADD_GETLOCAL(ret, line_node, LVAR_ERRINFO, 0);
8847 ADD_INSN1(ret, line_node, checkmatch,
INT2FIX(VM_CHECKMATCH_TYPE_RESCUE));
8848 ADD_INSNL(ret, line_node, branchif, label_hit);
8850 ADD_INSNL(ret, line_node, jump, label_miss);
8851 ADD_LABEL(ret, label_hit);
/* Bind `rescue => e` target if present. */
8854 if (RNODE_RESBODY(resq)->nd_exc_var) {
8855 CHECK(COMPILE_POPPED(ret,
"resbody exc_var", RNODE_RESBODY(resq)->nd_exc_var));
/* Empty `begin; end` clause body with no exc var: result is nil
 * (synthetic insn so it carries no bogus line number). */
8858 if (nd_type(RNODE_RESBODY(resq)->nd_body) == NODE_BEGIN && RNODE_BEGIN(RNODE_RESBODY(resq)->nd_body)->nd_body == NULL && !RNODE_RESBODY(resq)->nd_exc_var) {
8860 ADD_SYNTHETIC_INSN(ret, nd_line(RNODE_RESBODY(resq)->nd_body), -1, putnil);
8863 CHECK(COMPILE(ret,
"resbody body", RNODE_RESBODY(resq)->nd_body));
8866 if (ISEQ_COMPILE_DATA(iseq)->option->tailcall_optimization) {
/* nop blocks tailcall optimization from eliding the leave boundary here. */
8867 ADD_INSN(ret, line_node, nop);
8869 ADD_INSN(ret, line_node, leave);
8870 ADD_LABEL(ret, label_miss);
/* Advance to the next rescue clause in the chain. */
8871 resq = RNODE_RESBODY(resq)->nd_next;
/* compile_ensure: compile begin/ensure.
 * The ensure body is compiled twice conceptually: once as an
 * ISEQ_TYPE_ENSURE child iseq (run on exceptional exit via the catch
 * table) and once inline (via the ensure_node_stack) for normal exits. */
8879 const int line = nd_line(RNODE_ENSURE(node)->nd_ensr);
8880 const NODE *line_node = node;
8882 const rb_iseq_t *ensure = NEW_CHILD_ISEQ(RNODE_ENSURE(node)->nd_ensr,
8884 ISEQ_TYPE_ENSURE, line);
8885 LABEL *lstart = NEW_LABEL(line);
8886 LABEL *lend = NEW_LABEL(line);
8887 LABEL *lcont = NEW_LABEL(line);
/* Dry-run compile of the ensure clause to see whether it ends in `leave`. */
8895 CHECK(COMPILE_POPPED(ensr,
"ensure ensr", RNODE_ENSURE(node)->nd_ensr));
8897 last_leave = last && IS_INSN(last) && IS_INSN_ID(last, leave);
/* Make this ensure visible to break/next/return compiled inside the head. */
8902 push_ensure_entry(iseq, &enl, &er, RNODE_ENSURE(node)->nd_ensr);
8904 ADD_LABEL(ret, lstart);
8905 CHECK(COMPILE_(ret,
"ensure head", RNODE_ENSURE(node)->nd_head, (popped | last_leave)))
8906 ADD_LABEL(ret, lend);
8908 if (!popped && last_leave) ADD_INSN(ret, line_node, putnil);
8909 ADD_LABEL(ret, lcont);
8910 if (last_leave) ADD_INSN(ret, line_node, pop);
/* Emit ENSURE catch entries for every protected sub-range recorded while
 * compiling the head (ranges are split around inline ensure copies). */
8912 erange = ISEQ_COMPILE_DATA(iseq)->ensure_node_stack->erange;
8913 if (lstart->link.next != &lend->link) {
8915 ADD_CATCH_ENTRY(CATCH_TYPE_ENSURE, erange->begin, erange->end,
8917 erange = erange->next;
8921 ISEQ_COMPILE_DATA(iseq)->ensure_node_stack = enl.prev;
/* compile_return: compile a `return` node.
 * Looks through enclosing RESCUE/ENSURE iseqs to find the real frame type;
 * in a method it compiles the value, runs ensure bodies, and emits `leave`;
 * otherwise (block, lambda, etc. — elided here) it emits `throw TAG_RETURN`. */
8928 const NODE *line_node = node;
8931 enum rb_iseq_type
type = ISEQ_BODY(iseq)->type;
8933 enum rb_iseq_type t =
type;
8934 const NODE *retval = RNODE_RETURN(node)->nd_stts;
/* Skip rescue/ensure wrapper iseqs to find the enclosing frame's type. */
8937 while (t == ISEQ_TYPE_RESCUE || t == ISEQ_TYPE_ENSURE) {
8938 if (!(is = ISEQ_BODY(is)->parent_iseq))
break;
8939 t = ISEQ_BODY(is)->type;
8943 case ISEQ_TYPE_MAIN:
/* Top-level `return` exits the script; its argument is meaningless. */
8945 rb_warn(
"argument of top-level return is ignored");
8949 type = ISEQ_TYPE_METHOD;
8956 if (
type == ISEQ_TYPE_METHOD) {
8957 splabel = NEW_LABEL(0);
8958 ADD_LABEL(ret, splabel);
/* Drop any temporaries so `leave` sees only the return value. */
8959 ADD_ADJUST(ret, line_node, 0);
8962 CHECK(COMPILE(ret,
"return nd_stts (return val)", retval));
8964 if (
type == ISEQ_TYPE_METHOD && can_add_ensure_iseq(iseq)) {
8965 add_ensure_iseq(ret, iseq, 1);
8967 ADD_INSN(ret, line_node, leave);
8968 ADD_ADJUST_RESTORE(ret, splabel);
8971 ADD_INSN(ret, line_node, putnil);
/* Non-method frame: unwind with throw TAG_RETURN instead of leave. */
8975 ADD_INSN1(ret, line_node,
throw,
INT2FIX(TAG_RETURN));
8977 ADD_INSN(ret, line_node, pop);
/* Fragment of a peephole helper (presumably drop/trim of a trailing putnil
 * before an unreachable tail — TODO confirm against full source; the
 * function signature is elided in this excerpt).  Walks back over TRACE
 * and ADJUST elements to test whether the last real insn is `putnil`, and
 * finally unlinks the anchor's last element. */
8988 if (!i)
return false;
8989 if (IS_TRACE(i)) i = i->prev;
8990 if (!IS_INSN(i) || !IS_INSN_ID(i, putnil))
return false;
8992 if (IS_ADJUST(i)) i = i->prev;
8993 if (!IS_INSN(i))
return false;
8994 switch (INSN_OF(i)) {
/* Detach the trailing element from the list (doubly-linked anchor). */
9001 (ret->last = last->prev)->next = NULL;
/* compile_evstr: compile string interpolation content (`#{...}`).
 * If the expression is not statically known to produce a String, emit the
 * dup/objtostring/anytostring sequence that calls to_s with method-cache
 * support and guarantees a String result. */
9008 CHECK(COMPILE_(ret,
"nd_body", node, popped));
9010 if (!popped && !all_string_result_p(node)) {
9011 const NODE *line_node = node;
9012 const unsigned int flag = VM_CALL_FCALL;
/* dup so anytostring can fall back to the original object if to_s
 * returns a non-String. */
9016 ADD_INSN(ret, line_node, dup);
9017 ADD_INSN1(ret, line_node, objtostring, new_callinfo(iseq, idTo_s, 0, flag, NULL, FALSE));
9018 ADD_INSN(ret, line_node, anytostring);
/* compile_lvar: emit a getlocal for local variable `id`.
 * The slot index is computed relative to the owning (local) iseq's table;
 * get_lvar_level supplies how many scopes up that iseq is. */
9026 int idx = ISEQ_BODY(ISEQ_BODY(iseq)->local_iseq)->local_table_size - get_local_var_idx(iseq,
id);
9028 debugs(
"id: %s idx: %d\n", rb_id2name(
id), idx);
9029 ADD_GETLOCAL(ret, line_node, idx, get_lvar_level(iseq));
/* qcall_branch_start: begin the nil-guard for a safe-navigation call
 * (`recv&.m`).  Dups the receiver and branches to else_label when it is
 * nil; records the "then" arm for branch coverage.  Returns else_label
 * for qcall_branch_end to close the branch. */
9035 LABEL *else_label = NEW_LABEL(nd_line(line_node));
9038 br = decl_branch_base(iseq, PTR2NUM(node), nd_code_loc(node),
"&.");
9040 ADD_INSN(recv, line_node, dup);
9041 ADD_INSNL(recv, line_node, branchnil, else_label);
9042 add_trace_branch_coverage(iseq, recv, nd_code_loc(node), nd_node_id(node), 0,
"then", br);
/* qcall_branch_end: close the nil-guard started by qcall_branch_start.
 * No-op when there was no guard (else_label NULL); otherwise jumps over
 * the "else" arm, places else_label (receiver was nil, nil is the result),
 * and records the "else" arm for branch coverage. */
9050 if (!else_label)
return;
9051 end_label = NEW_LABEL(nd_line(line_node));
9052 ADD_INSNL(ret, line_node, jump, end_label);
9053 ADD_LABEL(ret, else_label);
9054 add_trace_branch_coverage(iseq, ret, nd_code_loc(node), nd_node_id(node), 1,
"else", branches);
9055 ADD_LABEL(ret, end_label);
/* compile_call_precheck_freeze (fragment): specialize `"str".freeze` and
 * `-"str"` on literal string receivers into the dedicated
 * opt_str_freeze / opt_str_uminus instructions, provided there are no
 * arguments, no block, and specialized instructions are enabled. */
9064 if (get_nd_recv(node) &&
9065 (nd_type_p(get_nd_recv(node), NODE_STR) || nd_type_p(get_nd_recv(node), NODE_FILE)) &&
9066 (get_node_call_nd_mid(node) == idFreeze || get_node_call_nd_mid(node) == idUMinus) &&
9067 get_nd_args(node) == NULL &&
9068 ISEQ_COMPILE_DATA(iseq)->current_block == NULL &&
9069 ISEQ_COMPILE_DATA(iseq)->option->specialized_instruction) {
9070 VALUE str = get_string_value(get_nd_recv(node));
9071 if (get_node_call_nd_mid(node) == idUMinus) {
9072 ADD_INSN2(ret, line_node, opt_str_uminus, str,
9073 new_callinfo(iseq, idUMinus, 0, 0, NULL, FALSE));
9076 ADD_INSN2(ret, line_node, opt_str_freeze, str,
9077 new_callinfo(iseq, idFreeze, 0, 0, NULL, FALSE));
/* Specialized insns push the result; drop it when the value is unused. */
9081 ADD_INSN(ret, line_node, pop);
/* iseq_has_builtin_function_table: true when this compilation unit was
 * given a table of C builtin functions (core library compilation). */
9089iseq_has_builtin_function_table(
const rb_iseq_t *iseq)
9091 return ISEQ_COMPILE_DATA(iseq)->builtin_function_table != NULL;
/* iseq_builtin_function_lookup: linear search of the builtin function
 * table (terminated by index == -1) for an entry named `name`.
 * (Return statements are elided in this excerpt.) */
9095iseq_builtin_function_lookup(
const rb_iseq_t *iseq,
const char *name)
9098 const struct rb_builtin_function *table = ISEQ_COMPILE_DATA(iseq)->builtin_function_table;
9099 for (i=0; table[i].index != -1; i++) {
9100 if (strcmp(table[i].name, name) == 0) {
/* iseq_builtin_function_name: decide whether a call node is a builtin
 * invocation and return the builtin's bare name, or NULL.
 * Recognized spellings: `__builtin.name` (VCALL receiver), `Primitive.name`
 * (const receiver), and the `__builtin_name` prefix form. */
9108iseq_builtin_function_name(
const enum node_type
type,
const NODE *recv,
ID mid)
9110 const char *name = rb_id2name(mid);
9111 static const char prefix[] =
"__builtin_";
9112 const size_t prefix_len =
sizeof(prefix) - 1;
9117 switch (nd_type(recv)) {
/* `__builtin.foo` — receiver is the vcall `__builtin`. */
9119 if (RNODE_VCALL(recv)->nd_mid == rb_intern(
"__builtin")) {
/* `Primitive.foo` — receiver is the constant `Primitive`. */
9124 if (RNODE_CONST(recv)->nd_vid == rb_intern(
"Primitive")) {
/* Receiver-less `__builtin_foo` — strip the prefix. */
9134 if (UNLIKELY(strncmp(prefix, name, prefix_len) == 0)) {
9135 return &name[prefix_len];
/* delegate_call_p: detect whether all `argc` arguments of a builtin call
 * are consecutive level-0 local variable reads, so the call can use the
 * opt_invokebuiltin_delegate instruction (which passes locals directly
 * instead of pushing them).  On success stores the first local's index in
 * *pstart_index.  (Several lines are elided in this excerpt.) */
9144delegate_call_p(
const rb_iseq_t *iseq,
unsigned int argc,
const LINK_ANCHOR *args,
unsigned int *pstart_index)
9151 else if (argc <= ISEQ_BODY(iseq)->local_table_size) {
9152 unsigned int start=0;
/* Try every possible starting slot that leaves room for argc locals. */
9157 argc + start <= ISEQ_BODY(iseq)->local_table_size;
9161 for (
unsigned int i=start; i-start<argc; i++) {
9162 if (IS_INSN(elem) &&
9163 INSN_OF(elem) == BIN(getlocal)) {
9164 int local_index =
FIX2INT(OPERAND_AT(elem, 0));
9165 int local_level =
FIX2INT(OPERAND_AT(elem, 1));
/* Only same-frame locals qualify for delegation. */
9167 if (local_level == 0) {
/* getlocal operands count from the env top; convert to table index. */
9168 unsigned int index = ISEQ_BODY(iseq)->local_table_size - (local_index - VM_ENV_DATA_SIZE + 1);
9170 fprintf(stderr,
"lvar:%s (%d), id:%s (%d) local_index:%d, local_size:%d\n",
9171 rb_id2name(ISEQ_BODY(iseq)->local_table[i]), i,
9172 rb_id2name(ISEQ_BODY(iseq)->local_table[index]), index,
9173 local_index, (
int)ISEQ_BODY(iseq)->local_table_size);
9197 *pstart_index = start;
/* compile_builtin_attr: handle `Primitive.attr! :leaf, ...` — set builtin
 * attribute flags on the iseq from a list of symbol arguments.
 * Recognized attrs: leaf, inline_block, use_block, c_trace,
 * without_interrupts.  Anything else is a compile error. */
9211 if (!node)
goto no_arg;
9213 if (!nd_type_p(node, NODE_LIST))
goto bad_arg;
9214 const NODE *next = RNODE_LIST(node)->nd_next;
9216 node = RNODE_LIST(node)->nd_head;
9217 if (!node)
goto no_arg;
9218 switch (nd_type(node)) {
9220 symbol = rb_node_sym_string_val(node);
9226 if (!
SYMBOL_P(symbol))
goto non_symbol_arg;
/* Dispatch on the attr name (as a C string). */
9229 if (strcmp(RSTRING_PTR(
string),
"leaf") == 0) {
9230 ISEQ_BODY(iseq)->builtin_attrs |= BUILTIN_ATTR_LEAF;
9232 else if (strcmp(RSTRING_PTR(
string),
"inline_block") == 0) {
9233 ISEQ_BODY(iseq)->builtin_attrs |= BUILTIN_ATTR_INLINE_BLOCK;
9235 else if (strcmp(RSTRING_PTR(
string),
"use_block") == 0) {
9236 iseq_set_use_block(iseq);
9238 else if (strcmp(RSTRING_PTR(
string),
"c_trace") == 0) {
9240 ISEQ_BODY(iseq)->builtin_attrs |= BUILTIN_ATTR_C_TRACE;
9242 else if (strcmp(RSTRING_PTR(
string),
"without_interrupts") == 0) {
9243 ISEQ_BODY(iseq)->builtin_attrs |= BUILTIN_ATTR_WITHOUT_INTERRUPTS;
/* Error exits (labels elided in this excerpt). */
9252 COMPILE_ERROR(ERROR_ARGS
"attr!: no argument");
9255 COMPILE_ERROR(ERROR_ARGS
"non symbol argument to attr!: %s", rb_builtin_class_name(symbol));
9258 COMPILE_ERROR(ERROR_ARGS
"unknown argument to attr!: %s", RSTRING_PTR(
string));
9261 UNKNOWN_NODE(
"attr!", node, COMPILE_NG);
/* compile_builtin_arg: handle `Primitive.arg! :name` — push the named
 * local variable (a method parameter) for use by the surrounding builtin
 * call.  Exactly one symbol argument is required. */
9269 if (!node)
goto no_arg;
9270 if (!nd_type_p(node, NODE_LIST))
goto bad_arg;
9271 if (RNODE_LIST(node)->nd_next)
goto too_many_arg;
9272 node = RNODE_LIST(node)->nd_head;
9273 if (!node)
goto no_arg;
9274 switch (nd_type(node)) {
9276 name = rb_node_sym_string_val(node);
9281 if (!
SYMBOL_P(name))
goto non_symbol_arg;
/* Emit a getlocal for the named parameter. */
9283 compile_lvar(iseq, ret, line_node,
SYM2ID(name));
/* Error exits (labels elided in this excerpt). */
9287 COMPILE_ERROR(ERROR_ARGS
"arg!: no argument");
9290 COMPILE_ERROR(ERROR_ARGS
"arg!: too many argument");
9293 COMPILE_ERROR(ERROR_ARGS
"non symbol argument to arg!: %s",
9294 rb_builtin_class_name(name));
9297 UNKNOWN_NODE(
"arg!", node, COMPILE_NG);
/* mandatory_node: find the then-branch of the root `if Primitive.mandatory_only?`
 * node — the body compiled as the mandatory-only variant of the method.
 * Bug (not error) if the root is not such an `if`, since the caller
 * already verified the pattern. */
9303 const NODE *node = ISEQ_COMPILE_DATA(iseq)->root_node;
9304 if (nd_type(node) == NODE_IF && RNODE_IF(node)->nd_cond == cond_node) {
9305 return RNODE_IF(node)->nd_body;
9308 rb_bug(
"mandatory_node: can't find mandatory node");
/* compile_builtin_mandatory_only_method: build a second iseq for the
 * method containing only its mandatory (lead) parameters, used when
 * `Primitive.mandatory_only?` guards a fast path.  Synthesizes an ARGS
 * node with just lead_num params, a trimmed local table (optional/rest
 * locals removed), and a SCOPE node wrapping the mandatory body, then
 * compiles it and stores it as the iseq's mandatory_only_iseq. */
9313compile_builtin_mandatory_only_method(
rb_iseq_t *iseq,
const NODE *node,
const NODE *line_node)
9317 .pre_args_num = ISEQ_BODY(iseq)->param.lead_num,
9320 rb_node_init(RNODE(&args_node), NODE_ARGS);
9321 args_node.nd_ainfo = args;
/* Local table = lead params + non-parameter locals (skip opt/rest/etc.). */
9324 const int skip_local_size = ISEQ_BODY(iseq)->param.size - ISEQ_BODY(iseq)->param.lead_num;
9325 const int table_size = ISEQ_BODY(iseq)->local_table_size - skip_local_size;
9329 tbl->size = table_size;
9334 for (i=0; i<ISEQ_BODY(iseq)->param.lead_num; i++) {
9335 tbl->ids[i] = ISEQ_BODY(iseq)->local_table[i];
9338 for (; i<table_size; i++) {
9339 tbl->ids[i] = ISEQ_BODY(iseq)->local_table[i + skip_local_size];
/* Wrap the mandatory body in a synthetic SCOPE node. */
9343 rb_node_init(RNODE(&scope_node), NODE_SCOPE);
9344 scope_node.nd_tbl = tbl;
9345 scope_node.nd_body = mandatory_node(iseq, node);
9346 scope_node.nd_parent = NULL;
9347 scope_node.nd_args = &args_node;
9349 VALUE ast_value = rb_ruby_ast_new(RNODE(&scope_node));
9352 rb_iseq_new_with_opt(ast_value, rb_iseq_base_label(iseq),
9353 rb_iseq_path(iseq), rb_iseq_realpath(iseq),
9354 nd_line(line_node), NULL, 0,
9355 ISEQ_TYPE_METHOD, ISEQ_COMPILE_DATA(iseq)->option,
9356 ISEQ_BODY(iseq)->variable.script_lines);
/* Write barrier: the parent iseq now references the new child iseq. */
9357 RB_OBJ_WRITE(iseq, &ISEQ_BODY(iseq)->mandatory_only_iseq, (
VALUE)mandatory_only_iseq);
/* compile_builtin_function_call: compile a `Primitive.*` / `__builtin_*`
 * invocation.  Special pseudo-functions (cstmt!/cexpr!/cconst!/cinit!/
 * attr!/arg!/mandatory_only?) are handled inline at compile time; real
 * builtins are looked up in the table and emitted as invokebuiltin (or
 * the delegate variant when args are consecutive locals). */
9367 NODE *args_node = get_nd_args(node);
/* Builtins never take a Ruby block. */
9369 if (parent_block != NULL) {
9370 COMPILE_ERROR(ERROR_ARGS_AT(line_node)
"should not call builtins here.");
9374# define BUILTIN_INLINE_PREFIX "_bi"
9375 char inline_func[
sizeof(BUILTIN_INLINE_PREFIX) +
DECIMAL_SIZE_OF(
int)];
9376 bool cconst =
false;
/* Inline-C pseudo builtins get a generated per-line "_bi<N>" name. */
9381 if (strcmp(
"cstmt!", builtin_func) == 0 ||
9382 strcmp(
"cexpr!", builtin_func) == 0) {
9385 else if (strcmp(
"cconst!", builtin_func) == 0) {
9388 else if (strcmp(
"cinit!", builtin_func) == 0) {
9392 else if (strcmp(
"attr!", builtin_func) == 0) {
9393 return compile_builtin_attr(iseq, args_node);
9395 else if (strcmp(
"arg!", builtin_func) == 0) {
9396 return compile_builtin_arg(iseq, ret, args_node, line_node, popped);
9398 else if (strcmp(
"mandatory_only?", builtin_func) == 0) {
/* Must be the iseq's first emitted code and sit in an if-condition. */
9400 rb_bug(
"mandatory_only? should be in if condition");
9402 else if (!LIST_INSN_SIZE_ZERO(ret)) {
9403 rb_bug(
"mandatory_only? should be put on top");
/* The main variant sees `false`; the mandatory-only clone is built here. */
9406 ADD_INSN1(ret, line_node, putobject,
Qfalse);
9407 return compile_builtin_mandatory_only_method(iseq, node, line_node);
9410 rb_bug(
"can't find builtin function:%s", builtin_func);
9413 COMPILE_ERROR(ERROR_ARGS
"can't find builtin function:%s", builtin_func);
9417 int inline_index = nd_line(node);
9418 snprintf(inline_func,
sizeof(inline_func), BUILTIN_INLINE_PREFIX
"%d", inline_index);
9419 builtin_func = inline_func;
/* cconst!: evaluate the builtin at compile time and embed the result. */
9425 typedef VALUE(*builtin_func0)(
void *,
VALUE);
9426 VALUE const_val = (*(builtin_func0)(uintptr_t)bf->func_ptr)(NULL,
Qnil);
9427 ADD_INSN1(ret, line_node, putobject, const_val);
9433 unsigned int flag = 0;
9435 VALUE argc = setup_args(iseq, args, args_node, &flag, &keywords);
/* Builtins have fixed arity; mismatch is a compile error. */
9437 if (
FIX2INT(argc) != bf->argc) {
9438 COMPILE_ERROR(ERROR_ARGS
"argc is not match for builtin function:%s (expect %d but %d)",
9439 builtin_func, bf->argc,
FIX2INT(argc));
9443 unsigned int start_index;
9444 if (delegate_call_p(iseq,
FIX2INT(argc), args, &start_index)) {
/* All args are consecutive locals: skip pushing them. */
9445 ADD_INSN2(ret, line_node, opt_invokebuiltin_delegate, bf,
INT2FIX(start_index));
9449 ADD_INSN1(ret, line_node, invokebuiltin, bf);
9452 if (popped) ADD_INSN(ret, line_node, pop);
/* compile_call: compile every flavor of method call node (CALL, FCALL,
 * VCALL, OPCALL, QCALL).  Handles easter-egg insns, goto/label support,
 * builtin dispatch, block-parameter proxying, safe navigation, the
 * opt_new `Class.new` specialization, and finally emits the send.
 * `assume_receiver` skips receiver compilation when the caller already
 * pushed it.  NOTE(review): many lines are elided in this excerpt. */
9458compile_call(
rb_iseq_t *iseq,
LINK_ANCHOR *
const ret,
const NODE *
const node,
const enum node_type
type,
const NODE *
const line_node,
int popped,
bool assume_receiver)
9466 ID mid = get_node_call_nd_mid(node);
9468 unsigned int flag = 0;
/* The block (if any) was stashed by compile_iter; consume it here. */
9470 const rb_iseq_t *parent_block = ISEQ_COMPILE_DATA(iseq)->current_block;
9471 LABEL *else_label = NULL;
9474 ISEQ_COMPILE_DATA(iseq)->current_block = NULL;
/* Easter-egg opcodes for two magic receiverless names. */
9480 if (nd_type_p(node, NODE_VCALL)) {
9485 CONST_ID(id_answer,
"the_answer_to_life_the_universe_and_everything");
9487 if (mid == id_bitblt) {
9488 ADD_INSN(ret, line_node, bitblt);
9491 else if (mid == id_answer) {
9492 ADD_INSN(ret, line_node, answer);
/* Optional goto/label support (used by some core compilations). */
9504 if (nd_type_p(node, NODE_FCALL) &&
9505 (mid == goto_id || mid == label_id)) {
9508 st_table *labels_table = ISEQ_COMPILE_DATA(iseq)->labels_table;
9511 if (!labels_table) {
9512 labels_table = st_init_numtable();
9513 ISEQ_COMPILE_DATA(iseq)->labels_table = labels_table;
9516 COMPILE_ERROR(ERROR_ARGS
"invalid goto/label format");
9520 if (mid == goto_id) {
9521 ADD_INSNL(ret, line_node, jump, label);
9524 ADD_LABEL(ret, label);
/* Builtin function call? Delegate entirely. */
9531 const char *builtin_func;
9532 if (UNLIKELY(iseq_has_builtin_function_table(iseq)) &&
9533 (builtin_func = iseq_builtin_function_name(
type, get_nd_recv(node), mid)) != NULL) {
9534 return compile_builtin_function_call(iseq, ret, node, line_node, popped, parent_block, args, builtin_func);
9538 if (!assume_receiver) {
9539 if (
type == NODE_CALL ||
type == NODE_OPCALL ||
type == NODE_QCALL) {
/* `blockparam.call` reads the raw block param to enable proc-less yield. */
9542 if (mid == idCall &&
9543 nd_type_p(get_nd_recv(node), NODE_LVAR) &&
9544 iseq_block_param_id_p(iseq, RNODE_LVAR(get_nd_recv(node))->nd_vid, &idx, &level)) {
9545 ADD_INSN2(recv, get_nd_recv(node), getblockparamproxy,
INT2FIX(idx + VM_ENV_DATA_SIZE - 1),
INT2FIX(level));
9547 else if (private_recv_p(node)) {
9548 ADD_INSN(recv, node, putself);
9549 flag |= VM_CALL_FCALL;
9552 CHECK(COMPILE(recv,
"recv", get_nd_recv(node)));
/* Safe navigation: install the nil-guard around the call. */
9555 if (
type == NODE_QCALL) {
9556 else_label = qcall_branch_start(iseq, recv, &branches, node, line_node);
9559 else if (
type == NODE_FCALL ||
type == NODE_VCALL) {
9560 ADD_CALL_RECEIVER(recv, line_node);
9565 if (
type != NODE_VCALL) {
9566 argc = setup_args(iseq, args, get_nd_args(node), &flag, &keywords);
9567 CHECK(!
NIL_P(argc));
/* `Foo.new` without block/blockarg can use the opt_new fast path. */
9575 bool inline_new = ISEQ_COMPILE_DATA(iseq)->option->specialized_instruction &&
9576 mid == rb_intern(
"new") &&
9577 parent_block == NULL &&
9578 !(flag & VM_CALL_ARGS_BLOCKARG);
9581 ADD_INSN(ret, node, putnil);
9582 ADD_INSN(ret, node, swap);
9587 debugp_param(
"call args argc", argc);
9588 debugp_param(
"call method",
ID2SYM(mid));
9590 switch ((
int)
type) {
9592 flag |= VM_CALL_VCALL;
/* VCALL falls through: it is also an FCALL. */
9595 flag |= VM_CALL_FCALL;
/* blockarg + non-mutable kwsplat needs splatkw to reify keywords. */
9598 if ((flag & VM_CALL_ARGS_BLOCKARG) && (flag & VM_CALL_KW_SPLAT) && !(flag & VM_CALL_KW_SPLAT_MUT)) {
9599 ADD_INSN(ret, line_node, splatkw);
/* inline_new path: opt_new allocates and calls initialize directly when
 * the class's `new` is the basic one; otherwise falls back to a send. */
9602 LABEL *not_basic_new = NEW_LABEL(nd_line(node));
9603 LABEL *not_basic_new_finish = NEW_LABEL(nd_line(node));
9608 if (flag & VM_CALL_FORWARDING) {
9609 ci = (
VALUE)new_callinfo(iseq, mid,
NUM2INT(argc) + 1, flag, keywords, 0);
9612 ci = (
VALUE)new_callinfo(iseq, mid,
NUM2INT(argc), flag, keywords, 0);
9614 ADD_INSN2(ret, node, opt_new, ci, not_basic_new);
9615 LABEL_REF(not_basic_new);
/* Fast path: object already allocated by opt_new; call initialize. */
9618 ADD_SEND_R(ret, line_node, rb_intern(
"initialize"), argc, parent_block,
INT2FIX(flag | VM_CALL_FCALL), keywords);
9619 ADD_INSNL(ret, line_node, jump, not_basic_new_finish);
9621 ADD_LABEL(ret, not_basic_new);
/* Slow path: plain send of `new`. */
9623 ADD_SEND_R(ret, line_node, mid, argc, parent_block,
INT2FIX(flag), keywords);
9624 ADD_INSN(ret, line_node, swap);
9626 ADD_LABEL(ret, not_basic_new_finish);
9627 ADD_INSN(ret, line_node, pop);
9630 ADD_SEND_R(ret, line_node, mid, argc, parent_block,
INT2FIX(flag), keywords);
9633 qcall_branch_end(iseq, ret, else_label, branches, node, line_node);
9635 ADD_INSN(ret, line_node, pop);
/* compile_op_asgn1: compile `recv[idx] op= val` (NODE_OP_ASGN1).
 * Emits recv and index args, dups them, reads via :[] (idAREF), applies
 * the operator — with short-circuit branches for ||= / &&= — and writes
 * back via :[]= (idASET), keeping the assigned value as the expression
 * result.  Splat/kwsplat index args need the pushtoarray dance so the
 * value can be appended to the argument list for []=. */
9643 const int line = nd_line(node);
9645 unsigned int flag = 0;
9647 ID id = RNODE_OP_ASGN1(node)->nd_mid;
9673 ADD_INSN(ret, node, putnil);
9675 asgnflag = COMPILE_RECV(ret,
"NODE_OP_ASGN1 recv", node, RNODE_OP_ASGN1(node)->nd_recv);
9676 CHECK(asgnflag != -1);
9677 switch (nd_type(RNODE_OP_ASGN1(node)->nd_index)) {
9682 argc = setup_args(iseq, ret, RNODE_OP_ASGN1(node)->nd_index, &flag, NULL);
9683 CHECK(!
NIL_P(argc));
/* Duplicate recv+args so they survive the :[] read for the later :[]= write. */
9685 int dup_argn =
FIX2INT(argc) + 1;
9686 ADD_INSN1(ret, node, dupn,
INT2FIX(dup_argn));
9688 ADD_SEND_R(ret, node, idAREF, argc, NULL,
INT2FIX(flag & ~VM_CALL_ARGS_SPLAT_MUT), NULL);
/* ||= and &&= short-circuit: skip the write when the current value decides. */
9690 if (
id == idOROP ||
id == idANDOP) {
9699 LABEL *label = NEW_LABEL(line);
9700 LABEL *lfin = NEW_LABEL(line);
9702 ADD_INSN(ret, node, dup);
9704 ADD_INSNL(ret, node, branchif, label);
9707 ADD_INSNL(ret, node, branchunless, label);
9709 ADD_INSN(ret, node, pop);
9711 CHECK(COMPILE(ret,
"NODE_OP_ASGN1 nd_rvalue: ", RNODE_OP_ASGN1(node)->nd_rvalue));
/* Stash the result below the dup'd recv/args as the expression value. */
9713 ADD_INSN1(ret, node, setn,
INT2FIX(dup_argn+1));
9715 if (flag & VM_CALL_ARGS_SPLAT) {
/* Make the splat array mutable before appending the new value. */
9716 if (!(flag & VM_CALL_ARGS_SPLAT_MUT)) {
9717 ADD_INSN(ret, node, swap);
9718 ADD_INSN1(ret, node, splatarray,
Qtrue);
9719 ADD_INSN(ret, node, swap);
9720 flag |= VM_CALL_ARGS_SPLAT_MUT;
9722 ADD_INSN1(ret, node, pushtoarray,
INT2FIX(1));
9723 ADD_SEND_R(ret, node, idASET, argc, NULL,
INT2FIX(flag), NULL);
9726 ADD_SEND_R(ret, node, idASET, FIXNUM_INC(argc, 1), NULL,
INT2FIX(flag), NULL);
9728 ADD_INSN(ret, node, pop);
9729 ADD_INSNL(ret, node, jump, lfin);
/* Short-circuit taken: current value is the result; drop the dups. */
9730 ADD_LABEL(ret, label);
9732 ADD_INSN1(ret, node, setn,
INT2FIX(dup_argn+1));
9734 ADD_INSN1(ret, node, adjuststack,
INT2FIX(dup_argn+1));
9735 ADD_LABEL(ret, lfin);
/* Plain operator (+=, -=, ...): current op rhs, then write back. */
9738 CHECK(COMPILE(ret,
"NODE_OP_ASGN1 nd_rvalue: ", RNODE_OP_ASGN1(node)->nd_rvalue));
9739 ADD_SEND(ret, node,
id,
INT2FIX(1));
9741 ADD_INSN1(ret, node, setn,
INT2FIX(dup_argn+1));
9743 if (flag & VM_CALL_ARGS_SPLAT) {
9744 if (flag & VM_CALL_KW_SPLAT) {
/* kwsplat present: the positional splat sits at depth 2. */
9745 ADD_INSN1(ret, node, topn,
INT2FIX(2));
9746 if (!(flag & VM_CALL_ARGS_SPLAT_MUT)) {
9747 ADD_INSN1(ret, node, splatarray,
Qtrue);
9748 flag |= VM_CALL_ARGS_SPLAT_MUT;
9750 ADD_INSN(ret, node, swap);
9751 ADD_INSN1(ret, node, pushtoarray,
INT2FIX(1));
9752 ADD_INSN1(ret, node, setn,
INT2FIX(2));
9753 ADD_INSN(ret, node, pop);
9756 if (!(flag & VM_CALL_ARGS_SPLAT_MUT)) {
9757 ADD_INSN(ret, node, swap);
9758 ADD_INSN1(ret, node, splatarray,
Qtrue);
9759 ADD_INSN(ret, node, swap);
9760 flag |= VM_CALL_ARGS_SPLAT_MUT;
9762 ADD_INSN1(ret, node, pushtoarray,
INT2FIX(1));
9764 ADD_SEND_R(ret, node, idASET, argc, NULL,
INT2FIX(flag), NULL);
9767 ADD_SEND_R(ret, node, idASET, FIXNUM_INC(argc, 1), NULL,
INT2FIX(flag), NULL);
9769 ADD_INSN(ret, node, pop);
/* compile_op_asgn2: compile `recv.attr op= val` (NODE_OP_ASGN2).
 * Reads via `attr`, applies the operator (short-circuiting for ||=/&&=),
 * and writes back via `attr=` (aid = rb_id_attrset(vid)).  A safe-
 * navigation form (`recv&.attr op= val`, nd_aid set) adds a branchnil
 * skip label around the whole read-modify-write. */
9777 const int line = nd_line(node);
9778 ID atype = RNODE_OP_ASGN2(node)->nd_mid;
9779 ID vid = RNODE_OP_ASGN2(node)->nd_vid, aid = rb_id_attrset(vid);
9781 LABEL *lfin = NEW_LABEL(line);
9782 LABEL *lcfin = NEW_LABEL(line);
9837 asgnflag = COMPILE_RECV(ret,
"NODE_OP_ASGN2#recv", node, RNODE_OP_ASGN2(node)->nd_recv);
9838 CHECK(asgnflag != -1);
/* Safe navigation: skip everything when receiver is nil. */
9839 if (RNODE_OP_ASGN2(node)->nd_aid) {
9840 lskip = NEW_LABEL(line);
9841 ADD_INSN(ret, node, dup);
9842 ADD_INSNL(ret, node, branchnil, lskip);
/* Read current value: recv.attr */
9844 ADD_INSN(ret, node, dup);
9845 ADD_SEND_WITH_FLAG(ret, node, vid,
INT2FIX(0),
INT2FIX(asgnflag));
9847 if (atype == idOROP || atype == idANDOP) {
9849 ADD_INSN(ret, node, dup);
9851 if (atype == idOROP) {
9852 ADD_INSNL(ret, node, branchif, lcfin);
9855 ADD_INSNL(ret, node, branchunless, lcfin);
9858 ADD_INSN(ret, node, pop);
9860 CHECK(COMPILE(ret,
"NODE_OP_ASGN2 val", RNODE_OP_ASGN2(node)->nd_value));
9862 ADD_INSN(ret, node, swap);
9863 ADD_INSN1(ret, node, topn,
INT2FIX(1));
/* Write back: recv.attr = new value */
9865 ADD_SEND_WITH_FLAG(ret, node, aid,
INT2FIX(1),
INT2FIX(asgnflag));
9866 ADD_INSNL(ret, node, jump, lfin);
/* Short-circuit taken: keep the current value, skip the write. */
9868 ADD_LABEL(ret, lcfin);
9870 ADD_INSN(ret, node, swap);
9873 ADD_LABEL(ret, lfin);
/* Plain operator: current op rhs, then write back. */
9876 CHECK(COMPILE(ret,
"NODE_OP_ASGN2 val", RNODE_OP_ASGN2(node)->nd_value));
9877 ADD_SEND(ret, node, atype,
INT2FIX(1));
9879 ADD_INSN(ret, node, swap);
9880 ADD_INSN1(ret, node, topn,
INT2FIX(1));
9882 ADD_SEND_WITH_FLAG(ret, node, aid,
INT2FIX(1),
INT2FIX(asgnflag));
/* lskip placement depends on whether the result is consumed. */
9884 if (lskip && popped) {
9885 ADD_LABEL(ret, lskip);
9887 ADD_INSN(ret, node, pop);
9888 if (lskip && !popped) {
9889 ADD_LABEL(ret, lskip);
/* Forward declaration: compiles `value` for a constant assignment to `lhs`,
 * honoring the parser's shareability mode (the `shareable_constant_value:`
 * magic-comment setting carried on the node).  Declared ahead of its
 * definition because earlier code paths (e.g. the NODE_OP_CDECL and
 * NODE_CDECL handlers) call it via CHECK(), so it is expected to return
 * nonzero on success — TODO confirm against the definition below. */
9894static int compile_shareable_constant_value(
rb_iseq_t *iseq,
LINK_ANCHOR *ret,
enum rb_parser_shareability shareable,
const NODE *lhs,
const NODE *value);
9899 const int line = nd_line(node);
9904 switch (nd_type(RNODE_OP_CDECL(node)->nd_head)) {
9909 CHECK(COMPILE(ret,
"NODE_OP_CDECL/colon2#nd_head", RNODE_COLON2(RNODE_OP_CDECL(node)->nd_head)->nd_head));
9912 COMPILE_ERROR(ERROR_ARGS
"%s: invalid node in NODE_OP_CDECL",
9913 ruby_node_name(nd_type(RNODE_OP_CDECL(node)->nd_head)));
9916 mid = get_node_colon_nd_mid(RNODE_OP_CDECL(node)->nd_head);
9918 if (RNODE_OP_CDECL(node)->nd_aid == idOROP) {
9919 lassign = NEW_LABEL(line);
9920 ADD_INSN(ret, node, dup);
9921 ADD_INSN3(ret, node, defined,
INT2FIX(DEFINED_CONST_FROM),
9923 ADD_INSNL(ret, node, branchunless, lassign);
9925 ADD_INSN(ret, node, dup);
9926 ADD_INSN1(ret, node, putobject,
Qtrue);
9927 ADD_INSN1(ret, node, getconstant,
ID2SYM(mid));
9929 if (RNODE_OP_CDECL(node)->nd_aid == idOROP || RNODE_OP_CDECL(node)->nd_aid == idANDOP) {
9930 lfin = NEW_LABEL(line);
9931 if (!popped) ADD_INSN(ret, node, dup);
9932 if (RNODE_OP_CDECL(node)->nd_aid == idOROP)
9933 ADD_INSNL(ret, node, branchif, lfin);
9935 ADD_INSNL(ret, node, branchunless, lfin);
9937 if (!popped) ADD_INSN(ret, node, pop);
9938 if (lassign) ADD_LABEL(ret, lassign);
9939 CHECK(compile_shareable_constant_value(iseq, ret, RNODE_OP_CDECL(node)->shareability, RNODE_OP_CDECL(node)->nd_head, RNODE_OP_CDECL(node)->nd_value));
9942 ADD_INSN1(ret, node, topn,
INT2FIX(1));
9944 ADD_INSN1(ret, node, dupn,
INT2FIX(2));
9945 ADD_INSN(ret, node, swap);
9947 ADD_INSN1(ret, node, setconstant,
ID2SYM(mid));
9948 ADD_LABEL(ret, lfin);
9949 if (!popped) ADD_INSN(ret, node, swap);
9950 ADD_INSN(ret, node, pop);
9953 CHECK(compile_shareable_constant_value(iseq, ret, RNODE_OP_CDECL(node)->shareability, RNODE_OP_CDECL(node)->nd_head, RNODE_OP_CDECL(node)->nd_value));
9955 ADD_CALL(ret, node, RNODE_OP_CDECL(node)->nd_aid,
INT2FIX(1));
9957 ADD_INSN(ret, node, swap);
9959 ADD_INSN1(ret, node, topn,
INT2FIX(1));
9960 ADD_INSN(ret, node, swap);
9962 ADD_INSN1(ret, node, setconstant,
ID2SYM(mid));
9970 const int line = nd_line(node);
9971 LABEL *lfin = NEW_LABEL(line);
9974 if (
type == NODE_OP_ASGN_OR && !nd_type_p(RNODE_OP_ASGN_OR(node)->nd_head, NODE_IVAR)) {
9978 defined_expr(iseq, ret, RNODE_OP_ASGN_OR(node)->nd_head, lfinish,
Qfalse,
false);
9979 lassign = lfinish[1];
9981 lassign = NEW_LABEL(line);
9983 ADD_INSNL(ret, node, branchunless, lassign);
9986 lassign = NEW_LABEL(line);
9989 CHECK(COMPILE(ret,
"NODE_OP_ASGN_AND/OR#nd_head", RNODE_OP_ASGN_OR(node)->nd_head));
9992 ADD_INSN(ret, node, dup);
9995 if (
type == NODE_OP_ASGN_AND) {
9996 ADD_INSNL(ret, node, branchunless, lfin);
9999 ADD_INSNL(ret, node, branchif, lfin);
10003 ADD_INSN(ret, node, pop);
10006 ADD_LABEL(ret, lassign);
10007 CHECK(COMPILE_(ret,
"NODE_OP_ASGN_AND/OR#nd_value", RNODE_OP_ASGN_OR(node)->nd_value, popped));
10008 ADD_LABEL(ret, lfin);
10018 unsigned int flag = 0;
10020 const rb_iseq_t *parent_block = ISEQ_COMPILE_DATA(iseq)->current_block;
10024 ISEQ_COMPILE_DATA(iseq)->current_block = NULL;
10026 if (
type == NODE_SUPER) {
10027 VALUE vargc = setup_args(iseq, args, RNODE_SUPER(node)->nd_args, &flag, &keywords);
10028 CHECK(!
NIL_P(vargc));
10030 if ((flag & VM_CALL_ARGS_BLOCKARG) && (flag & VM_CALL_KW_SPLAT) && !(flag & VM_CALL_KW_SPLAT_MUT)) {
10031 ADD_INSN(args, node, splatkw);
10034 if (flag & VM_CALL_ARGS_BLOCKARG) {
10041 const rb_iseq_t *liseq = body->local_iseq;
10043 const struct rb_iseq_param_keyword *
const local_kwd = local_body->param.keyword;
10044 int lvar_level = get_lvar_level(iseq);
10046 argc = local_body->param.lead_num;
10049 for (i = 0; i < local_body->param.lead_num; i++) {
10050 int idx = local_body->local_table_size - i;
10051 ADD_GETLOCAL(args, node, idx, lvar_level);
10055 if (local_body->param.flags.forwardable) {
10056 flag |= VM_CALL_FORWARDING;
10057 int idx = local_body->local_table_size - get_local_var_idx(liseq, idDot3);
10058 ADD_GETLOCAL(args, node, idx, lvar_level);
10061 if (local_body->param.flags.has_opt) {
10064 for (j = 0; j < local_body->param.opt_num; j++) {
10065 int idx = local_body->local_table_size - (i + j);
10066 ADD_GETLOCAL(args, node, idx, lvar_level);
10071 if (local_body->param.flags.has_rest) {
10073 int idx = local_body->local_table_size - local_body->param.rest_start;
10074 ADD_GETLOCAL(args, node, idx, lvar_level);
10075 ADD_INSN1(args, node, splatarray, RBOOL(local_body->param.flags.has_post));
10077 argc = local_body->param.rest_start + 1;
10078 flag |= VM_CALL_ARGS_SPLAT;
10080 if (local_body->param.flags.has_post) {
10082 int post_len = local_body->param.post_num;
10083 int post_start = local_body->param.post_start;
10085 if (local_body->param.flags.has_rest) {
10087 for (j=0; j<post_len; j++) {
10088 int idx = local_body->local_table_size - (post_start + j);
10089 ADD_GETLOCAL(args, node, idx, lvar_level);
10091 ADD_INSN1(args, node, pushtoarray,
INT2FIX(j));
10092 flag |= VM_CALL_ARGS_SPLAT_MUT;
10097 for (j=0; j<post_len; j++) {
10098 int idx = local_body->local_table_size - (post_start + j);
10099 ADD_GETLOCAL(args, node, idx, lvar_level);
10101 argc = post_len + post_start;
10105 if (local_body->param.flags.has_kw) {
10106 int local_size = local_body->local_table_size;
10109 ADD_INSN1(args, node, putspecialobject,
INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
10111 if (local_body->param.flags.has_kwrest) {
10112 int idx = local_body->local_table_size - local_kwd->rest_start;
10113 ADD_GETLOCAL(args, node, idx, lvar_level);
10115 ADD_SEND (args, node, rb_intern(
"dup"),
INT2FIX(0));
10118 ADD_INSN1(args, node, newhash,
INT2FIX(0));
10120 for (i = 0; i < local_kwd->num; ++i) {
10121 ID id = local_kwd->table[i];
10122 int idx = local_size - get_local_var_idx(liseq,
id);
10123 ADD_INSN1(args, node, putobject,
ID2SYM(
id));
10124 ADD_GETLOCAL(args, node, idx, lvar_level);
10126 ADD_SEND(args, node, id_core_hash_merge_ptr,
INT2FIX(i * 2 + 1));
10127 flag |= VM_CALL_KW_SPLAT| VM_CALL_KW_SPLAT_MUT;
10129 else if (local_body->param.flags.has_kwrest) {
10130 int idx = local_body->local_table_size - local_kwd->rest_start;
10131 ADD_GETLOCAL(args, node, idx, lvar_level);
10133 flag |= VM_CALL_KW_SPLAT;
10137 if (use_block && parent_block == NULL) {
10138 iseq_set_use_block(ISEQ_BODY(iseq)->local_iseq);
10141 flag |= VM_CALL_SUPER | VM_CALL_FCALL;
10142 if (
type == NODE_ZSUPER) flag |= VM_CALL_ZSUPER;
10143 ADD_INSN(ret, node, putself);
10144 ADD_SEQ(ret, args);
10146 const struct rb_callinfo * ci = new_callinfo(iseq, 0, argc, flag, keywords, parent_block != NULL);
10148 if (vm_ci_flag(ci) & VM_CALL_FORWARDING) {
10149 ADD_INSN2(ret, node, invokesuperforward, ci, parent_block);
10152 ADD_INSN2(ret, node, invokesuper, ci, parent_block);
10156 ADD_INSN(ret, node, pop);
10166 unsigned int flag = 0;
10171 switch (ISEQ_BODY(ISEQ_BODY(iseq)->local_iseq)->
type) {
10172 case ISEQ_TYPE_TOP:
10173 case ISEQ_TYPE_MAIN:
10174 case ISEQ_TYPE_CLASS:
10175 COMPILE_ERROR(ERROR_ARGS
"Invalid yield");
10180 if (RNODE_YIELD(node)->nd_head) {
10181 argc = setup_args(iseq, args, RNODE_YIELD(node)->nd_head, &flag, &keywords);
10182 CHECK(!
NIL_P(argc));
10188 ADD_SEQ(ret, args);
10189 ADD_INSN1(ret, node, invokeblock, new_callinfo(iseq, 0,
FIX2INT(argc), flag, keywords, FALSE));
10190 iseq_set_use_block(ISEQ_BODY(iseq)->local_iseq);
10193 ADD_INSN(ret, node, pop);
10198 for (; tmp_iseq != ISEQ_BODY(iseq)->local_iseq; level++ ) {
10199 tmp_iseq = ISEQ_BODY(tmp_iseq)->parent_iseq;
10201 if (level > 0) access_outer_variables(iseq, level, rb_intern(
"yield"),
true);
10214 switch ((
int)
type) {
10217 VALUE re = rb_node_regx_string_val(node);
10218 RB_OBJ_SET_FROZEN_SHAREABLE(re);
10219 ADD_INSN1(recv, node, putobject, re);
10220 ADD_INSN2(val, node, getspecial,
INT2FIX(0),
10225 CHECK(COMPILE(recv,
"receiver", RNODE_MATCH2(node)->nd_recv));
10226 CHECK(COMPILE(val,
"value", RNODE_MATCH2(node)->nd_value));
10229 CHECK(COMPILE(recv,
"receiver", RNODE_MATCH3(node)->nd_value));
10230 CHECK(COMPILE(val,
"value", RNODE_MATCH3(node)->nd_recv));
10234 ADD_SEQ(ret, recv);
10236 ADD_SEND(ret, node, idEqTilde,
INT2FIX(1));
10238 if (nd_type_p(node, NODE_MATCH2) && RNODE_MATCH2(node)->nd_args) {
10239 compile_named_capture_assign(iseq, ret, RNODE_MATCH2(node)->nd_args);
10243 ADD_INSN(ret, node, pop);
10254 if (ISEQ_COMPILE_DATA(iseq)->option->inline_const_cache &&
10255 (segments = collect_const_segments(iseq, node))) {
10256 ISEQ_BODY(iseq)->ic_size++;
10257 ADD_INSN1(ret, node, opt_getconstant_path, segments);
10267 CHECK(compile_const_prefix(iseq, node, pref, body));
10268 if (LIST_INSN_SIZE_ZERO(pref)) {
10269 ADD_INSN(ret, node, putnil);
10270 ADD_SEQ(ret, body);
10273 ADD_SEQ(ret, pref);
10274 ADD_SEQ(ret, body);
10280 ADD_CALL_RECEIVER(ret, node);
10281 CHECK(COMPILE(ret,
"colon2#nd_head", RNODE_COLON2(node)->nd_head));
10282 ADD_CALL(ret, node, RNODE_COLON2(node)->nd_mid,
INT2FIX(1));
10285 ADD_INSN(ret, node, pop);
10293 debugi(
"colon3#nd_mid", RNODE_COLON3(node)->nd_mid);
10296 if (ISEQ_COMPILE_DATA(iseq)->option->inline_const_cache) {
10297 ISEQ_BODY(iseq)->ic_size++;
10298 VALUE segments = rb_ary_new_from_args(2,
ID2SYM(idNULL),
ID2SYM(RNODE_COLON3(node)->nd_mid));
10299 RB_OBJ_SET_FROZEN_SHAREABLE(segments);
10300 ADD_INSN1(ret, node, opt_getconstant_path, segments);
10304 ADD_INSN1(ret, node, putobject,
rb_cObject);
10305 ADD_INSN1(ret, node, putobject,
Qtrue);
10306 ADD_INSN1(ret, node, getconstant,
ID2SYM(RNODE_COLON3(node)->nd_mid));
10310 ADD_INSN(ret, node, pop);
10319 const NODE *b = RNODE_DOT2(node)->nd_beg;
10320 const NODE *e = RNODE_DOT2(node)->nd_end;
10322 if (optimizable_range_item_p(b) && optimizable_range_item_p(e)) {
10324 VALUE bv = optimized_range_item(b);
10325 VALUE ev = optimized_range_item(e);
10328 ADD_INSN1(ret, node, putobject, val);
10333 CHECK(COMPILE_(ret,
"min", b, popped));
10334 CHECK(COMPILE_(ret,
"max", e, popped));
10336 ADD_INSN1(ret, node, newrange, flag);
10346 if (ISEQ_BODY(iseq)->
type == ISEQ_TYPE_RESCUE) {
10347 ADD_GETLOCAL(ret, node, LVAR_ERRINFO, 0);
10353 if (ISEQ_BODY(ip)->
type == ISEQ_TYPE_RESCUE) {
10356 ip = ISEQ_BODY(ip)->parent_iseq;
10360 ADD_GETLOCAL(ret, node, LVAR_ERRINFO, level);
10363 ADD_INSN(ret, node, putnil);
10374 LABEL *end_label = NEW_LABEL(nd_line(node));
10375 const NODE *default_value = get_nd_value(RNODE_KW_ARG(node)->nd_body);
10377 if (default_value == NODE_SPECIAL_REQUIRED_KEYWORD) {
10379 COMPILE_ERROR(ERROR_ARGS
"unreachable");
10382 else if (nd_type_p(default_value, NODE_SYM) ||
10383 nd_type_p(default_value, NODE_REGX) ||
10384 nd_type_p(default_value, NODE_LINE) ||
10385 nd_type_p(default_value, NODE_INTEGER) ||
10386 nd_type_p(default_value, NODE_FLOAT) ||
10387 nd_type_p(default_value, NODE_RATIONAL) ||
10388 nd_type_p(default_value, NODE_IMAGINARY) ||
10389 nd_type_p(default_value, NODE_NIL) ||
10390 nd_type_p(default_value, NODE_TRUE) ||
10391 nd_type_p(default_value, NODE_FALSE)) {
10392 COMPILE_ERROR(ERROR_ARGS
"unreachable");
10400 int kw_bits_idx = body->local_table_size - body->param.keyword->bits_start;
10401 int keyword_idx = body->param.keyword->num;
10403 ADD_INSN2(ret, node, checkkeyword,
INT2FIX(kw_bits_idx + VM_ENV_DATA_SIZE - 1),
INT2FIX(keyword_idx));
10404 ADD_INSNL(ret, node, branchif, end_label);
10405 CHECK(COMPILE_POPPED(ret,
"keyword default argument", RNODE_KW_ARG(node)->nd_body));
10406 ADD_LABEL(ret, end_label);
10416 unsigned int flag = 0;
10417 ID mid = RNODE_ATTRASGN(node)->nd_mid;
10419 LABEL *else_label = NULL;
10424 argc = setup_args(iseq, args, RNODE_ATTRASGN(node)->nd_args, &flag, NULL);
10425 CHECK(!
NIL_P(argc));
10427 int asgnflag = COMPILE_RECV(recv,
"recv", node, RNODE_ATTRASGN(node)->nd_recv);
10428 CHECK(asgnflag != -1);
10429 flag |= (
unsigned int)asgnflag;
10431 debugp_param(
"argc", argc);
10432 debugp_param(
"nd_mid",
ID2SYM(mid));
10436 mid = rb_id_attrset(mid);
10437 else_label = qcall_branch_start(iseq, recv, &branches, node, node);
10440 ADD_INSN(ret, node, putnil);
10441 ADD_SEQ(ret, recv);
10442 ADD_SEQ(ret, args);
10444 if (flag & VM_CALL_ARGS_SPLAT) {
10445 ADD_INSN(ret, node, dup);
10446 ADD_INSN1(ret, node, putobject,
INT2FIX(-1));
10447 ADD_SEND_WITH_FLAG(ret, node, idAREF,
INT2FIX(1),
INT2FIX(asgnflag));
10448 ADD_INSN1(ret, node, setn, FIXNUM_INC(argc, 2));
10449 ADD_INSN (ret, node, pop);
10452 ADD_INSN1(ret, node, setn, FIXNUM_INC(argc, 1));
10456 ADD_SEQ(ret, recv);
10457 ADD_SEQ(ret, args);
10459 ADD_SEND_WITH_FLAG(ret, node, mid, argc,
INT2FIX(flag));
10460 qcall_branch_end(iseq, ret, else_label, branches, node, node);
10461 ADD_INSN(ret, node, pop);
10468 ADD_INSN1(ret, value, putspecialobject,
INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
10476 ADD_SEND_WITH_FLAG(ret, value, rb_intern(
"make_shareable_copy"),
INT2FIX(1),
INT2FIX(VM_CALL_ARGS_SIMPLE));
10483 ADD_SEND_WITH_FLAG(ret, value, rb_intern(
"make_shareable"),
INT2FIX(1),
INT2FIX(VM_CALL_ARGS_SIMPLE));
10490node_const_decl_val(
const NODE *node)
10493 switch (nd_type(node)) {
10495 if (RNODE_CDECL(node)->nd_vid) {
10496 path = rb_id2str(RNODE_CDECL(node)->nd_vid);
10500 node = RNODE_CDECL(node)->nd_else;
10508 rb_str_append(path, rb_id2str(RNODE_COLON3(node)->nd_mid));
10511 rb_bug(
"unexpected node: %s", ruby_node_name(nd_type(node)));
10517 for (; node && nd_type_p(node, NODE_COLON2); node = RNODE_COLON2(node)->nd_head) {
10518 rb_ary_push(path, rb_id2str(RNODE_COLON2(node)->nd_mid));
10520 if (node && nd_type_p(node, NODE_CONST)) {
10522 rb_ary_push(path, rb_id2str(RNODE_CONST(node)->nd_vid));
10524 else if (node && nd_type_p(node, NODE_COLON3)) {
10526 rb_ary_push(path, rb_id2str(RNODE_COLON3(node)->nd_mid));
10536 path = rb_fstring(path);
10541const_decl_path(
NODE *dest)
10544 if (!nd_type_p(dest, NODE_CALL)) {
10545 path = node_const_decl_val(dest);
10556 VALUE path = const_decl_path(dest);
10557 ADD_INSN1(ret, value, putspecialobject,
INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
10558 CHECK(COMPILE(ret,
"compile_ensure_shareable_node", value));
10559 ADD_INSN1(ret, value, putobject, path);
10561 ADD_SEND_WITH_FLAG(ret, value, rb_intern(
"ensure_shareable"),
INT2FIX(2),
INT2FIX(VM_CALL_ARGS_SIMPLE));
10566#ifndef SHAREABLE_BARE_EXPRESSION
10567#define SHAREABLE_BARE_EXPRESSION 1
10571compile_shareable_literal_constant(
rb_iseq_t *iseq,
LINK_ANCHOR *ret,
enum rb_parser_shareability shareable,
NODE *dest,
const NODE *node,
size_t level,
VALUE *value_p,
int *shareable_literal_p)
10573# define compile_shareable_literal_constant_next(node, anchor, value_p, shareable_literal_p) \
10574 compile_shareable_literal_constant(iseq, anchor, shareable, dest, node, level+1, value_p, shareable_literal_p)
10576 DECL_ANCHOR(anchor);
10578 enum node_type
type = node ? nd_type(node) : NODE_NIL;
10590 *value_p = rb_node_sym_string_val(node);
10593 *value_p = rb_node_regx_string_val(node);
10596 *value_p = rb_node_line_lineno_val(node);
10599 *value_p = rb_node_integer_literal_val(node);
10602 *value_p = rb_node_float_literal_val(node);
10604 case NODE_RATIONAL:
10605 *value_p = rb_node_rational_literal_val(node);
10607 case NODE_IMAGINARY:
10608 *value_p = rb_node_imaginary_literal_val(node);
10610 case NODE_ENCODING:
10611 *value_p = rb_node_encoding_val(node);
10614 CHECK(COMPILE(ret,
"shareable_literal_constant", node));
10615 *shareable_literal_p = 1;
10619 CHECK(COMPILE(ret,
"shareable_literal_constant", node));
10620 if (shareable == rb_parser_shareable_literal) {
10626 ADD_SEND_WITH_FLAG(ret, node, idUMinus,
INT2FIX(0),
INT2FIX(VM_CALL_ARGS_SIMPLE));
10629 *shareable_literal_p = 1;
10633 VALUE lit = rb_node_str_string_val(node);
10634 ADD_INSN1(ret, node, putobject, lit);
10637 *shareable_literal_p = 1;
10643 VALUE lit = rb_node_file_path_val(node);
10644 ADD_INSN1(ret, node, putobject, lit);
10647 *shareable_literal_p = 1;
10655 ADD_INSN1(ret, node, putobject, lit);
10658 *shareable_literal_p = 1;
10664 INIT_ANCHOR(anchor);
10666 for (
NODE *n = (
NODE *)node; n; n = RNODE_LIST(n)->nd_next) {
10668 int shareable_literal_p2;
10669 NODE *elt = RNODE_LIST(n)->nd_head;
10671 CHECK(compile_shareable_literal_constant_next(elt, anchor, &val, &shareable_literal_p2));
10672 if (shareable_literal_p2) {
10675 else if (
RTEST(lit)) {
10681 if (!UNDEF_P(val)) {
10693 if (!RNODE_HASH(node)->nd_brace) {
10695 *shareable_literal_p = 0;
10698 for (
NODE *n = RNODE_HASH(node)->nd_head; n; n = RNODE_LIST(RNODE_LIST(n)->nd_next)->nd_next) {
10699 if (!RNODE_LIST(n)->nd_head) {
10701 goto compile_shareable;
10705 INIT_ANCHOR(anchor);
10706 lit = rb_hash_new();
10707 for (
NODE *n = RNODE_HASH(node)->nd_head; n; n = RNODE_LIST(RNODE_LIST(n)->nd_next)->nd_next) {
10709 VALUE value_val = 0;
10710 int shareable_literal_p2;
10711 NODE *key = RNODE_LIST(n)->nd_head;
10712 NODE *val = RNODE_LIST(RNODE_LIST(n)->nd_next)->nd_head;
10713 CHECK(compile_shareable_literal_constant_next(key, anchor, &key_val, &shareable_literal_p2));
10714 if (shareable_literal_p2) {
10717 else if (
RTEST(lit)) {
10718 rb_hash_clear(lit);
10721 CHECK(compile_shareable_literal_constant_next(val, anchor, &value_val, &shareable_literal_p2));
10722 if (shareable_literal_p2) {
10725 else if (
RTEST(lit)) {
10726 rb_hash_clear(lit);
10730 if (!UNDEF_P(key_val) && !UNDEF_P(value_val)) {
10731 rb_hash_aset(lit, key_val, value_val);
10734 rb_hash_clear(lit);
10745 if (shareable == rb_parser_shareable_literal &&
10746 (SHAREABLE_BARE_EXPRESSION || level > 0)) {
10747 CHECK(compile_ensure_shareable_node(iseq, ret, dest, node));
10749 *shareable_literal_p = 1;
10752 CHECK(COMPILE(ret,
"shareable_literal_constant", node));
10754 *shareable_literal_p = 0;
10760 if (nd_type(node) == NODE_LIST) {
10761 ADD_INSN1(anchor, node, newarray,
INT2FIX(RNODE_LIST(node)->as.nd_alen));
10763 else if (nd_type(node) == NODE_HASH) {
10764 long len = RNODE_LIST(RNODE_HASH(node)->nd_head)->as.nd_alen;
10770 *shareable_literal_p = 0;
10771 ADD_SEQ(ret, anchor);
10777 if (nd_type(node) == NODE_LIST) {
10778 ADD_INSN1(anchor, node, newarray,
INT2FIX(RNODE_LIST(node)->as.nd_alen));
10780 else if (nd_type(node) == NODE_HASH) {
10781 long len = RNODE_LIST(RNODE_HASH(node)->nd_head)->as.nd_alen;
10786 CHECK(compile_make_shareable_node(iseq, ret, anchor, node,
false));
10788 *shareable_literal_p = 1;
10792 ADD_INSN1(ret, node, putobject, val);
10795 *shareable_literal_p = 1;
10802compile_shareable_constant_value(
rb_iseq_t *iseq,
LINK_ANCHOR *ret,
enum rb_parser_shareability shareable,
const NODE *lhs,
const NODE *value)
10806 DECL_ANCHOR(anchor);
10807 INIT_ANCHOR(anchor);
10809 switch (shareable) {
10810 case rb_parser_shareable_none:
10811 CHECK(COMPILE(ret,
"compile_shareable_constant_value", value));
10814 case rb_parser_shareable_literal:
10815 CHECK(compile_shareable_literal_constant(iseq, anchor, shareable, (
NODE *)lhs, value, 0, &val, &literal_p));
10816 ADD_SEQ(ret, anchor);
10819 case rb_parser_shareable_copy:
10820 case rb_parser_shareable_everything:
10821 CHECK(compile_shareable_literal_constant(iseq, anchor, shareable, (
NODE *)lhs, value, 0, &val, &literal_p));
10823 CHECK(compile_make_shareable_node(iseq, ret, anchor, value, shareable == rb_parser_shareable_copy));
10826 ADD_SEQ(ret, anchor);
10830 rb_bug(
"unexpected rb_parser_shareability: %d", shareable);
10847 int lineno = ISEQ_COMPILE_DATA(iseq)->last_line;
10848 if (lineno == 0) lineno =
FIX2INT(rb_iseq_first_lineno(iseq));
10849 debugs(
"node: NODE_NIL(implicit)\n");
10850 ADD_SYNTHETIC_INSN(ret, lineno, -1, putnil);
10854 return iseq_compile_each0(iseq, ret, node, popped);
10860 const int line = (int)nd_line(node);
10861 const enum node_type
type = nd_type(node);
10864 if (ISEQ_COMPILE_DATA(iseq)->last_line == line) {
10868 if (nd_fl_newline(node)) {
10870 ISEQ_COMPILE_DATA(iseq)->last_line = line;
10871 if (line > 0 && ISEQ_COVERAGE(iseq) && ISEQ_LINE_COVERAGE(iseq)) {
10872 event |= RUBY_EVENT_COVERAGE_LINE;
10874 ADD_TRACE(ret, event);
10878 debug_node_start(node);
10879#undef BEFORE_RETURN
10880#define BEFORE_RETURN debug_node_end()
10884 CHECK(compile_block(iseq, ret, node, popped));
10888 CHECK(compile_if(iseq, ret, node, popped,
type));
10891 CHECK(compile_case(iseq, ret, node, popped));
10894 CHECK(compile_case2(iseq, ret, node, popped));
10897 CHECK(compile_case3(iseq, ret, node, popped));
10901 CHECK(compile_loop(iseq, ret, node, popped,
type));
10905 CHECK(compile_iter(iseq, ret, node, popped));
10907 case NODE_FOR_MASGN:
10908 CHECK(compile_for_masgn(iseq, ret, node, popped));
10911 CHECK(compile_break(iseq, ret, node, popped));
10914 CHECK(compile_next(iseq, ret, node, popped));
10917 CHECK(compile_redo(iseq, ret, node, popped));
10920 CHECK(compile_retry(iseq, ret, node, popped));
10923 CHECK(COMPILE_(ret,
"NODE_BEGIN", RNODE_BEGIN(node)->nd_body, popped));
10927 CHECK(compile_rescue(iseq, ret, node, popped));
10930 CHECK(compile_resbody(iseq, ret, node, popped));
10933 CHECK(compile_ensure(iseq, ret, node, popped));
10938 LABEL *end_label = NEW_LABEL(line);
10939 CHECK(COMPILE(ret,
"nd_1st", RNODE_OR(node)->nd_1st));
10941 ADD_INSN(ret, node, dup);
10943 if (
type == NODE_AND) {
10944 ADD_INSNL(ret, node, branchunless, end_label);
10947 ADD_INSNL(ret, node, branchif, end_label);
10950 ADD_INSN(ret, node, pop);
10952 CHECK(COMPILE_(ret,
"nd_2nd", RNODE_OR(node)->nd_2nd, popped));
10953 ADD_LABEL(ret, end_label);
10958 compile_massign(iseq, ret, node, popped);
10963 ID id = RNODE_LASGN(node)->nd_vid;
10964 int idx = ISEQ_BODY(body->local_iseq)->local_table_size - get_local_var_idx(iseq,
id);
10966 debugs(
"lvar: %s idx: %d\n", rb_id2name(
id), idx);
10967 CHECK(COMPILE(ret,
"rvalue", RNODE_LASGN(node)->nd_value));
10970 ADD_INSN(ret, node, dup);
10972 ADD_SETLOCAL(ret, node, idx, get_lvar_level(iseq));
10977 ID id = RNODE_DASGN(node)->nd_vid;
10978 CHECK(COMPILE(ret,
"dvalue", RNODE_DASGN(node)->nd_value));
10979 debugi(
"dassn id", rb_id2str(
id) ?
id :
'*');
10982 ADD_INSN(ret, node, dup);
10985 idx = get_dyna_var_idx(iseq,
id, &lv, &ls);
10988 COMPILE_ERROR(ERROR_ARGS
"NODE_DASGN: unknown id (%"PRIsVALUE
")",
10992 ADD_SETLOCAL(ret, node, ls - idx, lv);
10996 CHECK(COMPILE(ret,
"lvalue", RNODE_GASGN(node)->nd_value));
10999 ADD_INSN(ret, node, dup);
11001 ADD_INSN1(ret, node, setglobal,
ID2SYM(RNODE_GASGN(node)->nd_vid));
11005 CHECK(COMPILE(ret,
"lvalue", RNODE_IASGN(node)->nd_value));
11007 ADD_INSN(ret, node, dup);
11009 ADD_INSN2(ret, node, setinstancevariable,
11010 ID2SYM(RNODE_IASGN(node)->nd_vid),
11011 get_ivar_ic_value(iseq,RNODE_IASGN(node)->nd_vid));
11015 if (RNODE_CDECL(node)->nd_vid) {
11016 CHECK(compile_shareable_constant_value(iseq, ret, RNODE_CDECL(node)->shareability, node, RNODE_CDECL(node)->nd_value));
11019 ADD_INSN(ret, node, dup);
11022 ADD_INSN1(ret, node, putspecialobject,
11023 INT2FIX(VM_SPECIAL_OBJECT_CONST_BASE));
11024 ADD_INSN1(ret, node, setconstant,
ID2SYM(RNODE_CDECL(node)->nd_vid));
11027 compile_cpath(ret, iseq, RNODE_CDECL(node)->nd_else);
11028 CHECK(compile_shareable_constant_value(iseq, ret, RNODE_CDECL(node)->shareability, node, RNODE_CDECL(node)->nd_value));
11029 ADD_INSN(ret, node, swap);
11032 ADD_INSN1(ret, node, topn,
INT2FIX(1));
11033 ADD_INSN(ret, node, swap);
11036 ADD_INSN1(ret, node, setconstant,
ID2SYM(get_node_colon_nd_mid(RNODE_CDECL(node)->nd_else)));
11041 CHECK(COMPILE(ret,
"cvasgn val", RNODE_CVASGN(node)->nd_value));
11043 ADD_INSN(ret, node, dup);
11045 ADD_INSN2(ret, node, setclassvariable,
11046 ID2SYM(RNODE_CVASGN(node)->nd_vid),
11047 get_cvar_ic_value(iseq, RNODE_CVASGN(node)->nd_vid));
11050 case NODE_OP_ASGN1:
11051 CHECK(compile_op_asgn1(iseq, ret, node, popped));
11053 case NODE_OP_ASGN2:
11054 CHECK(compile_op_asgn2(iseq, ret, node, popped));
11056 case NODE_OP_CDECL:
11057 CHECK(compile_op_cdecl(iseq, ret, node, popped));
11059 case NODE_OP_ASGN_AND:
11060 case NODE_OP_ASGN_OR:
11061 CHECK(compile_op_log(iseq, ret, node, popped,
type));
11065 if (compile_call_precheck_freeze(iseq, ret, node, node, popped) == TRUE) {
11071 if (compile_call(iseq, ret, node,
type, node, popped,
false) == COMPILE_NG) {
11077 CHECK(compile_super(iseq, ret, node, popped,
type));
11080 CHECK(compile_array(iseq, ret, node, popped, TRUE) >= 0);
11085 ADD_INSN1(ret, node, newarray,
INT2FIX(0));
11090 CHECK(compile_hash(iseq, ret, node, FALSE, popped) >= 0);
11093 CHECK(compile_return(iseq, ret, node, popped));
11096 CHECK(compile_yield(iseq, ret, node, popped));
11100 compile_lvar(iseq, ret, node, RNODE_LVAR(node)->nd_vid);
11106 debugi(
"nd_vid", RNODE_DVAR(node)->nd_vid);
11108 idx = get_dyna_var_idx(iseq, RNODE_DVAR(node)->nd_vid, &lv, &ls);
11110 COMPILE_ERROR(ERROR_ARGS
"unknown dvar (%"PRIsVALUE
")",
11111 rb_id2str(RNODE_DVAR(node)->nd_vid));
11114 ADD_GETLOCAL(ret, node, ls - idx, lv);
11119 ADD_INSN1(ret, node, getglobal,
ID2SYM(RNODE_GVAR(node)->nd_vid));
11121 ADD_INSN(ret, node, pop);
11126 debugi(
"nd_vid", RNODE_IVAR(node)->nd_vid);
11128 ADD_INSN2(ret, node, getinstancevariable,
11129 ID2SYM(RNODE_IVAR(node)->nd_vid),
11130 get_ivar_ic_value(iseq, RNODE_IVAR(node)->nd_vid));
11135 debugi(
"nd_vid", RNODE_CONST(node)->nd_vid);
11137 if (ISEQ_COMPILE_DATA(iseq)->option->inline_const_cache) {
11139 VALUE segments = rb_ary_new_from_args(1,
ID2SYM(RNODE_CONST(node)->nd_vid));
11140 RB_OBJ_SET_FROZEN_SHAREABLE(segments);
11141 ADD_INSN1(ret, node, opt_getconstant_path, segments);
11145 ADD_INSN(ret, node, putnil);
11146 ADD_INSN1(ret, node, putobject,
Qtrue);
11147 ADD_INSN1(ret, node, getconstant,
ID2SYM(RNODE_CONST(node)->nd_vid));
11151 ADD_INSN(ret, node, pop);
11157 ADD_INSN2(ret, node, getclassvariable,
11158 ID2SYM(RNODE_CVAR(node)->nd_vid),
11159 get_cvar_ic_value(iseq, RNODE_CVAR(node)->nd_vid));
11163 case NODE_NTH_REF:{
11165 if (!RNODE_NTH_REF(node)->nd_nth) {
11166 ADD_INSN(ret, node, putnil);
11169 ADD_INSN2(ret, node, getspecial,
INT2FIX(1) ,
11170 INT2FIX(RNODE_NTH_REF(node)->nd_nth << 1));
11174 case NODE_BACK_REF:{
11176 ADD_INSN2(ret, node, getspecial,
INT2FIX(1) ,
11177 INT2FIX(0x01 | (RNODE_BACK_REF(node)->nd_nth << 1)));
11184 CHECK(compile_match(iseq, ret, node, popped,
type));
11188 ADD_INSN1(ret, node, putobject, rb_node_sym_string_val(node));
11194 ADD_INSN1(ret, node, putobject, rb_node_line_lineno_val(node));
11198 case NODE_ENCODING:{
11200 ADD_INSN1(ret, node, putobject, rb_node_encoding_val(node));
11204 case NODE_INTEGER:{
11205 VALUE lit = rb_node_integer_literal_val(node);
11207 debugp_param(
"integer", lit);
11209 ADD_INSN1(ret, node, putobject, lit);
11215 VALUE lit = rb_node_float_literal_val(node);
11217 debugp_param(
"float", lit);
11219 ADD_INSN1(ret, node, putobject, lit);
11224 case NODE_RATIONAL:{
11225 VALUE lit = rb_node_rational_literal_val(node);
11227 debugp_param(
"rational", lit);
11229 ADD_INSN1(ret, node, putobject, lit);
11234 case NODE_IMAGINARY:{
11235 VALUE lit = rb_node_imaginary_literal_val(node);
11237 debugp_param(
"imaginary", lit);
11239 ADD_INSN1(ret, node, putobject, lit);
11246 debugp_param(
"nd_lit", get_string_value(node));
11248 VALUE lit = get_string_value(node);
11251 option->frozen_string_literal != ISEQ_FROZEN_STRING_LITERAL_DISABLED) {
11252 lit = rb_str_with_debug_created_info(lit, rb_iseq_path(iseq), line);
11253 RB_OBJ_SET_SHAREABLE(lit);
11255 switch (option->frozen_string_literal) {
11256 case ISEQ_FROZEN_STRING_LITERAL_UNSET:
11257 ADD_INSN1(ret, node, putchilledstring, lit);
11259 case ISEQ_FROZEN_STRING_LITERAL_DISABLED:
11260 ADD_INSN1(ret, node, putstring, lit);
11262 case ISEQ_FROZEN_STRING_LITERAL_ENABLED:
11263 ADD_INSN1(ret, node, putobject, lit);
11266 rb_bug(
"invalid frozen_string_literal");
11273 compile_dstr(iseq, ret, node);
11276 ADD_INSN(ret, node, pop);
11281 ADD_CALL_RECEIVER(ret, node);
11282 VALUE str = rb_node_str_string_val(node);
11283 ADD_INSN1(ret, node, putobject, str);
11285 ADD_CALL(ret, node, idBackquote,
INT2FIX(1));
11288 ADD_INSN(ret, node, pop);
11293 ADD_CALL_RECEIVER(ret, node);
11294 compile_dstr(iseq, ret, node);
11295 ADD_CALL(ret, node, idBackquote,
INT2FIX(1));
11298 ADD_INSN(ret, node, pop);
11303 CHECK(compile_evstr(iseq, ret, RNODE_EVSTR(node)->nd_body, popped));
11307 VALUE lit = rb_node_regx_string_val(node);
11308 RB_OBJ_SET_SHAREABLE(lit);
11309 ADD_INSN1(ret, node, putobject, lit);
11315 compile_dregx(iseq, ret, node, popped);
11318 int ic_index = body->ise_size++;
11320 block_iseq = NEW_CHILD_ISEQ(RNODE_ONCE(node)->nd_body, make_name_for_block(iseq), ISEQ_TYPE_PLAIN, line);
11322 ADD_INSN2(ret, node, once, block_iseq,
INT2FIX(ic_index));
11326 ADD_INSN(ret, node, pop);
11330 case NODE_ARGSCAT:{
11332 CHECK(COMPILE(ret,
"argscat head", RNODE_ARGSCAT(node)->nd_head));
11333 ADD_INSN1(ret, node, splatarray,
Qfalse);
11334 ADD_INSN(ret, node, pop);
11335 CHECK(COMPILE(ret,
"argscat body", RNODE_ARGSCAT(node)->nd_body));
11336 ADD_INSN1(ret, node, splatarray,
Qfalse);
11337 ADD_INSN(ret, node, pop);
11340 CHECK(COMPILE(ret,
"argscat head", RNODE_ARGSCAT(node)->nd_head));
11341 const NODE *body_node = RNODE_ARGSCAT(node)->nd_body;
11342 if (nd_type_p(body_node, NODE_LIST)) {
11343 CHECK(compile_array(iseq, ret, body_node, popped, FALSE) >= 0);
11346 CHECK(COMPILE(ret,
"argscat body", body_node));
11347 ADD_INSN(ret, node, concattoarray);
11352 case NODE_ARGSPUSH:{
11354 CHECK(COMPILE(ret,
"argspush head", RNODE_ARGSPUSH(node)->nd_head));
11355 ADD_INSN1(ret, node, splatarray,
Qfalse);
11356 ADD_INSN(ret, node, pop);
11357 CHECK(COMPILE_(ret,
"argspush body", RNODE_ARGSPUSH(node)->nd_body, popped));
11360 CHECK(COMPILE(ret,
"argspush head", RNODE_ARGSPUSH(node)->nd_head));
11361 const NODE *body_node = RNODE_ARGSPUSH(node)->nd_body;
11362 if (keyword_node_p(body_node)) {
11363 CHECK(COMPILE_(ret,
"array element", body_node, FALSE));
11364 ADD_INSN(ret, node, pushtoarraykwsplat);
11366 else if (static_literal_node_p(body_node, iseq,
false)) {
11367 ADD_INSN1(ret, body_node, putobject, static_literal_value(body_node, iseq));
11368 ADD_INSN1(ret, node, pushtoarray,
INT2FIX(1));
11371 CHECK(COMPILE_(ret,
"array element", body_node, FALSE));
11372 ADD_INSN1(ret, node, pushtoarray,
INT2FIX(1));
11378 CHECK(COMPILE(ret,
"splat", RNODE_SPLAT(node)->nd_head));
11379 ADD_INSN1(ret, node, splatarray,
Qtrue);
11382 ADD_INSN(ret, node, pop);
11387 ID mid = RNODE_DEFN(node)->nd_mid;
11388 const rb_iseq_t *method_iseq = NEW_ISEQ(RNODE_DEFN(node)->nd_defn,
11390 ISEQ_TYPE_METHOD, line);
11392 debugp_param(
"defn/iseq", rb_iseqw_new(method_iseq));
11393 ADD_INSN2(ret, node, definemethod,
ID2SYM(mid), method_iseq);
11397 ADD_INSN1(ret, node, putobject,
ID2SYM(mid));
11403 ID mid = RNODE_DEFS(node)->nd_mid;
11404 const rb_iseq_t * singleton_method_iseq = NEW_ISEQ(RNODE_DEFS(node)->nd_defn,
11406 ISEQ_TYPE_METHOD, line);
11408 debugp_param(
"defs/iseq", rb_iseqw_new(singleton_method_iseq));
11409 CHECK(COMPILE(ret,
"defs: recv", RNODE_DEFS(node)->nd_recv));
11410 ADD_INSN2(ret, node, definesmethod,
ID2SYM(mid), singleton_method_iseq);
11414 ADD_INSN1(ret, node, putobject,
ID2SYM(mid));
11419 ADD_INSN1(ret, node, putspecialobject,
INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
11420 ADD_INSN1(ret, node, putspecialobject,
INT2FIX(VM_SPECIAL_OBJECT_CBASE));
11421 CHECK(COMPILE(ret,
"alias arg1", RNODE_ALIAS(node)->nd_1st));
11422 CHECK(COMPILE(ret,
"alias arg2", RNODE_ALIAS(node)->nd_2nd));
11423 ADD_SEND(ret, node, id_core_set_method_alias,
INT2FIX(3));
11426 ADD_INSN(ret, node, pop);
11431 ADD_INSN1(ret, node, putspecialobject,
INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
11432 ADD_INSN1(ret, node, putobject,
ID2SYM(RNODE_VALIAS(node)->nd_alias));
11433 ADD_INSN1(ret, node, putobject,
ID2SYM(RNODE_VALIAS(node)->nd_orig));
11434 ADD_SEND(ret, node, id_core_set_variable_alias,
INT2FIX(2));
11437 ADD_INSN(ret, node, pop);
11444 for (
long i = 0; i < ary->len; i++) {
11445 ADD_INSN1(ret, node, putspecialobject,
INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
11446 ADD_INSN1(ret, node, putspecialobject,
INT2FIX(VM_SPECIAL_OBJECT_CBASE));
11447 CHECK(COMPILE(ret,
"undef arg", ary->data[i]));
11448 ADD_SEND(ret, node, id_core_undef_method,
INT2FIX(2));
11450 if (i < ary->
len - 1) {
11451 ADD_INSN(ret, node, pop);
11456 ADD_INSN(ret, node, pop);
11461 const rb_iseq_t *class_iseq = NEW_CHILD_ISEQ(RNODE_CLASS(node)->nd_body,
11462 rb_str_freeze(rb_sprintf(
"<class:%"PRIsVALUE
">", rb_id2str(get_node_colon_nd_mid(RNODE_CLASS(node)->nd_cpath)))),
11463 ISEQ_TYPE_CLASS, line);
11464 const int flags = VM_DEFINECLASS_TYPE_CLASS |
11465 (RNODE_CLASS(node)->nd_super ? VM_DEFINECLASS_FLAG_HAS_SUPERCLASS : 0) |
11466 compile_cpath(ret, iseq, RNODE_CLASS(node)->nd_cpath);
11468 CHECK(COMPILE(ret,
"super", RNODE_CLASS(node)->nd_super));
11469 ADD_INSN3(ret, node, defineclass,
ID2SYM(get_node_colon_nd_mid(RNODE_CLASS(node)->nd_cpath)), class_iseq,
INT2FIX(flags));
11473 ADD_INSN(ret, node, pop);
11478 const rb_iseq_t *module_iseq = NEW_CHILD_ISEQ(RNODE_MODULE(node)->nd_body,
11479 rb_str_freeze(rb_sprintf(
"<module:%"PRIsVALUE
">", rb_id2str(get_node_colon_nd_mid(RNODE_MODULE(node)->nd_cpath)))),
11480 ISEQ_TYPE_CLASS, line);
11481 const int flags = VM_DEFINECLASS_TYPE_MODULE |
11482 compile_cpath(ret, iseq, RNODE_MODULE(node)->nd_cpath);
11484 ADD_INSN (ret, node, putnil);
11485 ADD_INSN3(ret, node, defineclass,
ID2SYM(get_node_colon_nd_mid(RNODE_MODULE(node)->nd_cpath)), module_iseq,
INT2FIX(flags));
11489 ADD_INSN(ret, node, pop);
11495 const rb_iseq_t *singleton_class = NEW_ISEQ(RNODE_SCLASS(node)->nd_body, rb_fstring_lit(
"singleton class"),
11496 ISEQ_TYPE_CLASS, line);
11498 CHECK(COMPILE(ret,
"sclass#recv", RNODE_SCLASS(node)->nd_recv));
11499 ADD_INSN (ret, node, putnil);
11500 CONST_ID(singletonclass,
"singletonclass");
11504 int sclass_flags = VM_DEFINECLASS_TYPE_SINGLETON_CLASS;
11505 const NODE *recv = RNODE_SCLASS(node)->nd_recv;
11506 if (!(nd_type_p(recv, NODE_SELF) &&
11507 ISEQ_BODY(iseq)->
type == ISEQ_TYPE_CLASS) &&
11508 !cpath_const_p(recv)) {
11509 sclass_flags |= VM_DEFINECLASS_FLAG_DYNAMIC_CREF;
11512 ADD_INSN3(ret, node, defineclass,
11513 ID2SYM(singletonclass), singleton_class,
11518 ADD_INSN(ret, node, pop);
11523 CHECK(compile_colon2(iseq, ret, node, popped));
11526 CHECK(compile_colon3(iseq, ret, node, popped));
11529 CHECK(compile_dots(iseq, ret, node, popped, FALSE));
11532 CHECK(compile_dots(iseq, ret, node, popped, TRUE));
11536 LABEL *lend = NEW_LABEL(line);
11537 LABEL *ltrue = NEW_LABEL(line);
11538 LABEL *lfalse = NEW_LABEL(line);
11539 CHECK(compile_flip_flop(iseq, ret, node,
type == NODE_FLIP2,
11541 ADD_LABEL(ret, ltrue);
11542 ADD_INSN1(ret, node, putobject,
Qtrue);
11543 ADD_INSNL(ret, node, jump, lend);
11544 ADD_LABEL(ret, lfalse);
11545 ADD_INSN1(ret, node, putobject,
Qfalse);
11546 ADD_LABEL(ret, lend);
11551 ADD_INSN(ret, node, putself);
11557 ADD_INSN(ret, node, putnil);
11563 ADD_INSN1(ret, node, putobject,
Qtrue);
11569 ADD_INSN1(ret, node, putobject,
Qfalse);
11574 CHECK(compile_errinfo(iseq, ret, node, popped));
11578 CHECK(compile_defined_expr(iseq, ret, node,
Qtrue,
false));
11581 case NODE_POSTEXE:{
11585 int is_index = body->ise_size++;
11587 rb_iseq_new_with_callback_new_callback(build_postexe_iseq, RNODE_POSTEXE(node)->nd_body);
11589 NEW_CHILD_ISEQ_WITH_CALLBACK(ifunc, rb_fstring(make_name_for_block(iseq)), ISEQ_TYPE_BLOCK, line);
11591 ADD_INSN2(ret, node, once, once_iseq,
INT2FIX(is_index));
11595 ADD_INSN(ret, node, pop);
11600 CHECK(compile_kw_arg(iseq, ret, node, popped));
11603 compile_dstr(iseq, ret, node);
11605 ADD_INSN(ret, node, intern);
11608 ADD_INSN(ret, node, pop);
11612 case NODE_ATTRASGN:
11613 CHECK(compile_attrasgn(iseq, ret, node, popped));
11617 const rb_iseq_t *block = NEW_CHILD_ISEQ(RNODE_LAMBDA(node)->nd_body, make_name_for_block(iseq), ISEQ_TYPE_BLOCK, line);
11620 ADD_INSN1(ret, node, putspecialobject,
INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
11621 ADD_CALL_WITH_BLOCK(ret, node, idLambda, argc, block);
11625 ADD_INSN(ret, node, pop);
11630 UNKNOWN_NODE(
"iseq_compile_each", node, COMPILE_NG);
11645insn_data_length(
INSN *iobj)
11647 return insn_len(iobj->insn_id);
11651calc_sp_depth(
int depth,
INSN *insn)
11653 return comptime_insn_stack_increase(depth, insn->insn_id, insn->operands);
11657opobj_inspect(
VALUE obj)
11677insn_data_to_s_detail(
INSN *iobj)
11679 VALUE str = rb_sprintf(
"%-20s ", insn_name(iobj->insn_id));
11681 if (iobj->operands) {
11682 const char *types = insn_op_types(iobj->insn_id);
11685 for (j = 0; types[j]; j++) {
11686 char type = types[j];
11692 rb_str_catf(str, LABEL_FORMAT, lobj->label_no);
11710 VALUE v = OPERAND_AT(iobj, j);
11725 rb_str_catf(str,
"<ivc:%d>",
FIX2INT(OPERAND_AT(iobj, j)));
11728 rb_str_catf(str,
"<icvarc:%d>",
FIX2INT(OPERAND_AT(iobj, j)));
11731 rb_str_catf(str,
"<ise:%d>",
FIX2INT(OPERAND_AT(iobj, j)));
11737 if (vm_ci_mid(ci)) rb_str_catf(str,
"%"PRIsVALUE, rb_id2str(vm_ci_mid(ci)));
11738 rb_str_catf(str,
", %d>", vm_ci_argc(ci));
11746 void *func = (
void *)OPERAND_AT(iobj, j);
11749 if (dladdr(func, &info) && info.dli_sname) {
11754 rb_str_catf(str,
"<%p>", func);
11764 if (types[j + 1]) {
11775 dump_disasm_list_with_cursor(link, NULL, NULL);
11786 printf(
"-- raw disasm--------\n");
11789 if (curr) printf(curr == link ?
"*" :
" ");
11790 switch (link->type) {
11791 case ISEQ_ELEMENT_INSN:
11793 iobj = (
INSN *)link;
11794 str = insn_data_to_s_detail(iobj);
11795 printf(
" %04d %-65s(%4u)\n", pos,
StringValueCStr(str), iobj->insn_info.line_no);
11796 pos += insn_data_length(iobj);
11799 case ISEQ_ELEMENT_LABEL:
11801 lobj = (
LABEL *)link;
11802 printf(LABEL_FORMAT
" [sp: %d, unremovable: %d, refcnt: %d]%s\n", lobj->label_no, lobj->sp, lobj->unremovable, lobj->refcnt,
11803 dest == lobj ?
" <---" :
"");
11806 case ISEQ_ELEMENT_TRACE:
11809 printf(
" trace: %0x\n", trace->event);
11812 case ISEQ_ELEMENT_ADJUST:
11815 printf(
" adjust: [label: %d]\n", adjust->label ? adjust->label->label_no : -1);
11820 rb_raise(
rb_eSyntaxError,
"dump_disasm_list error: %d\n", (
int)link->type);
11824 printf(
"---------------------\n");
11829rb_insn_len(
VALUE insn)
11831 return insn_len(insn);
11835rb_insns_name(
int i)
11837 return insn_name(i);
11841rb_insns_name_array(
void)
11845 for (i = 0; i < VM_INSTRUCTION_SIZE; i++) {
11856 obj = rb_to_symbol_type(obj);
11858 if (st_lookup(labels_table, obj, &tmp) == 0) {
11859 label = NEW_LABEL(0);
11860 st_insert(labels_table, obj, (st_data_t)label);
11863 label = (
LABEL *)tmp;
11870get_exception_sym2type(
VALUE sym)
11872 static VALUE symRescue, symEnsure, symRetry;
11873 static VALUE symBreak, symRedo, symNext;
11875 if (symRescue == 0) {
11884 if (sym == symRescue)
return CATCH_TYPE_RESCUE;
11885 if (sym == symEnsure)
return CATCH_TYPE_ENSURE;
11886 if (sym == symRetry)
return CATCH_TYPE_RETRY;
11887 if (sym == symBreak)
return CATCH_TYPE_BREAK;
11888 if (sym == symRedo)
return CATCH_TYPE_REDO;
11889 if (sym == symNext)
return CATCH_TYPE_NEXT;
11890 rb_raise(
rb_eSyntaxError,
"invalid exception symbol: %+"PRIsVALUE, sym);
11903 LABEL *lstart, *lend, *lcont;
11918 lstart = register_label(iseq, labels_table,
RARRAY_AREF(v, 2));
11919 lend = register_label(iseq, labels_table,
RARRAY_AREF(v, 3));
11920 lcont = register_label(iseq, labels_table,
RARRAY_AREF(v, 4));
11924 if (
type == CATCH_TYPE_RESCUE ||
11925 type == CATCH_TYPE_BREAK ||
11926 type == CATCH_TYPE_NEXT) {
11932 ADD_CATCH_ENTRY(
type, lstart, lend, eiseq, lcont);
11940insn_make_insn_table(
void)
11944 table = st_init_numtable_with_size(VM_INSTRUCTION_SIZE);
11946 for (i=0; i<VM_INSTRUCTION_SIZE; i++) {
11960 iseqw = rb_iseq_load(op, (
VALUE)iseq,
Qnil);
11962 else if (
CLASS_OF(op) == rb_cISeq) {
11969 loaded_iseq = rb_iseqw_to_iseq(iseqw);
11970 return loaded_iseq;
11978 unsigned int flag = 0;
11989 if (!
NIL_P(vorig_argc)) orig_argc =
FIX2INT(vorig_argc);
11991 if (!
NIL_P(vkw_arg)) {
11994 size_t n = rb_callinfo_kwarg_bytes(
len);
11997 kw_arg->references = 0;
11998 kw_arg->keyword_len =
len;
11999 for (i = 0; i <
len; i++) {
12002 kw_arg->keywords[i] = kw;
12007 const struct rb_callinfo *ci = new_callinfo(iseq, mid, orig_argc, flag, kw_arg, (flag & VM_CALL_ARGS_SIMPLE) == 0);
12013event_name_to_flag(
VALUE sym)
12015#define CHECK_EVENT(ev) if (sym == ID2SYM(rb_intern_const(#ev))) return ev;
12036 int line_no = 0, node_id = -1, insn_idx = 0;
12037 int ret = COMPILE_OK;
12042 static struct st_table *insn_table;
12044 if (insn_table == 0) {
12045 insn_table = insn_make_insn_table();
12048 for (i=0; i<
len; i++) {
12054 ADD_TRACE(anchor, event);
12057 LABEL *label = register_label(iseq, labels_table, obj);
12058 ADD_LABEL(anchor, label);
12075 if (st_lookup(insn_table, (st_data_t)insn, &insn_id) == 0) {
12077 COMPILE_ERROR(iseq, line_no,
12078 "unknown instruction: %+"PRIsVALUE, insn);
12083 if (argc != insn_len((
VALUE)insn_id)-1) {
12084 COMPILE_ERROR(iseq, line_no,
12085 "operand size mismatch");
12091 argv = compile_data_calloc2_type(iseq,
VALUE, argc);
12096 (
enum ruby_vminsn_type)insn_id, argc, argv));
12098 for (j=0; j<argc; j++) {
12100 switch (insn_op_type((
VALUE)insn_id, j)) {
12102 LABEL *label = register_label(iseq, labels_table, op);
12103 argv[j] = (
VALUE)label;
12118 VALUE v = (
VALUE)iseq_build_load_iseq(iseq, op);
12129 if (
NUM2UINT(op) >= ISEQ_BODY(iseq)->ise_size) {
12130 ISEQ_BODY(iseq)->ise_size =
NUM2INT(op) + 1;
12136 op = rb_to_array_type(op);
12140 sym = rb_to_symbol_type(sym);
12145 argv[j] = segments;
12147 ISEQ_BODY(iseq)->ic_size++;
12152 if (
NUM2UINT(op) >= ISEQ_BODY(iseq)->ivc_size) {
12153 ISEQ_BODY(iseq)->ivc_size =
NUM2INT(op) + 1;
12158 if (
NUM2UINT(op) >= ISEQ_BODY(iseq)->icvarc_size) {
12159 ISEQ_BODY(iseq)->icvarc_size =
NUM2INT(op) + 1;
12163 argv[j] = iseq_build_callinfo_from_hash(iseq, op);
12166 argv[j] = rb_to_symbol_type(op);
12173 RHASH_TBL_RAW(map)->type = &cdhash_type;
12174 op = rb_to_array_type(op);
12179 register_label(iseq, labels_table, sym);
12180 rb_hash_aset(map, key, (
VALUE)label | 1);
12190#if SIZEOF_VALUE <= SIZEOF_LONG
12195 argv[j] = (
VALUE)funcptr;
12206 (
enum ruby_vminsn_type)insn_id, argc, NULL));
12210 rb_raise(
rb_eTypeError,
"unexpected object for instruction");
12215 validate_labels(iseq, labels_table);
12216 if (!ret)
return ret;
12217 return iseq_setup(iseq, anchor);
12220#define CHECK_ARRAY(v) rb_to_array_type(v)
12221#define CHECK_SYMBOL(v) rb_to_symbol_type(v)
12226 VALUE val = rb_hash_aref(param, sym);
12231 else if (!
NIL_P(val)) {
12232 rb_raise(
rb_eTypeError,
"invalid %+"PRIsVALUE
" Fixnum: %+"PRIsVALUE,
12238static const struct rb_iseq_param_keyword *
12244 VALUE key, sym, default_val;
12247 struct rb_iseq_param_keyword *keyword =
ZALLOC(
struct rb_iseq_param_keyword);
12249 ISEQ_BODY(iseq)->param.flags.has_kw = TRUE;
12251 keyword->num =
len;
12252#define SYM(s) ID2SYM(rb_intern_const(#s))
12253 (void)int_param(&keyword->bits_start, params, SYM(kwbits));
12254 i = keyword->bits_start - keyword->num;
12255 ids = (
ID *)&ISEQ_BODY(iseq)->local_table[i];
12259 for (i = 0; i <
len; i++) {
12263 goto default_values;
12266 keyword->required_num++;
12270 default_len =
len - i;
12271 if (default_len == 0) {
12272 keyword->table = ids;
12275 else if (default_len < 0) {
12281 for (j = 0; i <
len; i++, j++) {
12295 rb_raise(
rb_eTypeError,
"keyword default has unsupported len %+"PRIsVALUE, key);
12301 keyword->table = ids;
12302 keyword->default_values = dvs;
12308iseq_insn_each_object_mark_and_move(
VALUE * obj,
VALUE _)
12310 rb_gc_mark_and_move(obj);
12317 size_t size =
sizeof(
INSN);
12318 size_t align = ALIGNMENT_SIZE_OF(
INSN);
12319 unsigned int pos = 0;
12322 size_t padding = calc_padding((
void *)&storage->buff[pos], align);
12323 size_t offset = pos + size + padding;
12324 if (offset > storage->size || offset > storage->pos) {
12326 storage = storage->next;
12329 pos += (int)padding;
12331 iobj = (
INSN *)&storage->buff[pos];
12333 if (iobj->operands) {
12334 iseq_insn_each_markable_object(iobj, iseq_insn_each_object_mark_and_move, (
VALUE)0);
12354#define SYM(s) ID2SYM(rb_intern_const(#s))
12356 unsigned int arg_size, local_size, stack_max;
12358 struct st_table *labels_table = st_init_numtable();
12360 VALUE arg_opt_labels = rb_hash_aref(params, SYM(opt));
12361 VALUE keywords = rb_hash_aref(params, SYM(keyword));
12363 DECL_ANCHOR(anchor);
12364 INIT_ANCHOR(anchor);
12367 ISEQ_BODY(iseq)->local_table_size =
len;
12368 ISEQ_BODY(iseq)->local_table = tbl =
len > 0 ? (
ID *)
ALLOC_N(
ID, ISEQ_BODY(iseq)->local_table_size) : NULL;
12370 for (i = 0; i <
len; i++) {
12373 if (sym_arg_rest == lv) {
12381#define INT_PARAM(F) int_param(&ISEQ_BODY(iseq)->param.F, params, SYM(F))
12382 if (INT_PARAM(lead_num)) {
12383 ISEQ_BODY(iseq)->param.flags.has_lead = TRUE;
12385 if (INT_PARAM(post_num)) ISEQ_BODY(iseq)->param.flags.has_post = TRUE;
12386 if (INT_PARAM(post_start)) ISEQ_BODY(iseq)->param.flags.has_post = TRUE;
12387 if (INT_PARAM(rest_start)) ISEQ_BODY(iseq)->param.flags.has_rest = TRUE;
12388 if (INT_PARAM(block_start)) ISEQ_BODY(iseq)->param.flags.has_block = TRUE;
12391#define INT_PARAM(F) F = (int_param(&x, misc, SYM(F)) ? (unsigned int)x : 0)
12393 INT_PARAM(arg_size);
12394 INT_PARAM(local_size);
12395 INT_PARAM(stack_max);
12400#ifdef USE_ISEQ_NODE_ID
12401 node_ids = rb_hash_aref(misc,
ID2SYM(rb_intern(
"node_ids")));
12409 ISEQ_BODY(iseq)->param.flags.has_opt = !!(
len - 1 >= 0);
12411 if (ISEQ_BODY(iseq)->param.flags.has_opt) {
12414 for (i = 0; i <
len; i++) {
12416 LABEL *label = register_label(iseq, labels_table, ent);
12417 opt_table[i] = (
VALUE)label;
12420 ISEQ_BODY(iseq)->param.opt_num =
len - 1;
12421 ISEQ_BODY(iseq)->param.opt_table = opt_table;
12424 else if (!
NIL_P(arg_opt_labels)) {
12425 rb_raise(
rb_eTypeError,
":opt param is not an array: %+"PRIsVALUE,
12430 ISEQ_BODY(iseq)->param.keyword = iseq_build_kw(iseq, params, keywords);
12432 else if (!
NIL_P(keywords)) {
12433 rb_raise(
rb_eTypeError,
":keywords param is not an array: %+"PRIsVALUE,
12437 if (
Qtrue == rb_hash_aref(params, SYM(ambiguous_param0))) {
12438 ISEQ_BODY(iseq)->param.flags.ambiguous_param0 = TRUE;
12441 if (
Qtrue == rb_hash_aref(params, SYM(use_block))) {
12442 ISEQ_BODY(iseq)->param.flags.use_block = TRUE;
12445 if (int_param(&i, params, SYM(kwrest))) {
12446 struct rb_iseq_param_keyword *keyword = (
struct rb_iseq_param_keyword *)ISEQ_BODY(iseq)->param.keyword;
12447 if (keyword == NULL) {
12448 ISEQ_BODY(iseq)->param.keyword = keyword =
ZALLOC(
struct rb_iseq_param_keyword);
12450 keyword->rest_start = i;
12451 ISEQ_BODY(iseq)->param.flags.has_kwrest = TRUE;
12454 iseq_calc_param_size(iseq);
12457 iseq_build_from_ary_exception(iseq, labels_table, exception);
12460 iseq_build_from_ary_body(iseq, anchor, body, node_ids, labels_wrapper);
12462 ISEQ_BODY(iseq)->param.size = arg_size;
12463 ISEQ_BODY(iseq)->local_table_size = local_size;
12464 ISEQ_BODY(iseq)->stack_max = stack_max;
12474 while (body->type == ISEQ_TYPE_BLOCK ||
12475 body->type == ISEQ_TYPE_RESCUE ||
12476 body->type == ISEQ_TYPE_ENSURE ||
12477 body->type == ISEQ_TYPE_EVAL ||
12478 body->type == ISEQ_TYPE_MAIN
12482 for (i = 0; i < body->local_table_size; i++) {
12483 if (body->local_table[i] ==
id) {
12487 iseq = body->parent_iseq;
12488 body = ISEQ_BODY(iseq);
12501 for (i=0; i<body->local_table_size; i++) {
12502 if (body->local_table[i] ==
id) {
12512#ifndef IBF_ISEQ_DEBUG
12513#define IBF_ISEQ_DEBUG 0
12516#ifndef IBF_ISEQ_ENABLE_LOCAL_BUFFER
12517#define IBF_ISEQ_ENABLE_LOCAL_BUFFER 0
12520typedef uint32_t ibf_offset_t;
12521#define IBF_OFFSET(ptr) ((ibf_offset_t)(VALUE)(ptr))
12523#define IBF_MAJOR_VERSION ISEQ_MAJOR_VERSION
12525#define IBF_DEVEL_VERSION 5
12526#define IBF_MINOR_VERSION (ISEQ_MINOR_VERSION * 10000 + IBF_DEVEL_VERSION)
12528#define IBF_MINOR_VERSION ISEQ_MINOR_VERSION
12531static const char IBF_ENDIAN_MARK =
12532#ifdef WORDS_BIGENDIAN
12541 uint32_t major_version;
12542 uint32_t minor_version;
12544 uint32_t extra_size;
12546 uint32_t iseq_list_size;
12547 uint32_t global_object_list_size;
12548 ibf_offset_t iseq_list_offset;
12549 ibf_offset_t global_object_list_offset;
12570 unsigned int obj_list_size;
12571 ibf_offset_t obj_list_offset;
12590pinned_list_mark(
void *ptr)
12594 for (i = 0; i < list->size; i++) {
12595 if (list->buffer[i]) {
12596 rb_gc_mark(list->buffer[i]);
12612pinned_list_fetch(
VALUE list,
long offset)
12618 if (offset >= ptr->size) {
12619 rb_raise(
rb_eIndexError,
"object index out of range: %ld", offset);
12622 return ptr->buffer[offset];
12626pinned_list_store(
VALUE list,
long offset,
VALUE object)
12632 if (offset >= ptr->size) {
12633 rb_raise(
rb_eIndexError,
"object index out of range: %ld", offset);
12640pinned_list_new(
long size)
12642 size_t memsize = offsetof(
struct pinned_list, buffer) + size *
sizeof(
VALUE);
12643 VALUE obj_list = rb_data_typed_object_zalloc(0, memsize, &pinned_list_type);
12644 struct pinned_list * ptr = RTYPEDDATA_GET_DATA(obj_list);
12650ibf_dump_pos(
struct ibf_dump *dump)
12652 long pos = RSTRING_LEN(dump->current_buffer->str);
12653#if SIZEOF_LONG > SIZEOF_INT
12654 if (pos >= UINT_MAX) {
12658 return (
unsigned int)pos;
12662ibf_dump_align(
struct ibf_dump *dump,
size_t align)
12664 ibf_offset_t pos = ibf_dump_pos(dump);
12666 static const char padding[
sizeof(
VALUE)];
12667 size_t size = align - ((size_t)pos % align);
12668#if SIZEOF_LONG > SIZEOF_INT
12669 if (pos + size >= UINT_MAX) {
12673 for (; size >
sizeof(padding); size -=
sizeof(padding)) {
12674 rb_str_cat(dump->current_buffer->str, padding,
sizeof(padding));
12676 rb_str_cat(dump->current_buffer->str, padding, size);
12681ibf_dump_write(
struct ibf_dump *dump,
const void *buff,
unsigned long size)
12683 ibf_offset_t pos = ibf_dump_pos(dump);
12684#if SIZEOF_LONG > SIZEOF_INT
12686 if (size >= UINT_MAX || pos + size >= UINT_MAX) {
12690 rb_str_cat(dump->current_buffer->str, (
const char *)buff, size);
12695ibf_dump_write_byte(
struct ibf_dump *dump,
unsigned char byte)
12697 return ibf_dump_write(dump, &
byte,
sizeof(
unsigned char));
12701ibf_dump_overwrite(
struct ibf_dump *dump,
void *buff,
unsigned int size,
long offset)
12703 VALUE str = dump->current_buffer->str;
12704 char *ptr = RSTRING_PTR(str);
12705 if ((
unsigned long)(size + offset) > (
unsigned long)RSTRING_LEN(str))
12706 rb_bug(
"ibf_dump_overwrite: overflow");
12707 memcpy(ptr + offset, buff, size);
12711ibf_load_ptr(
const struct ibf_load *load, ibf_offset_t *offset,
int size)
12713 ibf_offset_t beg = *offset;
12715 return load->current_buffer->buff + beg;
12719ibf_load_alloc(
const struct ibf_load *load, ibf_offset_t offset,
size_t x,
size_t y)
12721 void *buff = ruby_xmalloc2(x, y);
12722 size_t size = x * y;
12723 memcpy(buff, load->current_buffer->buff + offset, size);
12727#define IBF_W_ALIGN(type) (RUBY_ALIGNOF(type) > 1 ? ibf_dump_align(dump, RUBY_ALIGNOF(type)) : (void)0)
12729#define IBF_W(b, type, n) (IBF_W_ALIGN(type), (type *)(VALUE)IBF_WP(b, type, n))
12730#define IBF_WV(variable) ibf_dump_write(dump, &(variable), sizeof(variable))
12731#define IBF_WP(b, type, n) ibf_dump_write(dump, (b), sizeof(type) * (n))
12732#define IBF_R(val, type, n) (type *)ibf_load_alloc(load, IBF_OFFSET(val), sizeof(type), (n))
12733#define IBF_ZERO(variable) memset(&(variable), 0, sizeof(variable))
12736ibf_table_lookup(
struct st_table *table, st_data_t key)
12740 if (st_lookup(table, key, &val)) {
12749ibf_table_find_or_insert(
struct st_table *table, st_data_t key)
12751 int index = ibf_table_lookup(table, key);
12754 index = (int)table->num_entries;
12755 st_insert(table, key, (st_data_t)index);
12763static void ibf_dump_object_list(
struct ibf_dump *dump, ibf_offset_t *obj_list_offset,
unsigned int *obj_list_size);
12769ibf_dump_object_table_new(
void)
12771 st_table *obj_table = st_init_numtable();
12772 st_insert(obj_table, (st_data_t)
Qnil, (st_data_t)0);
12780 return ibf_table_find_or_insert(dump->current_buffer->obj_table, (st_data_t)obj);
12786 if (
id == 0 || rb_id2name(
id) == NULL) {
12789 return ibf_dump_object(dump,
rb_id2sym(
id));
12793ibf_load_id(
const struct ibf_load *load,
const ID id_index)
12795 if (id_index == 0) {
12798 VALUE sym = ibf_load_object(load, id_index);
12808static ibf_offset_t ibf_dump_iseq_each(
struct ibf_dump *dump,
const rb_iseq_t *iseq);
12813 if (iseq == NULL) {
12817 return ibf_table_find_or_insert(dump->iseq_table, (st_data_t)iseq);
12821static unsigned char
12822ibf_load_byte(
const struct ibf_load *load, ibf_offset_t *offset)
12824 if (*offset >= load->current_buffer->size) { rb_raise(
rb_eRuntimeError,
"invalid bytecode"); }
12825 return (
unsigned char)load->current_buffer->buff[(*offset)++];
12841 if (
sizeof(
VALUE) > 8 || CHAR_BIT != 8) {
12842 ibf_dump_write(dump, &x,
sizeof(
VALUE));
12846 enum { max_byte_length =
sizeof(
VALUE) + 1 };
12848 unsigned char bytes[max_byte_length];
12851 for (n = 0; n <
sizeof(
VALUE) && (x >> (7 - n)); n++, x >>= 8) {
12852 bytes[max_byte_length - 1 - n] = (
unsigned char)x;
12858 bytes[max_byte_length - 1 - n] = (
unsigned char)x;
12861 ibf_dump_write(dump, bytes + max_byte_length - n, n);
12865ibf_load_small_value(
const struct ibf_load *load, ibf_offset_t *offset)
12867 if (
sizeof(
VALUE) > 8 || CHAR_BIT != 8) {
12868 union {
char s[
sizeof(
VALUE)];
VALUE v; } x;
12870 memcpy(x.s, load->current_buffer->buff + *offset,
sizeof(
VALUE));
12871 *offset +=
sizeof(
VALUE);
12876 enum { max_byte_length =
sizeof(
VALUE) + 1 };
12878 const unsigned char *buffer = (
const unsigned char *)load->current_buffer->buff;
12879 const unsigned char c = buffer[*offset];
12883 c == 0 ? 9 : ntz_int32(c) + 1;
12886 if (*offset + n > load->current_buffer->size) {
12891 for (i = 1; i < n; i++) {
12893 x |= (
VALUE)buffer[*offset + i];
12907 ibf_dump_write_small_value(dump, (
VALUE)bf->index);
12909 size_t len = strlen(bf->name);
12910 ibf_dump_write_small_value(dump, (
VALUE)
len);
12911 ibf_dump_write(dump, bf->name,
len);
12915ibf_load_builtin(
const struct ibf_load *load, ibf_offset_t *offset)
12917 int i = (int)ibf_load_small_value(load, offset);
12918 int len = (int)ibf_load_small_value(load, offset);
12919 const char *name = (
char *)ibf_load_ptr(load, offset,
len);
12922 fprintf(stderr,
"%.*s!!\n",
len, name);
12926 if (table == NULL) rb_raise(rb_eArgError,
"builtin function table is not provided");
12927 if (strncmp(table[i].name, name,
len) != 0) {
12928 rb_raise(rb_eArgError,
"builtin function index (%d) mismatch (expect %s but %s)", i, name, table[i].name);
12939 const int iseq_size = body->iseq_size;
12941 const VALUE *orig_code = rb_iseq_original_iseq(iseq);
12943 ibf_offset_t offset = ibf_dump_pos(dump);
12945 for (code_index=0; code_index<iseq_size;) {
12946 const VALUE insn = orig_code[code_index++];
12947 const char *types = insn_op_types(insn);
12952 ibf_dump_write_small_value(dump, insn);
12955 for (op_index=0; types[op_index]; op_index++, code_index++) {
12956 VALUE op = orig_code[code_index];
12959 switch (types[op_index]) {
12962 wv = ibf_dump_object(dump, op);
12971 wv = ibf_dump_object(dump, arr);
12979 wv = is - ISEQ_IS_ENTRY_START(body, types[op_index]);
12987 wv = ibf_dump_id(dump, (
ID)op);
12999 ibf_dump_write_small_value(dump, wv);
13009ibf_load_code(
const struct ibf_load *load,
rb_iseq_t *iseq, ibf_offset_t bytecode_offset, ibf_offset_t bytecode_size,
unsigned int iseq_size)
13012 unsigned int code_index;
13013 ibf_offset_t reading_pos = bytecode_offset;
13017 struct rb_call_data *cd_entries = load_body->call_data;
13020 load_body->iseq_encoded = code;
13021 load_body->iseq_size = 0;
13023 iseq_bits_t * mark_offset_bits;
13025 iseq_bits_t tmp[1] = {0};
13027 if (ISEQ_MBITS_BUFLEN(iseq_size) == 1) {
13028 mark_offset_bits = tmp;
13031 mark_offset_bits =
ZALLOC_N(iseq_bits_t, ISEQ_MBITS_BUFLEN(iseq_size));
13033 bool needs_bitmap =
false;
13035 for (code_index=0; code_index<iseq_size;) {
13037 const VALUE insn = code[code_index] = ibf_load_small_value(load, &reading_pos);
13038 const char *types = insn_op_types(insn);
13044 for (op_index=0; types[op_index]; op_index++, code_index++) {
13045 const char operand_type = types[op_index];
13046 switch (operand_type) {
13049 VALUE op = ibf_load_small_value(load, &reading_pos);
13050 VALUE v = ibf_load_object(load, op);
13051 code[code_index] = v;
13054 ISEQ_MBITS_SET(mark_offset_bits, code_index);
13055 needs_bitmap =
true;
13061 VALUE op = ibf_load_small_value(load, &reading_pos);
13062 VALUE v = ibf_load_object(load, op);
13063 v = rb_hash_dup(v);
13064 RHASH_TBL_RAW(v)->type = &cdhash_type;
13066 RB_OBJ_SET_SHAREABLE(freeze_hide_obj(v));
13071 pinned_list_store(load->current_buffer->obj_list, (
long)op, v);
13073 code[code_index] = v;
13074 ISEQ_MBITS_SET(mark_offset_bits, code_index);
13076 needs_bitmap =
true;
13081 VALUE op = (
VALUE)ibf_load_small_value(load, &reading_pos);
13083 code[code_index] = v;
13086 ISEQ_MBITS_SET(mark_offset_bits, code_index);
13087 needs_bitmap =
true;
13093 VALUE op = ibf_load_small_value(load, &reading_pos);
13094 VALUE arr = ibf_load_object(load, op);
13096 IC ic = &ISEQ_IS_IC_ENTRY(load_body, ic_index++);
13097 ic->
segments = array_to_idlist(arr);
13099 code[code_index] = (
VALUE)ic;
13106 unsigned int op = (
unsigned int)ibf_load_small_value(load, &reading_pos);
13108 ISE ic = ISEQ_IS_ENTRY_START(load_body, operand_type) + op;
13109 code[code_index] = (
VALUE)ic;
13111 if (operand_type == TS_IVC) {
13114 if (insn == BIN(setinstancevariable)) {
13115 ID iv_name = (
ID)code[code_index - 1];
13116 cache->iv_set_name = iv_name;
13119 cache->iv_set_name = 0;
13122 vm_ic_attr_index_initialize(cache, INVALID_SHAPE_ID);
13129 code[code_index] = (
VALUE)cd_entries++;
13134 VALUE op = ibf_load_small_value(load, &reading_pos);
13135 code[code_index] = ibf_load_id(load, (
ID)(
VALUE)op);
13142 code[code_index] = (
VALUE)ibf_load_builtin(load, &reading_pos);
13145 code[code_index] = ibf_load_small_value(load, &reading_pos);
13149 if (insn_len(insn) != op_index+1) {
13154 load_body->iseq_size = code_index;
13156 if (ISEQ_MBITS_BUFLEN(load_body->iseq_size) == 1) {
13157 load_body->mark_bits.single = mark_offset_bits[0];
13160 if (needs_bitmap) {
13161 load_body->mark_bits.list = mark_offset_bits;
13164 load_body->mark_bits.list = 0;
13165 SIZED_FREE_N(mark_offset_bits, ISEQ_MBITS_BUFLEN(iseq_size));
13170 RUBY_ASSERT(reading_pos == bytecode_offset + bytecode_size);
13177 int opt_num = ISEQ_BODY(iseq)->param.opt_num;
13180 IBF_W_ALIGN(
VALUE);
13181 return ibf_dump_write(dump, ISEQ_BODY(iseq)->param.opt_table,
sizeof(
VALUE) * (opt_num + 1));
13184 return ibf_dump_pos(dump);
13189ibf_load_param_opt_table(
const struct ibf_load *load, ibf_offset_t opt_table_offset,
int opt_num)
13193 MEMCPY(table, load->current_buffer->buff + opt_table_offset,
VALUE, opt_num+1);
13204 const struct rb_iseq_param_keyword *kw = ISEQ_BODY(iseq)->param.keyword;
13207 struct rb_iseq_param_keyword dump_kw = *kw;
13208 int dv_num = kw->num - kw->required_num;
13213 for (i=0; i<kw->num; i++) ids[i] = (
ID)ibf_dump_id(dump, kw->table[i]);
13214 for (i=0; i<dv_num; i++) dvs[i] = (
VALUE)ibf_dump_object(dump, kw->default_values[i]);
13216 dump_kw.table = IBF_W(ids,
ID, kw->num);
13217 dump_kw.default_values = IBF_W(dvs,
VALUE, dv_num);
13218 IBF_W_ALIGN(
struct rb_iseq_param_keyword);
13219 return ibf_dump_write(dump, &dump_kw,
sizeof(
struct rb_iseq_param_keyword) * 1);
13226static const struct rb_iseq_param_keyword *
13227ibf_load_param_keyword(
const struct ibf_load *load, ibf_offset_t param_keyword_offset)
13229 if (param_keyword_offset) {
13230 struct rb_iseq_param_keyword *kw = IBF_R(param_keyword_offset,
struct rb_iseq_param_keyword, 1);
13231 int dv_num = kw->num - kw->required_num;
13232 VALUE *dvs = dv_num ? IBF_R(kw->default_values,
VALUE, dv_num) : NULL;
13235 for (i=0; i<dv_num; i++) {
13236 dvs[i] = ibf_load_object(load, dvs[i]);
13242 kw->default_values = dvs;
13253 ibf_offset_t offset = ibf_dump_pos(dump);
13257 for (i = 0; i < ISEQ_BODY(iseq)->insns_info.size; i++) {
13258 ibf_dump_write_small_value(dump, entries[i].line_no);
13259#ifdef USE_ISEQ_NODE_ID
13260 ibf_dump_write_small_value(dump, entries[i].node_id);
13262 ibf_dump_write_small_value(dump, entries[i].events);
13269ibf_load_insns_info_body(
const struct ibf_load *load, ibf_offset_t body_offset,
unsigned int size)
13271 ibf_offset_t reading_pos = body_offset;
13275 for (i = 0; i < size; i++) {
13276 entries[i].line_no = (int)ibf_load_small_value(load, &reading_pos);
13277#ifdef USE_ISEQ_NODE_ID
13278 entries[i].node_id = (int)ibf_load_small_value(load, &reading_pos);
13280 entries[i].events = (
rb_event_flag_t)ibf_load_small_value(load, &reading_pos);
13287ibf_dump_insns_info_positions(
struct ibf_dump *dump,
const unsigned int *positions,
unsigned int size)
13289 ibf_offset_t offset = ibf_dump_pos(dump);
13291 unsigned int last = 0;
13293 for (i = 0; i < size; i++) {
13294 ibf_dump_write_small_value(dump, positions[i] - last);
13295 last = positions[i];
13301static unsigned int *
13302ibf_load_insns_info_positions(
const struct ibf_load *load, ibf_offset_t positions_offset,
unsigned int size)
13304 ibf_offset_t reading_pos = positions_offset;
13305 unsigned int *positions =
ALLOC_N(
unsigned int, size);
13307 unsigned int last = 0;
13309 for (i = 0; i < size; i++) {
13310 positions[i] = last + (
unsigned int)ibf_load_small_value(load, &reading_pos);
13311 last = positions[i];
13321 const int size = body->local_table_size;
13325 for (i=0; i<size; i++) {
13326 VALUE v = ibf_dump_id(dump, body->local_table[i]);
13329 v = ibf_dump_object(dump,
ULONG2NUM(body->local_table[i]));
13335 return ibf_dump_write(dump, table,
sizeof(
ID) * size);
13339ibf_load_local_table(
const struct ibf_load *load, ibf_offset_t local_table_offset,
int size)
13342 ID *table = IBF_R(local_table_offset,
ID, size);
13345 for (i=0; i<size; i++) {
13346 table[i] = ibf_load_id(load, table[i]);
13349 if (size == 1 && table[0] == idERROR_INFO) {
13350 ruby_xfree_sized(table,
sizeof(
ID) * size);
13351 return rb_iseq_shared_exc_local_tbl;
13366 const int size = body->local_table_size;
13367 IBF_W_ALIGN(
enum lvar_state);
13368 return ibf_dump_write(dump, body->lvar_states,
sizeof(
enum lvar_state) * (body->lvar_states ? size : 0));
13371static enum lvar_state *
13372ibf_load_lvar_states(
const struct ibf_load *load, ibf_offset_t lvar_states_offset,
int size,
const ID *local_table)
13374 if (local_table == rb_iseq_shared_exc_local_tbl ||
13379 enum lvar_state *states = IBF_R(lvar_states_offset,
enum lvar_state, size);
13390 int *iseq_indices =
ALLOCA_N(
int, table->size);
13393 for (i=0; i<table->size; i++) {
13394 iseq_indices[i] = ibf_dump_iseq(dump, table->entries[i].iseq);
13397 const ibf_offset_t offset = ibf_dump_pos(dump);
13399 for (i=0; i<table->size; i++) {
13400 ibf_dump_write_small_value(dump, iseq_indices[i]);
13401 ibf_dump_write_small_value(dump, table->entries[i].type);
13402 ibf_dump_write_small_value(dump, table->entries[i].start);
13403 ibf_dump_write_small_value(dump, table->entries[i].end);
13404 ibf_dump_write_small_value(dump, table->entries[i].cont);
13405 ibf_dump_write_small_value(dump, table->entries[i].sp);
13410 return ibf_dump_pos(dump);
13415ibf_load_catch_table(
const struct ibf_load *load, ibf_offset_t catch_table_offset,
unsigned int size,
const rb_iseq_t *parent_iseq)
13418 struct iseq_catch_table *table = ruby_xcalloc(1, iseq_catch_table_bytes(size));
13419 table->size = size;
13420 ISEQ_BODY(parent_iseq)->catch_table = table;
13422 ibf_offset_t reading_pos = catch_table_offset;
13425 for (i=0; i<table->size; i++) {
13426 int iseq_index = (int)ibf_load_small_value(load, &reading_pos);
13427 table->entries[i].type = (
enum rb_catch_type)ibf_load_small_value(load, &reading_pos);
13428 table->entries[i].start = (
unsigned int)ibf_load_small_value(load, &reading_pos);
13429 table->entries[i].end = (
unsigned int)ibf_load_small_value(load, &reading_pos);
13430 table->entries[i].cont = (
unsigned int)ibf_load_small_value(load, &reading_pos);
13431 table->entries[i].sp = (
unsigned int)ibf_load_small_value(load, &reading_pos);
13434 RB_OBJ_WRITE(parent_iseq, UNALIGNED_MEMBER_PTR(&table->entries[i], iseq), catch_iseq);
13438 ISEQ_BODY(parent_iseq)->catch_table = NULL;
13446 const unsigned int ci_size = body->ci_size;
13449 ibf_offset_t offset = ibf_dump_pos(dump);
13453 for (i = 0; i < ci_size; i++) {
13456 ibf_dump_write_small_value(dump, ibf_dump_id(dump, vm_ci_mid(ci)));
13457 ibf_dump_write_small_value(dump, vm_ci_flag(ci));
13458 ibf_dump_write_small_value(dump, vm_ci_argc(ci));
13462 int len = kwarg->keyword_len;
13463 ibf_dump_write_small_value(dump,
len);
13464 for (
int j=0; j<
len; j++) {
13465 VALUE keyword = ibf_dump_object(dump, kwarg->keywords[j]);
13466 ibf_dump_write_small_value(dump, keyword);
13470 ibf_dump_write_small_value(dump, 0);
13475 ibf_dump_write_small_value(dump, (
VALUE)-1);
13493static enum rb_id_table_iterator_result
13494store_outer_variable(
ID id,
VALUE val,
void *dump)
13499 pair->name = rb_id2str(
id);
13501 return ID_TABLE_CONTINUE;
13505outer_variable_cmp(
const void *a,
const void *b,
void *arg)
13513 else if (!bp->name) {
13523 struct rb_id_table * ovs = ISEQ_BODY(iseq)->outer_variables;
13525 ibf_offset_t offset = ibf_dump_pos(dump);
13527 size_t size = ovs ? rb_id_table_size(ovs) : 0;
13528 ibf_dump_write_small_value(dump, (
VALUE)size);
13537 rb_id_table_foreach(ovs, store_outer_variable, ovlist);
13539 for (
size_t i = 0; i < size; ++i) {
13540 ID id = ovlist->pairs[i].id;
13541 ID val = ovlist->pairs[i].val;
13542 ibf_dump_write_small_value(dump, ibf_dump_id(dump,
id));
13543 ibf_dump_write_small_value(dump, val);
/* Inverse of ibf_dump_ci_entries: read ci_size call-info records starting at
 * ci_entries_offset and rebuild the iseq's call-data array (cds).
 * Each entry: mid index (or (VALUE)-1 for a NULL ci), flag, argc, optional
 * kwarg block. Call caches start empty (vm_cc_empty). */
13552ibf_load_ci_entries(
const struct ibf_load *load,
13553 ibf_offset_t ci_entries_offset,
13554 unsigned int ci_size,
13562 ibf_offset_t reading_pos = ci_entries_offset;
13569 for (i = 0; i < ci_size; i++) {
13570 VALUE mid_index = ibf_load_small_value(load, &reading_pos);
/* (VALUE)-1 is the dump-side sentinel for a NULL ci -- skip decoding */
13571 if (mid_index != (
VALUE)-1) {
13572 ID mid = ibf_load_id(load, mid_index);
13573 unsigned int flag = (
unsigned int)ibf_load_small_value(load, &reading_pos);
13574 unsigned int argc = (
unsigned int)ibf_load_small_value(load, &reading_pos);
/* kwlen > 0 implies a serialized kwarg block follows (allocation of
 * kwarg was dropped by extraction) */
13577 int kwlen = (int)ibf_load_small_value(load, &reading_pos);
13580 kwarg->references = 0;
13581 kwarg->keyword_len = kwlen;
13582 for (
int j=0; j<kwlen; j++) {
/* keywords were dumped as object-list indices */
13583 VALUE keyword = ibf_load_small_value(load, &reading_pos);
13584 kwarg->keywords[j] = ibf_load_object(load, keyword);
13588 cds[i].ci = vm_ci_new(mid, flag, argc, kwarg);
/* no cached method at load time */
13590 cds[i].cc = vm_cc_empty();
/* Inverse of ibf_dump_outer_variables: rebuild the outer-variables id table
 * from (id, val) pairs at outer_variables_offset. Returns the new table
 * (presumably NULL when table_size == 0 -- the declaration of tbl was
 * dropped by extraction). */
13601ibf_load_outer_variables(
const struct ibf_load * load, ibf_offset_t outer_variables_offset)
13603 ibf_offset_t reading_pos = outer_variables_offset;
13607 size_t table_size = (size_t)ibf_load_small_value(load, &reading_pos);
13609 if (table_size > 0) {
13610 tbl = rb_id_table_create(table_size);
13613 for (
size_t i = 0; i < table_size; i++) {
13614 ID key = ibf_load_id(load, (
ID)ibf_load_small_value(load, &reading_pos));
13615 VALUE value = ibf_load_small_value(load, &reading_pos);
/* an ID that did not round-trip (0) gets a fresh temporary id so the
 * table entry is still insertable */
13616 if (!key) key = rb_make_temporary_id(i);
13617 rb_id_table_insert(tbl, key, value);
/* ibf_dump_iseq_each (fragment): serialize one iseq body. First dump every
 * variable-length section (bytecode, tables, child iseqs) recording their
 * offsets, then write the fixed "body" record of small values referencing
 * those offsets. Returns the body record's offset. */
13626 RUBY_ASSERT(dump->current_buffer == &dump->global_buffer);
13628 unsigned int *positions;
/* location strings go through the shared object list; only indices are
 * stored in the body record */
13632 const VALUE location_pathobj_index = ibf_dump_object(dump, body->location.pathobj);
13633 const VALUE location_base_label_index = ibf_dump_object(dump, body->location.base_label);
13634 const VALUE location_label_index = ibf_dump_object(dump, body->location.label);
/* optional per-iseq local buffer mode: redirect writes into a private
 * buffer with its own object table */
13636#if IBF_ISEQ_ENABLE_LOCAL_BUFFER
13637 ibf_offset_t iseq_start = ibf_dump_pos(dump);
13642 buffer.obj_table = ibf_dump_object_table_new();
13643 dump->current_buffer = &buffer;
/* --- variable-length sections; each helper returns its start offset --- */
13646 const ibf_offset_t bytecode_offset = ibf_dump_code(dump, iseq);
13647 const ibf_offset_t bytecode_size = ibf_dump_pos(dump) - bytecode_offset;
13648 const ibf_offset_t param_opt_table_offset = ibf_dump_param_opt_table(dump, iseq);
13649 const ibf_offset_t param_keyword_offset = ibf_dump_param_keyword(dump, iseq);
13650 const ibf_offset_t insns_info_body_offset = ibf_dump_insns_info_body(dump, iseq);
/* positions are decoded to a temporary array just for dumping */
13652 positions = rb_iseq_insns_info_decode_positions(ISEQ_BODY(iseq));
13653 const ibf_offset_t insns_info_positions_offset = ibf_dump_insns_info_positions(dump, positions, body->insns_info.size);
13654 SIZED_FREE_N(positions, ISEQ_BODY(iseq)->insns_info.size);
13656 const ibf_offset_t local_table_offset = ibf_dump_local_table(dump, iseq);
13657 const ibf_offset_t lvar_states_offset = ibf_dump_lvar_states(dump, iseq);
13658 const unsigned int catch_table_size = body->catch_table ? body->catch_table->size : 0;
13659 const ibf_offset_t catch_table_offset = ibf_dump_catch_table(dump, iseq);
/* related iseqs are stored as indices into the dump's iseq list */
13660 const int parent_iseq_index = ibf_dump_iseq(dump, ISEQ_BODY(iseq)->parent_iseq);
13661 const int local_iseq_index = ibf_dump_iseq(dump, ISEQ_BODY(iseq)->local_iseq);
13662 const int mandatory_only_iseq_index = ibf_dump_iseq(dump, ISEQ_BODY(iseq)->mandatory_only_iseq);
13663 const ibf_offset_t ci_entries_offset = ibf_dump_ci_entries(dump, iseq);
13664 const ibf_offset_t outer_variables_offset = ibf_dump_outer_variables(dump, iseq);
13666#if IBF_ISEQ_ENABLE_LOCAL_BUFFER
13667 ibf_offset_t local_obj_list_offset;
13668 unsigned int local_obj_list_size;
13670 ibf_dump_object_list(dump, &local_obj_list_offset, &local_obj_list_size);
13673 ibf_offset_t body_offset = ibf_dump_pos(dump);
/* pack the param boolean flags into one word; bit layout must match the
 * (param_flags >> n) & 1 unpacking in ibf_load_iseq_each */
13676 unsigned int param_flags =
13677 (body->param.flags.has_lead << 0) |
13678 (body->param.flags.has_opt << 1) |
13679 (body->param.flags.has_rest << 2) |
13680 (body->param.flags.has_post << 3) |
13681 (body->param.flags.has_kw << 4) |
13682 (body->param.flags.has_kwrest << 5) |
13683 (body->param.flags.has_block << 6) |
13684 (body->param.flags.ambiguous_param0 << 7) |
13685 (body->param.flags.accepts_no_kwarg << 8) |
13686 (body->param.flags.ruby2_keywords << 9) |
13687 (body->param.flags.anon_rest << 10) |
13688 (body->param.flags.anon_kwrest << 11) |
13689 (body->param.flags.use_block << 12) |
13690 (body->param.flags.forwardable << 13) |
13691 (body->param.flags.accepts_no_block << 14);
/* with a local buffer, offsets are already buffer-relative; otherwise
 * store them relative to body_offset so the loader can reverse this */
13693#if IBF_ISEQ_ENABLE_LOCAL_BUFFER
13694# define IBF_BODY_OFFSET(x) (x)
13696# define IBF_BODY_OFFSET(x) (body_offset - (x))
/* --- fixed body record; field order must match ibf_load_iseq_each --- */
13699 ibf_dump_write_small_value(dump, body->type);
13700 ibf_dump_write_small_value(dump, body->iseq_size);
13701 ibf_dump_write_small_value(dump, IBF_BODY_OFFSET(bytecode_offset));
13702 ibf_dump_write_small_value(dump, bytecode_size);
13703 ibf_dump_write_small_value(dump, param_flags);
13704 ibf_dump_write_small_value(dump, body->param.size);
13705 ibf_dump_write_small_value(dump, body->param.lead_num);
13706 ibf_dump_write_small_value(dump, body->param.opt_num);
13707 ibf_dump_write_small_value(dump, body->param.rest_start);
13708 ibf_dump_write_small_value(dump, body->param.post_start);
13709 ibf_dump_write_small_value(dump, body->param.post_num);
13710 ibf_dump_write_small_value(dump, body->param.block_start);
13711 ibf_dump_write_small_value(dump, IBF_BODY_OFFSET(param_opt_table_offset));
13712 ibf_dump_write_small_value(dump, param_keyword_offset);
13713 ibf_dump_write_small_value(dump, location_pathobj_index);
13714 ibf_dump_write_small_value(dump, location_base_label_index);
13715 ibf_dump_write_small_value(dump, location_label_index);
13716 ibf_dump_write_small_value(dump, body->location.first_lineno);
13717 ibf_dump_write_small_value(dump, body->location.node_id);
13718 ibf_dump_write_small_value(dump, body->location.code_location.beg_pos.lineno);
13719 ibf_dump_write_small_value(dump, body->location.code_location.beg_pos.column);
13720 ibf_dump_write_small_value(dump, body->location.code_location.end_pos.lineno);
13721 ibf_dump_write_small_value(dump, body->location.code_location.end_pos.column);
13722 ibf_dump_write_small_value(dump, IBF_BODY_OFFSET(insns_info_body_offset));
13723 ibf_dump_write_small_value(dump, IBF_BODY_OFFSET(insns_info_positions_offset));
13724 ibf_dump_write_small_value(dump, body->insns_info.size);
13725 ibf_dump_write_small_value(dump, IBF_BODY_OFFSET(local_table_offset));
13726 ibf_dump_write_small_value(dump, IBF_BODY_OFFSET(lvar_states_offset));
13727 ibf_dump_write_small_value(dump, catch_table_size);
13728 ibf_dump_write_small_value(dump, IBF_BODY_OFFSET(catch_table_offset));
13729 ibf_dump_write_small_value(dump, parent_iseq_index);
13730 ibf_dump_write_small_value(dump, local_iseq_index);
13731 ibf_dump_write_small_value(dump, mandatory_only_iseq_index);
13732 ibf_dump_write_small_value(dump, IBF_BODY_OFFSET(ci_entries_offset));
13733 ibf_dump_write_small_value(dump, IBF_BODY_OFFSET(outer_variables_offset));
13734 ibf_dump_write_small_value(dump, body->variable.flip_count);
13735 ibf_dump_write_small_value(dump, body->local_table_size);
13736 ibf_dump_write_small_value(dump, body->ivc_size);
13737 ibf_dump_write_small_value(dump, body->icvarc_size);
13738 ibf_dump_write_small_value(dump, body->ise_size);
13739 ibf_dump_write_small_value(dump, body->ic_size);
13740 ibf_dump_write_small_value(dump, body->ci_size);
13741 ibf_dump_write_small_value(dump, body->stack_max);
13742 ibf_dump_write_small_value(dump, body->builtin_attrs);
13743 ibf_dump_write_small_value(dump, body->prism ? 1 : 0);
13745#undef IBF_BODY_OFFSET
/* local-buffer mode: flush the private buffer into the global one and
 * append a trailer describing where the iseq's data landed */
13747#if IBF_ISEQ_ENABLE_LOCAL_BUFFER
13748 ibf_offset_t iseq_length_bytes = ibf_dump_pos(dump);
13750 dump->current_buffer = saved_buffer;
13751 ibf_dump_write(dump, RSTRING_PTR(buffer.str), iseq_length_bytes);
13753 ibf_offset_t offset = ibf_dump_pos(dump);
13754 ibf_dump_write_small_value(dump, iseq_start);
13755 ibf_dump_write_small_value(dump, iseq_length_bytes);
13756 ibf_dump_write_small_value(dump, body_offset);
13758 ibf_dump_write_small_value(dump, local_obj_list_offset);
13759 ibf_dump_write_small_value(dump, local_obj_list_size);
13761 st_free_table(buffer.obj_table);
13765 return body_offset;
/* Load a location string (label / base_label) by object index and intern it
 * as an fstring so duplicates are shared. NOTE(review): intermediate lines
 * (nil handling, return) were dropped by extraction. */
13770ibf_load_location_str(
const struct ibf_load *load,
VALUE str_index)
13772 VALUE str = ibf_load_object(load, str_index);
13774 str = rb_fstring(str);
/* ibf_load_iseq_each (fragment): the exact inverse of ibf_dump_iseq_each.
 * Reads the fixed body record at `offset` (field order must match the dump
 * side), then materializes every variable-length section into load_body. */
13784 ibf_offset_t reading_pos = offset;
/* local-buffer mode: the record at `offset` is a trailer pointing at the
 * iseq's private buffer; switch buffers before reading the body */
13786#if IBF_ISEQ_ENABLE_LOCAL_BUFFER
13788 load->current_buffer = &load->global_buffer;
13790 const ibf_offset_t iseq_start = (ibf_offset_t)ibf_load_small_value(load, &reading_pos);
13791 const ibf_offset_t iseq_length_bytes = (ibf_offset_t)ibf_load_small_value(load, &reading_pos);
13792 const ibf_offset_t body_offset = (ibf_offset_t)ibf_load_small_value(load, &reading_pos);
13795 buffer.buff = load->global_buffer.buff + iseq_start;
13796 buffer.size = iseq_length_bytes;
13797 buffer.obj_list_offset = (ibf_offset_t)ibf_load_small_value(load, &reading_pos);
13798 buffer.obj_list_size = (ibf_offset_t)ibf_load_small_value(load, &reading_pos);
13799 buffer.obj_list = pinned_list_new(buffer.obj_list_size);
13801 load->current_buffer = &buffer;
13802 reading_pos = body_offset;
/* mirror of the dump side: section offsets are either absolute (local
 * buffer) or stored as (offset - x) and reversed here */
13805#if IBF_ISEQ_ENABLE_LOCAL_BUFFER
13806# define IBF_BODY_OFFSET(x) (x)
13808# define IBF_BODY_OFFSET(x) (offset - (x))
/* --- fixed body record, same order as ibf_dump_iseq_each writes --- */
13811 const unsigned int type = (
unsigned int)ibf_load_small_value(load, &reading_pos);
13812 const unsigned int iseq_size = (
unsigned int)ibf_load_small_value(load, &reading_pos);
13813 const ibf_offset_t bytecode_offset = (ibf_offset_t)IBF_BODY_OFFSET(ibf_load_small_value(load, &reading_pos));
13814 const ibf_offset_t bytecode_size = (ibf_offset_t)ibf_load_small_value(load, &reading_pos);
13815 const unsigned int param_flags = (
unsigned int)ibf_load_small_value(load, &reading_pos);
13816 const unsigned int param_size = (
unsigned int)ibf_load_small_value(load, &reading_pos);
13817 const int param_lead_num = (int)ibf_load_small_value(load, &reading_pos);
13818 const int param_opt_num = (int)ibf_load_small_value(load, &reading_pos);
13819 const int param_rest_start = (int)ibf_load_small_value(load, &reading_pos);
13820 const int param_post_start = (int)ibf_load_small_value(load, &reading_pos);
13821 const int param_post_num = (int)ibf_load_small_value(load, &reading_pos);
13822 const int param_block_start = (int)ibf_load_small_value(load, &reading_pos);
13823 const ibf_offset_t param_opt_table_offset = (ibf_offset_t)IBF_BODY_OFFSET(ibf_load_small_value(load, &reading_pos));
13824 const ibf_offset_t param_keyword_offset = (ibf_offset_t)ibf_load_small_value(load, &reading_pos);
13825 const VALUE location_pathobj_index = ibf_load_small_value(load, &reading_pos);
13826 const VALUE location_base_label_index = ibf_load_small_value(load, &reading_pos);
13827 const VALUE location_label_index = ibf_load_small_value(load, &reading_pos);
13828 const int location_first_lineno = (int)ibf_load_small_value(load, &reading_pos);
13829 const int location_node_id = (int)ibf_load_small_value(load, &reading_pos);
13830 const int location_code_location_beg_pos_lineno = (int)ibf_load_small_value(load, &reading_pos);
13831 const int location_code_location_beg_pos_column = (int)ibf_load_small_value(load, &reading_pos);
13832 const int location_code_location_end_pos_lineno = (int)ibf_load_small_value(load, &reading_pos);
13833 const int location_code_location_end_pos_column = (int)ibf_load_small_value(load, &reading_pos);
13834 const ibf_offset_t insns_info_body_offset = (ibf_offset_t)IBF_BODY_OFFSET(ibf_load_small_value(load, &reading_pos));
13835 const ibf_offset_t insns_info_positions_offset = (ibf_offset_t)IBF_BODY_OFFSET(ibf_load_small_value(load, &reading_pos));
13836 const unsigned int insns_info_size = (
unsigned int)ibf_load_small_value(load, &reading_pos);
13837 const ibf_offset_t local_table_offset = (ibf_offset_t)IBF_BODY_OFFSET(ibf_load_small_value(load, &reading_pos));
13838 const ibf_offset_t lvar_states_offset = (ibf_offset_t)IBF_BODY_OFFSET(ibf_load_small_value(load, &reading_pos));
13839 const unsigned int catch_table_size = (
unsigned int)ibf_load_small_value(load, &reading_pos);
13840 const ibf_offset_t catch_table_offset = (ibf_offset_t)IBF_BODY_OFFSET(ibf_load_small_value(load, &reading_pos));
13841 const int parent_iseq_index = (int)ibf_load_small_value(load, &reading_pos);
13842 const int local_iseq_index = (int)ibf_load_small_value(load, &reading_pos);
13843 const int mandatory_only_iseq_index = (int)ibf_load_small_value(load, &reading_pos);
13844 const ibf_offset_t ci_entries_offset = (ibf_offset_t)IBF_BODY_OFFSET(ibf_load_small_value(load, &reading_pos));
13845 const ibf_offset_t outer_variables_offset = (ibf_offset_t)IBF_BODY_OFFSET(ibf_load_small_value(load, &reading_pos));
13846 const rb_snum_t variable_flip_count = (rb_snum_t)ibf_load_small_value(load, &reading_pos);
13847 const unsigned int local_table_size = (
unsigned int)ibf_load_small_value(load, &reading_pos);
13849 const unsigned int ivc_size = (
unsigned int)ibf_load_small_value(load, &reading_pos);
13850 const unsigned int icvarc_size = (
unsigned int)ibf_load_small_value(load, &reading_pos);
13851 const unsigned int ise_size = (
unsigned int)ibf_load_small_value(load, &reading_pos);
13852 const unsigned int ic_size = (
unsigned int)ibf_load_small_value(load, &reading_pos);
13854 const unsigned int ci_size = (
unsigned int)ibf_load_small_value(load, &reading_pos);
13855 const unsigned int stack_max = (
unsigned int)ibf_load_small_value(load, &reading_pos);
13856 const unsigned int builtin_attrs = (
unsigned int)ibf_load_small_value(load, &reading_pos);
13857 const bool prism = (bool)ibf_load_small_value(load, &reading_pos);
/* --- path / realpath: rebuild pathobj and reject non-string realpath --- */
13860 VALUE path = ibf_load_object(load, location_pathobj_index);
13865 realpath = path = rb_fstring(path);
13868 VALUE pathobj = path;
13874 if (!
NIL_P(realpath)) {
/* reached when realpath is neither nil nor a String (guard dropped by
 * extraction) -- corrupt input, so raise */
13876 rb_raise(rb_eArgError,
"unexpected realpath %"PRIxVALUE
13877 "(%x), path=%+"PRIsVALUE,
13878 realpath,
TYPE(realpath), path);
13880 realpath = rb_fstring(realpath);
13886 rb_iseq_pathobj_set(iseq, path, realpath);
/* dummy frame so errors raised during load have a file name */
13891 VALUE dummy_frame = rb_vm_push_frame_fname(ec, path);
13893#undef IBF_BODY_OFFSET
/* --- populate load_body scalars --- */
13895 load_body->type =
type;
13896 load_body->stack_max = stack_max;
13897 load_body->param.flags.has_lead = (param_flags >> 0) & 1;
13898 load_body->param.flags.has_opt = (param_flags >> 1) & 1;
13899 load_body->param.flags.has_rest = (param_flags >> 2) & 1;
13900 load_body->param.flags.has_post = (param_flags >> 3) & 1;
/* has_kw (bit 4) is deliberately left FALSE here and only set after
 * param.keyword is loaded below (line 13952) */
13901 load_body->param.flags.has_kw = FALSE;
13902 load_body->param.flags.has_kwrest = (param_flags >> 5) & 1;
13903 load_body->param.flags.has_block = (param_flags >> 6) & 1;
13904 load_body->param.flags.ambiguous_param0 = (param_flags >> 7) & 1;
13905 load_body->param.flags.accepts_no_kwarg = (param_flags >> 8) & 1;
13906 load_body->param.flags.ruby2_keywords = (param_flags >> 9) & 1;
13907 load_body->param.flags.anon_rest = (param_flags >> 10) & 1;
13908 load_body->param.flags.anon_kwrest = (param_flags >> 11) & 1;
13909 load_body->param.flags.use_block = (param_flags >> 12) & 1;
13910 load_body->param.flags.forwardable = (param_flags >> 13) & 1;
13911 load_body->param.flags.accepts_no_block = (param_flags >> 14) & 1;
13912 load_body->param.size = param_size;
13913 load_body->param.lead_num = param_lead_num;
13914 load_body->param.opt_num = param_opt_num;
13915 load_body->param.rest_start = param_rest_start;
13916 load_body->param.post_start = param_post_start;
13917 load_body->param.post_num = param_post_num;
13918 load_body->param.block_start = param_block_start;
13919 load_body->local_table_size = local_table_size;
13920 load_body->ci_size = ci_size;
13921 load_body->insns_info.size = insns_info_size;
/* coverage / script lines are not serialized in IBF */
13923 ISEQ_COVERAGE_SET(iseq,
Qnil);
13924 ISEQ_ORIGINAL_ISEQ_CLEAR(iseq);
13925 load_body->variable.flip_count = variable_flip_count;
13926 load_body->variable.script_lines =
Qnil;
13928 load_body->location.first_lineno = location_first_lineno;
13929 load_body->location.node_id = location_node_id;
13930 load_body->location.code_location.beg_pos.lineno = location_code_location_beg_pos_lineno;
13931 load_body->location.code_location.beg_pos.column = location_code_location_beg_pos_column;
13932 load_body->location.code_location.end_pos.lineno = location_code_location_end_pos_lineno;
13933 load_body->location.code_location.end_pos.column = location_code_location_end_pos_column;
13934 load_body->builtin_attrs = builtin_attrs;
13935 load_body->prism = prism;
13937 load_body->ivc_size = ivc_size;
13938 load_body->icvarc_size = icvarc_size;
13939 load_body->ise_size = ise_size;
13940 load_body->ic_size = ic_size;
/* inline-storage entries are allocated only when any cache size > 0 */
13942 if (ISEQ_IS_SIZE(load_body)) {
13946 load_body->is_entries = NULL;
/* --- variable-length sections --- */
13948 ibf_load_ci_entries(load, ci_entries_offset, ci_size, &load_body->call_data);
13949 load_body->outer_variables = ibf_load_outer_variables(load, outer_variables_offset);
13950 load_body->param.opt_table = ibf_load_param_opt_table(load, param_opt_table_offset, param_opt_num);
13951 load_body->param.keyword = ibf_load_param_keyword(load, param_keyword_offset);
/* now that keyword data exists, restore has_kw from bit 4 */
13952 load_body->param.flags.has_kw = (param_flags >> 4) & 1;
13953 load_body->insns_info.body = ibf_load_insns_info_body(load, insns_info_body_offset, insns_info_size);
13954 load_body->insns_info.positions = ibf_load_insns_info_positions(load, insns_info_positions_offset, insns_info_size);
13955 load_body->local_table = ibf_load_local_table(load, local_table_offset, local_table_size);
13956 load_body->lvar_states = ibf_load_lvar_states(load, lvar_states_offset, local_table_size, load_body->local_table);
13957 ibf_load_catch_table(load, catch_table_offset, catch_table_size, iseq);
/* child iseqs are loaded by index; GC-visible writes use RB_OBJ_WRITE */
13961 const rb_iseq_t *mandatory_only_iseq = ibf_load_iseq(load, (
const rb_iseq_t *)(
VALUE)mandatory_only_iseq_index);
13963 RB_OBJ_WRITE(iseq, &load_body->parent_iseq, parent_iseq);
13964 RB_OBJ_WRITE(iseq, &load_body->local_iseq, local_iseq);
13965 RB_OBJ_WRITE(iseq, &load_body->mandatory_only_iseq, mandatory_only_iseq);
/* keyword->table aliases into the freshly loaded local table */
13968 if (load_body->param.keyword != NULL) {
13970 struct rb_iseq_param_keyword *keyword = (
struct rb_iseq_param_keyword *) load_body->param.keyword;
13971 keyword->table = &load_body->local_table[keyword->bits_start - keyword->num];
13974 ibf_load_code(load, iseq, bytecode_offset, bytecode_size, iseq_size);
13975#if VM_INSN_INFO_TABLE_IMPL == 2
13976 rb_iseq_insns_info_encode_positions(iseq);
13979 rb_iseq_translate_threaded_code(iseq);
13981#if IBF_ISEQ_ENABLE_LOCAL_BUFFER
13982 load->current_buffer = &load->global_buffer;
13985 RB_OBJ_WRITE(iseq, &load_body->location.base_label, ibf_load_location_str(load, location_base_label_index));
13986 RB_OBJ_WRITE(iseq, &load_body->location.label, ibf_load_location_str(load, location_label_index));
13988#if IBF_ISEQ_ENABLE_LOCAL_BUFFER
13989 load->current_buffer = saved_buffer;
13991 verify_call_cache(iseq);
/* pop the dummy frame pushed at line 13891 */
13994 rb_vm_pop_frame_no_int(ec);
/* st_foreach callback: dump one iseq from the dump's iseq table and record
 * its offset (offset-list bookkeeping dropped by extraction). */
14004ibf_dump_iseq_list_i(st_data_t key, st_data_t val, st_data_t ptr)
14009 ibf_offset_t offset = ibf_dump_iseq_each(args->dump, iseq);
14012 return ST_CONTINUE;
/* ibf_dump_iseq_list (fragment): dump every iseq via ibf_dump_iseq_list_i,
 * then write the aligned offset table and record its position/size in the
 * file header. */
14022 args.offset_list = offset_list;
14024 st_foreach(dump->iseq_table, ibf_dump_iseq_list_i, (st_data_t)&args);
14027 st_index_t size = dump->iseq_table->num_entries;
14028 ibf_offset_t *offsets =
ALLOCA_N(ibf_offset_t, size);
14030 for (i = 0; i < size; i++) {
/* the offset array itself must be ibf_offset_t-aligned in the file */
14034 ibf_dump_align(dump,
sizeof(ibf_offset_t));
14035 header->iseq_list_offset = ibf_dump_write(dump, offsets,
sizeof(ibf_offset_t) * size);
14036 header->iseq_list_size = (
unsigned int)size;
/* Object-serialization declarations (fragment):
 * - per-object header bitfield: 5-bit T_* type plus special_const/frozen/
 *   internal flags (packed into one byte -- see the header pack/unpack
 *   helpers further down)
 * - class index enum for the handful of classes IBF can serialize
 * - flexible-array payload members and the data-type enum
 * - offset-alignment macros used when reading object bodies */
14046 unsigned int type: 5;
14047 unsigned int special_const: 1;
14048 unsigned int frozen: 1;
14049 unsigned int internal: 1;
14052enum ibf_object_class_index {
14053 IBF_OBJECT_CLASS_OBJECT,
14054 IBF_OBJECT_CLASS_ARRAY,
14055 IBF_OBJECT_CLASS_STANDARD_ERROR,
14056 IBF_OBJECT_CLASS_NO_MATCHING_PATTERN_ERROR,
14057 IBF_OBJECT_CLASS_TYPE_ERROR,
14058 IBF_OBJECT_CLASS_NO_MATCHING_PATTERN_KEY_ERROR,
14068 long keyval[FLEX_ARY_LEN];
14081 BDIGIT digits[FLEX_ARY_LEN];
14084enum ibf_object_data_type {
14085 IBF_OBJECT_DATA_ENCODING,
/* round `offset` up to a multiple of `align` */
14096#define IBF_ALIGNED_OFFSET(align, offset) \
14097 ((((offset) - 1) / (align) + 1) * (align))
/* bounds-checked pointer to an object body of the given C type */
14102#define IBF_OBJBODY(type, offset) \
14103 ibf_load_check_offset(load, IBF_ALIGNED_OFFSET(RUBY_ALIGNOF(type), offset))
/* Validate that `offset` lies inside the current load buffer and return a
 * pointer to it; raises IndexError on out-of-range (corrupt/truncated
 * input). */
14106ibf_load_check_offset(
const struct ibf_load *load,
size_t offset)
14108 if (offset >= load->current_buffer->size) {
14109 rb_raise(
rb_eIndexError,
"object offset out of range: %"PRIdSIZE, offset);
14111 return load->current_buffer->buff + offset;
/* Fallback dump handler for object types IBF cannot serialize: formats the
 * object's raw info and raises ArgError. Declared NORETURN so it can fill
 * the dispatch-table slots below. */
14114NORETURN(
static void ibf_dump_object_unsupported(
struct ibf_dump *dump,
VALUE obj));
14117ibf_dump_object_unsupported(
struct ibf_dump *dump,
VALUE obj)
14120 rb_raw_obj_info(buff,
sizeof(buff), obj);
14129 rb_raise(rb_eArgError,
"unsupported");
/* ibf_dump_object_class (fragment): map a known class object to its
 * ibf_object_class_index and write that index; any other class is a
 * hard bug. (The comparisons against rb_cObject etc. were dropped by
 * extraction -- each `cindex = ...` line is one branch.) */
14136 enum ibf_object_class_index cindex;
14138 cindex = IBF_OBJECT_CLASS_OBJECT;
14141 cindex = IBF_OBJECT_CLASS_ARRAY;
14144 cindex = IBF_OBJECT_CLASS_STANDARD_ERROR;
14147 cindex = IBF_OBJECT_CLASS_NO_MATCHING_PATTERN_ERROR;
14150 cindex = IBF_OBJECT_CLASS_TYPE_ERROR;
14153 cindex = IBF_OBJECT_CLASS_NO_MATCHING_PATTERN_KEY_ERROR;
14156 rb_obj_info_dump(obj);
14158 rb_bug(
"unsupported class");
14160 ibf_dump_write_small_value(dump, (
VALUE)cindex);
/* ibf_load_object_class (fragment): inverse mapping -- read the index and
 * return the corresponding class; unknown indices raise ArgError (corrupt
 * input, not a bug, since it comes from external data). */
14166 enum ibf_object_class_index cindex = (
enum ibf_object_class_index)ibf_load_small_value(load, &offset);
14169 case IBF_OBJECT_CLASS_OBJECT:
14171 case IBF_OBJECT_CLASS_ARRAY:
14173 case IBF_OBJECT_CLASS_STANDARD_ERROR:
14175 case IBF_OBJECT_CLASS_NO_MATCHING_PATTERN_ERROR:
14177 case IBF_OBJECT_CLASS_TYPE_ERROR:
14179 case IBF_OBJECT_CLASS_NO_MATCHING_PATTERN_KEY_ERROR:
14183 rb_raise(rb_eArgError,
"ibf_load_object_class: unknown class (%d)", (
int)cindex);
/* Float dump/load (fragments): the dump writes the raw double; the load
 * memcpy's it back (avoids strict-aliasing issues) and marks the resulting
 * non-flonum Float shareable for Ractors. */
14191 (void)IBF_W(&dbl,
double, 1);
14199 memcpy(&d, IBF_OBJBODY(
double, offset),
sizeof(d));
/* flonums are immediates and need no shareable marking */
14201 if (!
FLONUM_P(f)) RB_OBJ_SET_SHAREABLE(f);
/* String dump (fragment): write encoding index, byte length, then raw
 * bytes. Non-builtin encodings are stored by name as an extra object;
 * their index is biased above RUBY_ENCINDEX_BUILTIN_MAX so the loader can
 * tell the two cases apart. */
14208 long encindex = (long)rb_enc_get_index(obj);
14209 long len = RSTRING_LEN(obj);
14210 const char *ptr = RSTRING_PTR(obj);
14212 if (encindex > RUBY_ENCINDEX_BUILTIN_MAX) {
14213 rb_encoding *enc = rb_enc_from_index((
int)encindex);
14214 const char *enc_name = rb_enc_name(enc);
14215 encindex = RUBY_ENCINDEX_BUILTIN_MAX + ibf_dump_object(dump,
rb_str_new2(enc_name));
14218 ibf_dump_write_small_value(dump, encindex);
14219 ibf_dump_write_small_value(dump,
len);
14220 IBF_WP(ptr,
char,
len);
/* String load (fragment): inverse of the above; biased encindex means the
 * encoding name was stored as an object and must be resolved by name. */
14226 ibf_offset_t reading_pos = offset;
14228 int encindex = (int)ibf_load_small_value(load, &reading_pos);
14229 const long len = (long)ibf_load_small_value(load, &reading_pos);
14230 const char *ptr = load->current_buffer->buff + reading_pos;
14232 if (encindex > RUBY_ENCINDEX_BUILTIN_MAX) {
14233 VALUE enc_name_str = ibf_load_object(load, encindex - RUBY_ENCINDEX_BUILTIN_MAX);
14234 encindex = rb_enc_find_index(RSTRING_PTR(enc_name_str));
/* frozen non-internal strings become interned literals for sharing */
14238 if (header->frozen && !header->internal) {
14239 str = rb_enc_literal_str(ptr,
len, rb_enc_from_index(encindex));
14242 str = rb_enc_str_new(ptr,
len, rb_enc_from_index(encindex));
14245 if (header->frozen) str = rb_fstring(str);
/* Regexp dump (fragment): store option byte + source-string object index;
 * load recompiles the regexp from those. */
14256 regexp.srcstr = (long)ibf_dump_object(dump, srcstr);
14258 ibf_dump_write_byte(dump, (
unsigned char)regexp.option);
14259 ibf_dump_write_small_value(dump, regexp.srcstr);
/* Regexp load (fragment) */
14266 regexp.option = ibf_load_byte(load, &offset);
14267 regexp.srcstr = ibf_load_small_value(load, &offset);
14269 VALUE srcstr = ibf_load_object(load, regexp.srcstr);
14270 VALUE reg = rb_reg_compile(srcstr, (
int)regexp.option, NULL, 0);
/* dumped-frozen regexps are refrozen and marked Ractor-shareable */
14273 if (header->frozen) RB_OBJ_SET_SHAREABLE(
rb_obj_freeze(reg));
/* Array dump (fragment): write length, then each element as an object-list
 * index. */
14282 ibf_dump_write_small_value(dump,
len);
14283 for (i=0; i<
len; i++) {
14284 long index = (long)ibf_dump_object(dump,
RARRAY_AREF(obj, i));
14285 ibf_dump_write_small_value(dump, index);
/* Array load (fragment): inverse; frozen handling follows (body dropped by
 * extraction at line 14304+). */
14292 ibf_offset_t reading_pos = offset;
14294 const long len = (long)ibf_load_small_value(load, &reading_pos);
14299 for (i=0; i<
len; i++) {
14300 const VALUE index = ibf_load_small_value(load, &reading_pos);
14304 if (header->frozen) {
/* st_foreach callback: dump one hash pair as two object-list indices. */
14313ibf_dump_object_hash_i(st_data_t key, st_data_t val, st_data_t ptr)
14317 VALUE key_index = ibf_dump_object(dump, (
VALUE)key);
14318 VALUE val_index = ibf_dump_object(dump, (
VALUE)val);
14320 ibf_dump_write_small_value(dump, key_index);
14321 ibf_dump_write_small_value(dump, val_index);
14322 return ST_CONTINUE;
/* Hash dump (fragment): pair count, then the pairs via the callback above. */
14329 ibf_dump_write_small_value(dump, (
VALUE)
len);
/* Hash load (fragment): rebuild pair by pair, then rehash since loaded key
 * objects may hash differently than at dump time. */
14337 long len = (long)ibf_load_small_value(load, &offset);
14338 VALUE obj = rb_hash_new_with_size(
len);
14341 for (i = 0; i <
len; i++) {
14342 VALUE key_index = ibf_load_small_value(load, &offset);
14343 VALUE val_index = ibf_load_small_value(load, &offset);
14345 VALUE key = ibf_load_object(load, key_index);
14346 VALUE val = ibf_load_object(load, val_index);
14347 rb_hash_aset(obj, key, val);
14349 rb_hash_rehash(obj);
14352 if (header->frozen) {
14353 RB_OBJ_SET_FROZEN_SHAREABLE(obj);
/* Struct dump (fragment): only Range is supported -- begin/end stored as
 * object indices; any other struct class raises NotImplementedError. */
14367 range.class_index = 0;
14370 range.beg = (long)ibf_dump_object(dump, beg);
14371 range.end = (long)ibf_dump_object(dump, end);
14377 rb_raise(
rb_eNotImpError,
"ibf_dump_object_struct: unsupported class %"PRIsVALUE,
/* Struct (Range) load (fragment) */
14386 VALUE beg = ibf_load_object(load, range->beg);
14387 VALUE end = ibf_load_object(load, range->end);
14390 if (header->frozen) RB_OBJ_SET_FROZEN_SHAREABLE(obj);
/* Bignum dump (fragment): signed length (sign folded into slen's sign),
 * then the raw BDIGIT array. */
14397 ssize_t
len = BIGNUM_LEN(obj);
14398 ssize_t slen = BIGNUM_SIGN(obj) > 0 ?
len :
len * -1;
14399 BDIGIT *d = BIGNUM_DIGITS(obj);
14401 (void)IBF_W(&slen, ssize_t, 1);
14402 IBF_WP(d, BDIGIT,
len);
/* Bignum load (fragment): sign/length recovered from slen, digits
 * repacked via rb_integer_unpack. */
14409 int sign = bignum->slen > 0;
14410 ssize_t
len = sign > 0 ? bignum->slen : -1 * bignum->slen;
14411 const int big_unpack_flags =
14414 VALUE obj = rb_integer_unpack(bignum->digits,
len,
sizeof(BDIGIT), 0,
14418 if (header->frozen) RB_OBJ_SET_FROZEN_SHAREABLE(obj);
/* T_DATA dump (fragment): only Encoding objects are supported -- stored as
 * a type tag plus the NUL-terminated encoding name; anything else falls
 * through to ibf_dump_object_unsupported. */
14425 if (rb_data_is_encoding(obj)) {
14427 const char *name = rb_enc_name(enc);
/* +1 keeps the trailing NUL so the loader can use the bytes directly */
14428 long len = strlen(name) + 1;
14430 data[0] = IBF_OBJECT_DATA_ENCODING;
14432 (void)IBF_W(data,
long, 2);
14433 IBF_WP(name,
char,
len);
14436 ibf_dump_object_unsupported(dump, obj);
/* T_DATA load (fragment): dispatch on the stored type tag; the payload
 * starts after the two-long header. */
14443 const long *body = IBF_OBJBODY(
long, offset);
14444 const enum ibf_object_data_type
type = (
enum ibf_object_data_type)body[0];
14446 const char *data = (
const char *)&body[2];
14449 case IBF_OBJECT_DATA_ENCODING:
14451 VALUE encobj = rb_enc_from_encoding(rb_enc_find(data));
14456 return ibf_load_object_unsupported(load, header, offset);
/* Complex/Rational dump: both are a pair of numerator-like components, so
 * one handler serves both -- two object-list indices written as two longs.
 * (RCOMPLEX/RRATIONAL share the same two-slot layout here.) */
14460ibf_dump_object_complex_rational(
struct ibf_dump *dump,
VALUE obj)
14463 data[0] = (long)ibf_dump_object(dump, RCOMPLEX(obj)->real);
14464 data[1] = (long)ibf_dump_object(dump, RCOMPLEX(obj)->imag);
14466 (void)IBF_W(data,
long, 2);
/* Complex/Rational load (fragment): read both components; the rebuild of
 * the Complex vs Rational object was dropped by extraction. */
14470ibf_load_object_complex_rational(
const struct ibf_load *load,
const struct ibf_object_header *header, ibf_offset_t offset)
14473 VALUE a = ibf_load_object(load, nums->a);
14474 VALUE b = ibf_load_object(load, nums->b);
/* Symbol dump (fragment): serialized as its name string, reusing the
 * string dump format. */
14486 ibf_dump_object_string(dump,
rb_sym2str(obj));
/* Symbol load (fragment): decode the string payload (same encindex biasing
 * as strings) and intern it back into an ID. */
14492 ibf_offset_t reading_pos = offset;
14494 int encindex = (int)ibf_load_small_value(load, &reading_pos);
14495 const long len = (long)ibf_load_small_value(load, &reading_pos);
14496 const char *ptr = load->current_buffer->buff + reading_pos;
14498 if (encindex > RUBY_ENCINDEX_BUILTIN_MAX) {
14499 VALUE enc_name_str = ibf_load_object(load, encindex - RUBY_ENCINDEX_BUILTIN_MAX);
14500 encindex = rb_enc_find_index(RSTRING_PTR(enc_name_str));
14503 ID id = rb_intern3(ptr,
len, rb_enc_from_index(encindex));
/* Dispatch table indexed by ruby_value_type (T_NONE..T_MASK): one dump
 * handler per object type. Unsupported types point at
 * ibf_dump_object_unsupported, which raises. Slot order must track the
 * ruby_value_type enum -- and must stay in sync with load_object_functions
 * below. */
14507typedef void (*ibf_dump_object_function)(
struct ibf_dump *dump,
VALUE obj);
14508static const ibf_dump_object_function dump_object_functions[
RUBY_T_MASK+1] = {
14509 ibf_dump_object_unsupported,
14510 ibf_dump_object_unsupported,
14511 ibf_dump_object_class,
14512 ibf_dump_object_unsupported,
14513 ibf_dump_object_float,
14514 ibf_dump_object_string,
14515 ibf_dump_object_regexp,
14516 ibf_dump_object_array,
14517 ibf_dump_object_hash,
14518 ibf_dump_object_struct,
14519 ibf_dump_object_bignum,
14520 ibf_dump_object_unsupported,
14521 ibf_dump_object_data,
14522 ibf_dump_object_unsupported,
14523 ibf_dump_object_complex_rational,
14524 ibf_dump_object_complex_rational,
14525 ibf_dump_object_unsupported,
14526 ibf_dump_object_unsupported,
14527 ibf_dump_object_unsupported,
14528 ibf_dump_object_unsupported,
14529 ibf_dump_object_symbol,
14530 ibf_dump_object_unsupported,
14531 ibf_dump_object_unsupported,
14532 ibf_dump_object_unsupported,
14533 ibf_dump_object_unsupported,
14534 ibf_dump_object_unsupported,
14535 ibf_dump_object_unsupported,
14536 ibf_dump_object_unsupported,
14537 ibf_dump_object_unsupported,
14538 ibf_dump_object_unsupported,
14539 ibf_dump_object_unsupported,
14540 ibf_dump_object_unsupported,
/* Pack the 4-field object header into one byte: bits 0-4 type, bit 5
 * special_const, bit 6 frozen, bit 7 internal (fragment -- the write call
 * was dropped by extraction). */
14546 unsigned char byte =
14547 (header.type << 0) |
14548 (header.special_const << 5) |
14549 (header.frozen << 6) |
14550 (header.internal << 7);
/* Unpack the header byte written above; advances *offset past it. */
14556ibf_load_object_object_header(const struct
ibf_load *load, ibf_offset_t *offset)
14558 unsigned char byte = ibf_load_byte(load, offset);
14561 header.type = (
byte >> 0) & 0x1f;
14562 header.special_const = (
byte >> 5) & 0x01;
14563 header.frozen = (
byte >> 6) & 0x01;
14564 header.internal = (
byte >> 7) & 0x01;
/* ibf_dump_object_object (fragment): write one object -- aligned header
 * byte, then either the raw immediate (special consts) or the type-specific
 * payload via the dispatch table. Returns the object's start offset. */
14573 ibf_offset_t current_offset;
14574 IBF_ZERO(obj_header);
14575 obj_header.type =
TYPE(obj);
14577 IBF_W_ALIGN(ibf_offset_t);
14578 current_offset = ibf_dump_pos(dump);
/* special constants (nil/true/fixnum/...) are frozen immediates: dump the
 * VALUE bits directly, no per-type handler needed */
14583 obj_header.special_const = TRUE;
14584 obj_header.frozen = TRUE;
14585 obj_header.internal = TRUE;
14586 ibf_dump_object_object_header(dump, obj_header);
14587 ibf_dump_write_small_value(dump, obj);
14591 obj_header.special_const = FALSE;
14592 obj_header.frozen =
OBJ_FROZEN(obj) ? TRUE : FALSE;
14593 ibf_dump_object_object_header(dump, obj_header);
/* dispatch on T_* type -- table defined above */
14594 (*dump_object_functions[obj_header.type])(dump, obj);
14597 return current_offset;
/* Load-side dispatch table; slot-for-slot mirror of dump_object_functions
 * above (same ruby_value_type indexing). Keep both tables in sync. */
14601static const ibf_load_object_function load_object_functions[
RUBY_T_MASK+1] = {
14602 ibf_load_object_unsupported,
14603 ibf_load_object_unsupported,
14604 ibf_load_object_class,
14605 ibf_load_object_unsupported,
14606 ibf_load_object_float,
14607 ibf_load_object_string,
14608 ibf_load_object_regexp,
14609 ibf_load_object_array,
14610 ibf_load_object_hash,
14611 ibf_load_object_struct,
14612 ibf_load_object_bignum,
14613 ibf_load_object_unsupported,
14614 ibf_load_object_data,
14615 ibf_load_object_unsupported,
14616 ibf_load_object_complex_rational,
14617 ibf_load_object_complex_rational,
14618 ibf_load_object_unsupported,
14619 ibf_load_object_unsupported,
14620 ibf_load_object_unsupported,
14621 ibf_load_object_unsupported,
14622 ibf_load_object_symbol,
14623 ibf_load_object_unsupported,
14624 ibf_load_object_unsupported,
14625 ibf_load_object_unsupported,
14626 ibf_load_object_unsupported,
14627 ibf_load_object_unsupported,
14628 ibf_load_object_unsupported,
14629 ibf_load_object_unsupported,
14630 ibf_load_object_unsupported,
14631 ibf_load_object_unsupported,
14632 ibf_load_object_unsupported,
14633 ibf_load_object_unsupported,
/* ibf_load_object (fragment): fetch object #object_index from the current
 * buffer's object list, loading and memoizing it on first access.
 * Index 0 is reserved (early return, body dropped by extraction). */
14637ibf_load_object(
const struct ibf_load *load,
VALUE object_index)
14639 if (object_index == 0) {
/* cache hit check: pinned_list holds already-materialized objects */
14643 VALUE obj = pinned_list_fetch(load->current_buffer->obj_list, (
long)object_index);
/* cache miss: locate the serialized bytes via the offset table */
14645 ibf_offset_t *offsets = (ibf_offset_t *)(load->current_buffer->obj_list_offset + load->current_buffer->buff);
14646 ibf_offset_t offset = offsets[object_index];
14647 const struct ibf_object_header header = ibf_load_object_object_header(load, &offset);
/* debug tracing (compiled in conditionally; guard dropped by extraction) */
14650 fprintf(stderr,
"ibf_load_object: list=%#x offsets=%p offset=%#x\n",
14651 load->current_buffer->obj_list_offset, (
void *)offsets, offset);
14652 fprintf(stderr,
"ibf_load_object: type=%#x special=%d frozen=%d internal=%d\n",
14653 header.type, header.special_const, header.frozen, header.internal);
/* reject offsets pointing outside the buffer (corrupt input) */
14655 if (offset >= load->current_buffer->size) {
14656 rb_raise(
rb_eIndexError,
"object offset out of range: %u", offset);
/* special consts were dumped as raw VALUE bits; others dispatch by type */
14659 if (header.special_const) {
14660 ibf_offset_t reading_pos = offset;
14662 obj = ibf_load_small_value(load, &reading_pos);
14665 obj = (*load_object_functions[header.type])(load, &header, offset);
/* memoize so later references return the same object */
14668 pinned_list_store(load->current_buffer->obj_list, (
long)object_index, obj);
14671 fprintf(stderr,
"ibf_load_object: index=%#"PRIxVALUE
" obj=%#"PRIxVALUE
"\n",
14672 object_index, obj);
/* st_foreach callback: dump one object from the object table, recording its
 * offset (offset-list bookkeeping dropped by extraction). */
14685ibf_dump_object_list_i(st_data_t key, st_data_t val, st_data_t ptr)
14690 ibf_offset_t offset = ibf_dump_object_object(args->dump, obj);
14693 return ST_CONTINUE;
/* ibf_dump_object_list (fragment): dump every object in the current
 * buffer's object table, then write the aligned offset table and report
 * its position and entry count through the out-parameters. */
14697ibf_dump_object_list(
struct ibf_dump *dump, ibf_offset_t *obj_list_offset,
unsigned int *obj_list_size)
14699 st_table *obj_table = dump->current_buffer->obj_table;
14704 args.offset_list = offset_list;
14706 st_foreach(obj_table, ibf_dump_object_list_i, (st_data_t)&args);
14708 IBF_W_ALIGN(ibf_offset_t);
14709 *obj_list_offset = ibf_dump_pos(dump);
14711 st_index_t size = obj_table->num_entries;
14714 for (i=0; i<size; i++) {
14719 *obj_list_size = (
unsigned int)size;
14723ibf_dump_mark(
void *ptr)
14726 rb_gc_mark(dump->global_buffer.str);
14728 rb_mark_set(dump->global_buffer.obj_table);
14729 rb_mark_set(dump->iseq_table);
14733ibf_dump_free(
void *ptr)
14736 if (dump->global_buffer.obj_table) {
14737 st_free_table(dump->global_buffer.obj_table);
14738 dump->global_buffer.obj_table = 0;
14740 if (dump->iseq_table) {
14741 st_free_table(dump->iseq_table);
14742 dump->iseq_table = 0;
14747ibf_dump_memsize(
const void *ptr)
14751 if (dump->iseq_table) size += st_memsize(dump->iseq_table);
14752 if (dump->global_buffer.obj_table) size += st_memsize(dump->global_buffer.obj_table);
14758 {ibf_dump_mark, ibf_dump_free, ibf_dump_memsize,},
14765 dump->global_buffer.obj_table = NULL;
14766 dump->iseq_table = NULL;
14769 dump->global_buffer.obj_table = ibf_dump_object_table_new();
14770 dump->iseq_table = st_init_numtable();
14772 dump->current_buffer = &dump->global_buffer;
14783 if (ISEQ_BODY(iseq)->parent_iseq != NULL ||
14784 ISEQ_BODY(iseq)->local_iseq != iseq) {
14787 if (
RTEST(ISEQ_COVERAGE(iseq))) {
14792 ibf_dump_setup(dump, dump_obj);
14794 ibf_dump_write(dump, &header,
sizeof(header));
14795 ibf_dump_iseq(dump, iseq);
14797 header.magic[0] =
'Y';
14798 header.magic[1] =
'A';
14799 header.magic[2] =
'R';
14800 header.magic[3] =
'B';
14801 header.major_version = IBF_MAJOR_VERSION;
14802 header.minor_version = IBF_MINOR_VERSION;
14803 header.endian = IBF_ENDIAN_MARK;
14805 ibf_dump_iseq_list(dump, &header);
14806 ibf_dump_object_list(dump, &header.global_object_list_offset, &header.global_object_list_size);
14807 header.size = ibf_dump_pos(dump);
14810 VALUE opt_str = opt;
14813 ibf_dump_write(dump, ptr, header.extra_size);
14816 header.extra_size = 0;
14819 ibf_dump_overwrite(dump, &header,
sizeof(header), 0);
14821 str = dump->global_buffer.str;
14826static const ibf_offset_t *
14827ibf_iseq_list(
const struct ibf_load *load)
14829 return (
const ibf_offset_t *)(load->global_buffer.buff + load->header->iseq_list_offset);
14833rb_ibf_load_iseq_complete(
rb_iseq_t *iseq)
14837 ibf_offset_t offset = ibf_iseq_list(load)[iseq->aux.loader.index];
14840 fprintf(stderr,
"rb_ibf_load_iseq_complete: index=%#x offset=%#x size=%#x\n",
14841 iseq->aux.loader.index, offset,
14842 load->header->size);
14844 ibf_load_iseq_each(load, iseq, offset);
14845 ISEQ_COMPILE_DATA_CLEAR(iseq);
14847 rb_iseq_init_trace(iseq);
14848 load->iseq = prev_src_iseq;
14855 rb_ibf_load_iseq_complete((
rb_iseq_t *)iseq);
14863 int iseq_index = (int)(
VALUE)index_iseq;
14866 fprintf(stderr,
"ibf_load_iseq: index_iseq=%p iseq_list=%p\n",
14867 (
void *)index_iseq, (
void *)load->iseq_list);
14869 if (iseq_index == -1) {
14873 VALUE iseqv = pinned_list_fetch(load->iseq_list, iseq_index);
14876 fprintf(stderr,
"ibf_load_iseq: iseqv=%p\n", (
void *)iseqv);
14884 fprintf(stderr,
"ibf_load_iseq: new iseq=%p\n", (
void *)iseq);
14887 iseq->aux.loader.obj = load->loader_obj;
14888 iseq->aux.loader.index = iseq_index;
14890 fprintf(stderr,
"ibf_load_iseq: iseq=%p loader_obj=%p index=%d\n",
14891 (
void *)iseq, (
void *)load->loader_obj, iseq_index);
14893 pinned_list_store(load->iseq_list, iseq_index, (
VALUE)iseq);
14895 if (!USE_LAZY_LOAD || GET_VM()->builtin_function_table) {
14897 fprintf(stderr,
"ibf_load_iseq: loading iseq=%p\n", (
void *)iseq);
14899 rb_ibf_load_iseq_complete(iseq);
14903 fprintf(stderr,
"ibf_load_iseq: iseq=%p loaded %p\n",
14904 (
void *)iseq, (
void *)load->iseq);
14912ibf_load_setup_bytes(
struct ibf_load *load,
VALUE loader_obj,
const char *bytes,
size_t size)
14915 load->loader_obj = loader_obj;
14916 load->global_buffer.buff = bytes;
14917 load->header = header;
14918 load->global_buffer.size = header->size;
14919 load->global_buffer.obj_list_offset = header->global_object_list_offset;
14920 load->global_buffer.obj_list_size = header->global_object_list_size;
14921 RB_OBJ_WRITE(loader_obj, &load->iseq_list, pinned_list_new(header->iseq_list_size));
14922 RB_OBJ_WRITE(loader_obj, &load->global_buffer.obj_list, pinned_list_new(load->global_buffer.obj_list_size));
14925 load->current_buffer = &load->global_buffer;
14927 if (size < header->size) {
14930 if (strncmp(header->magic,
"YARB", 4) != 0) {
14933 if (header->major_version != IBF_MAJOR_VERSION ||
14934 header->minor_version != IBF_MINOR_VERSION) {
14936 header->major_version, header->minor_version, IBF_MAJOR_VERSION, IBF_MINOR_VERSION);
14938 if (header->endian != IBF_ENDIAN_MARK) {
14944 if (header->iseq_list_offset %
RUBY_ALIGNOF(ibf_offset_t)) {
14945 rb_raise(rb_eArgError,
"unaligned iseq list offset: %u",
14946 header->iseq_list_offset);
14948 if (load->global_buffer.obj_list_offset %
RUBY_ALIGNOF(ibf_offset_t)) {
14949 rb_raise(rb_eArgError,
"unaligned object list offset: %u",
14950 load->global_buffer.obj_list_offset);
14963 if (USE_LAZY_LOAD) {
14964 str =
rb_str_new(RSTRING_PTR(str), RSTRING_LEN(str));
14967 ibf_load_setup_bytes(load, loader_obj, RSTRING_PTR(str), RSTRING_LEN(str));
14972ibf_loader_mark(
void *ptr)
14975 rb_gc_mark(load->str);
14976 rb_gc_mark(load->iseq_list);
14977 rb_gc_mark(load->global_buffer.obj_list);
14981ibf_loader_free(
void *ptr)
14988ibf_loader_memsize(
const void *ptr)
14995 {ibf_loader_mark, ibf_loader_free, ibf_loader_memsize,},
15000rb_iseq_ibf_load(
VALUE str)
15006 ibf_load_setup(load, loader_obj, str);
15007 iseq = ibf_load_iseq(load, 0);
15014rb_iseq_ibf_load_bytes(
const char *bytes,
size_t size)
15020 ibf_load_setup_bytes(load, loader_obj, bytes, size);
15021 iseq = ibf_load_iseq(load, 0);
15028rb_iseq_ibf_load_extra_data(
VALUE str)
15034 ibf_load_setup(load, loader_obj, str);
15035 extra_str =
rb_str_new(load->global_buffer.buff + load->header->size, load->header->extra_size);
15040#include "prism_compile.c"
#define RBIMPL_ASSERT_OR_ASSUME(...)
This is either RUBY_ASSERT or RBIMPL_ASSUME, depending on RUBY_DEBUG.
#define RUBY_ASSERT(...)
Asserts that the given expression is truthy if and only if RUBY_DEBUG is truthy.
#define RUBY_ALIGNOF
Wraps (or simulates) alignof.
#define RUBY_EVENT_END
Encountered an end of a class clause.
#define RUBY_EVENT_C_CALL
A method, written in C, is called.
#define RUBY_EVENT_B_RETURN
Encountered a next statement.
#define RUBY_EVENT_CLASS
Encountered a new class.
#define RUBY_EVENT_NONE
No events.
#define RUBY_EVENT_LINE
Encountered a new line.
#define RUBY_EVENT_RETURN
Encountered a return statement.
#define RUBY_EVENT_C_RETURN
Return from a method, written in C.
#define RUBY_EVENT_B_CALL
Encountered an yield statement.
uint32_t rb_event_flag_t
Represents event(s).
#define RUBY_EVENT_CALL
A method, written in Ruby, is called.
#define RUBY_EVENT_RESCUE
Encountered a rescue statement.
#define rb_str_new2
Old name of rb_str_new_cstr.
#define T_COMPLEX
Old name of RUBY_T_COMPLEX.
#define TYPE(_)
Old name of rb_type.
#define NUM2ULONG
Old name of RB_NUM2ULONG.
#define NUM2LL
Old name of RB_NUM2LL.
#define ALLOCV
Old name of RB_ALLOCV.
#define RFLOAT_VALUE
Old name of rb_float_value.
#define T_STRING
Old name of RUBY_T_STRING.
#define Qundef
Old name of RUBY_Qundef.
#define INT2FIX
Old name of RB_INT2FIX.
#define OBJ_FROZEN
Old name of RB_OBJ_FROZEN.
#define rb_str_cat2
Old name of rb_str_cat_cstr.
#define T_NIL
Old name of RUBY_T_NIL.
#define UNREACHABLE
Old name of RBIMPL_UNREACHABLE.
#define T_FLOAT
Old name of RUBY_T_FLOAT.
#define ID2SYM
Old name of RB_ID2SYM.
#define T_BIGNUM
Old name of RUBY_T_BIGNUM.
#define SPECIAL_CONST_P
Old name of RB_SPECIAL_CONST_P.
#define OBJ_FREEZE
Old name of RB_OBJ_FREEZE.
#define ULONG2NUM
Old name of RB_ULONG2NUM.
#define UNREACHABLE_RETURN
Old name of RBIMPL_UNREACHABLE_RETURN.
#define SYM2ID
Old name of RB_SYM2ID.
#define FIX2UINT
Old name of RB_FIX2UINT.
#define ZALLOC
Old name of RB_ZALLOC.
#define CLASS_OF
Old name of rb_class_of.
#define FIXABLE
Old name of RB_FIXABLE.
#define xmalloc
Old name of ruby_xmalloc.
#define LONG2FIX
Old name of RB_INT2FIX.
#define FIX2INT
Old name of RB_FIX2INT.
#define NUM2UINT
Old name of RB_NUM2UINT.
#define ZALLOC_N
Old name of RB_ZALLOC_N.
#define ASSUME
Old name of RBIMPL_ASSUME.
#define T_RATIONAL
Old name of RUBY_T_RATIONAL.
#define T_HASH
Old name of RUBY_T_HASH.
#define ALLOC_N
Old name of RB_ALLOC_N.
#define FL_SET
Old name of RB_FL_SET.
#define FLONUM_P
Old name of RB_FLONUM_P.
#define Qtrue
Old name of RUBY_Qtrue.
#define NUM2INT
Old name of RB_NUM2INT.
#define Qnil
Old name of RUBY_Qnil.
#define Qfalse
Old name of RUBY_Qfalse.
#define FIX2LONG
Old name of RB_FIX2LONG.
#define T_ARRAY
Old name of RUBY_T_ARRAY.
#define NIL_P
Old name of RB_NIL_P.
#define T_SYMBOL
Old name of RUBY_T_SYMBOL.
#define DBL2NUM
Old name of rb_float_new.
#define BUILTIN_TYPE
Old name of RB_BUILTIN_TYPE.
#define NUM2LONG
Old name of RB_NUM2LONG.
#define FL_UNSET
Old name of RB_FL_UNSET.
#define UINT2NUM
Old name of RB_UINT2NUM.
#define FIXNUM_P
Old name of RB_FIXNUM_P.
#define CONST_ID
Old name of RUBY_CONST_ID.
#define ALLOCV_END
Old name of RB_ALLOCV_END.
#define SYMBOL_P
Old name of RB_SYMBOL_P.
#define T_REGEXP
Old name of RUBY_T_REGEXP.
#define ruby_debug
This variable controls whether the interpreter is in debug mode.
VALUE rb_eNotImpError
NotImplementedError exception.
VALUE rb_eStandardError
StandardError exception.
VALUE rb_eTypeError
TypeError exception.
VALUE rb_eNoMatchingPatternError
NoMatchingPatternError exception.
void rb_exc_fatal(VALUE mesg)
Raises a fatal error in the current thread.
VALUE rb_eRuntimeError
RuntimeError exception.
void rb_warn(const char *fmt,...)
Identical to rb_warning(), except it reports unless $VERBOSE is nil.
VALUE rb_eNoMatchingPatternKeyError
NoMatchingPatternKeyError exception.
VALUE rb_eIndexError
IndexError exception.
VALUE rb_eSyntaxError
SyntaxError exception.
@ RB_WARN_CATEGORY_STRICT_UNUSED_BLOCK
Warning is for checking unused block strictly.
VALUE rb_obj_reveal(VALUE obj, VALUE klass)
Make a hidden object visible again.
VALUE rb_cArray
Array class.
VALUE rb_cObject
Object class.
VALUE rb_obj_hide(VALUE obj)
Make the object invisible from Ruby code.
VALUE rb_cHash
Hash class.
VALUE rb_inspect(VALUE obj)
Generates a human-readable textual representation of the given object.
VALUE rb_cRange
Range class.
VALUE rb_obj_is_kind_of(VALUE obj, VALUE klass)
Queries if the given object is an instance (of possibly descendants) of the given class.
VALUE rb_obj_freeze(VALUE obj)
Just calls rb_obj_freeze_inline() inside.
#define RB_OBJ_WRITTEN(old, oldv, young)
Identical to RB_OBJ_WRITE(), except it doesn't write any values, but only a WB declaration.
#define RB_OBJ_WRITE(old, slot, young)
Declaration of a "back" pointer.
#define RB_POSFIXABLE(_)
Checks if the passed value is in range of fixnum, assuming it is a positive number.
Defines RBIMPL_HAS_BUILTIN.
VALUE rb_ary_reverse(VALUE ary)
Destructively reverses the passed array in-place.
VALUE rb_ary_dup(VALUE ary)
Duplicates an array.
VALUE rb_ary_cat(VALUE ary, const VALUE *train, long len)
Destructively appends multiple elements at the end of the array.
VALUE rb_ary_new(void)
Allocates a new, empty array.
VALUE rb_ary_new_capa(long capa)
Identical to rb_ary_new(), except it additionally specifies how many rooms of objects it should alloc...
VALUE rb_ary_hidden_new(long capa)
Allocates a hidden (no class) empty array.
VALUE rb_ary_clear(VALUE ary)
Destructively removes everything form an array.
VALUE rb_ary_push(VALUE ary, VALUE elem)
Special case of rb_ary_cat() that it adds only one element.
VALUE rb_ary_freeze(VALUE obj)
Freeze an array, preventing further modifications.
VALUE rb_ary_entry(VALUE ary, long off)
Queries an element of an array.
VALUE rb_ary_join(VALUE ary, VALUE sep)
Recursively stringises the elements of the passed array, flattens that result, then joins the sequenc...
void rb_ary_store(VALUE ary, long key, VALUE val)
Destructively stores the passed value to the passed array's passed index.
#define INTEGER_PACK_NATIVE_BYTE_ORDER
Means either INTEGER_PACK_MSBYTE_FIRST or INTEGER_PACK_LSBYTE_FIRST, depending on the host processor'...
#define INTEGER_PACK_NEGATIVE
Interprets the input as a signed negative number (unpack only).
#define INTEGER_PACK_LSWORD_FIRST
Stores/interprets the least significant word as the first word.
int rb_is_const_id(ID id)
Classifies the given ID, then sees if it is a constant.
int rb_is_attrset_id(ID id)
Classifies the given ID, then sees if it is an attribute writer.
int rb_range_values(VALUE range, VALUE *begp, VALUE *endp, int *exclp)
Deconstructs a range into its components.
VALUE rb_range_new(VALUE beg, VALUE end, int excl)
Creates a new Range.
VALUE rb_rational_new(VALUE num, VALUE den)
Constructs a Rational, with reduction.
int rb_reg_options(VALUE re)
Queries the options of the passed regular expression.
VALUE rb_str_append(VALUE dst, VALUE src)
Identical to rb_str_buf_append(), except it converts the right hand side before concatenating.
VALUE rb_str_tmp_new(long len)
Allocates a "temporary" string.
int rb_str_hash_cmp(VALUE str1, VALUE str2)
Compares two strings.
#define rb_str_new(str, len)
Allocates an instance of rb_cString.
st_index_t rb_str_hash(VALUE str)
Calculates a hash value of a string.
VALUE rb_str_cat(VALUE dst, const char *src, long srclen)
Destructively appends the passed contents to the string.
VALUE rb_str_buf_append(VALUE dst, VALUE src)
Identical to rb_str_cat_cstr(), except it takes Ruby's string instead of C's.
int rb_str_cmp(VALUE lhs, VALUE rhs)
Compares two strings, as in strcmp(3).
VALUE rb_str_concat(VALUE dst, VALUE src)
Identical to rb_str_append(), except it also accepts an integer as a codepoint.
VALUE rb_str_freeze(VALUE str)
This is the implementation of String#freeze.
#define rb_str_new_cstr(str)
Identical to rb_str_new, except it assumes the passed pointer is a pointer to a C string.
VALUE rb_class_name(VALUE obj)
Queries the name of the given object's class.
static ID rb_intern_const(const char *str)
This is a "tiny optimisation" over rb_intern().
VALUE rb_id2sym(ID id)
Allocates an instance of rb_cSymbol that has the given id.
VALUE rb_sym2str(VALUE symbol)
Obtain a frozen string representation of a symbol (not including the leading colon).
ID rb_sym2id(VALUE obj)
Converts an instance of rb_cSymbol into an ID.
int len
Length of the buffer.
#define RB_OBJ_SHAREABLE_P(obj)
Queries if the passed object has previously classified as shareable or not.
VALUE rb_ractor_make_shareable(VALUE obj)
Destructively transforms the passed object so that multiple Ractors can share it.
#define DECIMAL_SIZE_OF(expr)
An approximation of decimal representation size.
void ruby_qsort(void *, const size_t, const size_t, int(*)(const void *, const void *, void *), void *)
Reentrant implementation of quick sort.
#define rb_long2int
Just another name of rb_long2int_inline.
#define MEMCPY(p1, p2, type, n)
Handy macro to call memcpy.
#define ALLOCA_N(type, n)
#define MEMZERO(p, type, n)
Handy macro to erase a region of memory.
#define RB_GC_GUARD(v)
Prevents premature destruction of local objects.
#define RB_ALLOCV(v, n)
Identical to RB_ALLOCV_N(), except that it allocates a number of bytes and returns a void* .
VALUE type(ANYARGS)
ANYARGS-ed function type.
void rb_hash_foreach(VALUE q, int_type *w, VALUE e)
Iteration over the given hash.
#define RBIMPL_ATTR_NORETURN()
Wraps (or simulates) [[noreturn]]
#define RARRAY_LEN
Just another name of rb_array_len.
static int RARRAY_LENINT(VALUE ary)
Identical to rb_array_len(), except it differs for the return type.
static void RARRAY_ASET(VALUE ary, long i, VALUE v)
Assigns an object in an array.
#define RARRAY_AREF(a, i)
#define RARRAY_CONST_PTR
Just another name of rb_array_const_ptr.
static VALUE RBASIC_CLASS(VALUE obj)
Queries the class of an object.
#define RUBY_DEFAULT_FREE
This is a value you can set to RData::dfree.
void(* RUBY_DATA_FUNC)(void *)
This is the type of callbacks registered to RData.
#define RHASH_SIZE(h)
Queries the size of the hash.
static VALUE RREGEXP_SRC(VALUE rexp)
Convenient getter function.
#define StringValue(v)
Ensures that the parameter object is a String.
#define StringValuePtr(v)
Identical to StringValue, except it returns a char*.
static int RSTRING_LENINT(VALUE str)
Identical to RSTRING_LEN(), except it differs for the return type.
#define StringValueCStr(v)
Identical to StringValuePtr, except it additionally checks for the contents for viability as a C stri...
#define RTYPEDDATA_DATA(v)
Convenient getter macro.
#define RUBY_TYPED_FREE_IMMEDIATELY
Macros to see if each corresponding flag is defined.
#define TypedData_Get_Struct(obj, type, data_type, sval)
Obtains a C struct from inside of a wrapper Ruby object.
#define TypedData_Wrap_Struct(klass, data_type, sval)
Converts sval, a pointer to your struct, into a Ruby object.
#define TypedData_Make_Struct(klass, type, data_type, sval)
Identical to TypedData_Wrap_Struct, except it allocates a new data region internally instead of takin...
void rb_p(VALUE obj)
Inspects an object.
static bool RB_SPECIAL_CONST_P(VALUE obj)
Checks if the given object is of enum ruby_special_consts.
#define RTEST
This is an old name of RB_TEST.
#define _(args)
This was a transition path from K&R to ANSI.
Internal header for Complex.
Internal header for Rational.
const ID * segments
A null-terminated list of ids, used to represent a constant's path idNULL is used to represent the ::...
This is the struct that holds necessary info for a struct.
const char * wrap_struct_name
Name of structs of this kind.
uintptr_t ID
Type that represents a Ruby identifier such as a variable name.
#define SIZEOF_VALUE
Identical to sizeof(VALUE), except it is a macro that can also be used inside of preprocessor directi...
uintptr_t VALUE
Type that represents a Ruby object.
static bool RB_FLOAT_TYPE_P(VALUE obj)
Queries if the object is an instance of rb_cFloat.
static bool rb_integer_type_p(VALUE obj)
Queries if the object is an instance of rb_cInteger.
static bool RB_TYPE_P(VALUE obj, enum ruby_value_type t)
Queries if the given object is of given type.
@ RUBY_T_MASK
Bitmask of ruby_value_type.