Ruby  3.4.0dev (2024-11-22 revision 0989400a925cd201defdca9eb28eb87200b30785)
compile.c (0989400a925cd201defdca9eb28eb87200b30785)
1 /**********************************************************************
2 
3  compile.c - ruby node tree -> VM instruction sequence
4 
5  $Author$
6  created at: 04/01/01 03:42:15 JST
7 
8  Copyright (C) 2004-2007 Koichi Sasada
9 
10 **********************************************************************/
11 
12 #include "ruby/internal/config.h"
13 #include <math.h>
14 
15 #ifdef HAVE_DLADDR
16 # include <dlfcn.h>
17 #endif
18 
19 #include "encindex.h"
20 #include "id_table.h"
21 #include "internal.h"
22 #include "internal/array.h"
23 #include "internal/compile.h"
24 #include "internal/complex.h"
25 #include "internal/encoding.h"
26 #include "internal/error.h"
27 #include "internal/gc.h"
28 #include "internal/hash.h"
29 #include "internal/io.h"
30 #include "internal/numeric.h"
31 #include "internal/object.h"
32 #include "internal/rational.h"
33 #include "internal/re.h"
34 #include "internal/ruby_parser.h"
35 #include "internal/symbol.h"
36 #include "internal/thread.h"
37 #include "internal/variable.h"
38 #include "iseq.h"
39 #include "ruby/ractor.h"
40 #include "ruby/re.h"
41 #include "ruby/util.h"
42 #include "vm_core.h"
43 #include "vm_callinfo.h"
44 #include "vm_debug.h"
45 #include "yjit.h"
46 
47 #include "builtin.h"
48 #include "insns.inc"
49 #include "insns_info.inc"
50 
/* Arithmetic directly on tagged Fixnum VALUEs without unboxing:
 * FIXNUM_INC adds the C integer i to Fixnum n (INT2FIX(i) with the tag
 * bit masked off is the raw increment); FIXNUM_OR sets value bits of n. */
#define FIXNUM_INC(n, i) ((n)+(INT2FIX(i)&~FIXNUM_FLAG))
#define FIXNUM_OR(n, i) ((n)|INT2FIX(i))
53 
/* Doubly-linked list node embedded at the head of every element that can
 * appear in an instruction list under construction: the anchor sentinel,
 * labels, instructions, stack adjustments, and trace events. */
typedef struct iseq_link_element {
    enum {
        ISEQ_ELEMENT_ANCHOR, /* list-head sentinel (LINK_ANCHOR.anchor) */
        ISEQ_ELEMENT_LABEL,  /* element is a LABEL */
        ISEQ_ELEMENT_INSN,   /* element is an INSN */
        ISEQ_ELEMENT_ADJUST, /* element is an ADJUST */
        ISEQ_ELEMENT_TRACE,  /* element is a TRACE */
    } type;
    struct iseq_link_element *next;
    struct iseq_link_element *prev;
} LINK_ELEMENT;
65 
/* Head of an instruction list: a sentinel element plus a tail pointer
 * for O(1) append (see ADD_ELEM). */
typedef struct iseq_link_anchor {
    LINK_ELEMENT anchor; /* sentinel; anchor.next is the first real element */
    LINK_ELEMENT *last;  /* last element; &anchor when the list is empty */
} LINK_ANCHOR;
70 
/* Role of a label with respect to a rescued region; stored in the 2-bit
 * LABEL.rescued field. */
typedef enum {
    LABEL_RESCUE_NONE,
    LABEL_RESCUE_BEG,
    LABEL_RESCUE_END,
    LABEL_RESCUE_TYPE_MAX
} LABEL_RESCUE_TYPE;
77 
/* A branch/jump target within the instruction list. */
typedef struct iseq_label_data {
    LINK_ELEMENT link;
    int label_no;            /* unique number (printed via LABEL_FORMAT) */
    int position;            /* position in the generated sequence -- TODO confirm units */
    int sc_state;
    int sp;                  /* stack depth at this label -- presumably; verify */
    int refcnt;              /* reference count, bumped by LABEL_REF */
    unsigned int set: 1;     /* nonzero once the label has been placed */
    unsigned int rescued: 2; /* a LABEL_RESCUE_TYPE value */
    unsigned int unremovable: 1; /* set by LABEL_UNREMOVABLE; keep during optimization */
} LABEL;
89 
/* One VM instruction with its operands and source-location metadata. */
typedef struct iseq_insn_data {
    LINK_ELEMENT link;
    enum ruby_vminsn_type insn_id; /* opcode */
    int operand_size;              /* number of entries in operands[] */
    int sc_state;
    VALUE *operands;
    struct {
        int line_no;               /* source line number */
        int node_id;               /* AST node id; -1 for synthetic insns */
        rb_event_flag_t events;    /* trace events attached to this insn */
    } insn_info;
} INSN;
102 
/* Pseudo-element that adjusts the stack depth to match a label's state
 * (created via ADD_ADJUST / ADD_ADJUST_RESTORE). */
typedef struct iseq_adjust_data {
    LINK_ELEMENT link;
    LABEL *label;
    int line_no; /* -1 for the "restore" form (see ADD_ADJUST_RESTORE) */
} ADJUST;
108 
/* Pseudo-element recording a trace event to be emitted at this point. */
typedef struct iseq_trace_data {
    LINK_ELEMENT link;
    rb_event_flag_t event;
    long data; /* event payload, e.g. a branch-coverage counter index */
} TRACE;
114 
/* One begin/end label pair covered by an ensure clause; singly linked
 * to chain multiple protected ranges. */
struct ensure_range {
    LABEL *begin;
    LABEL *end;
    struct ensure_range *next;
};
120 
122  const void *ensure_node;
124  struct ensure_range *erange;
125 };
126 
127 const ID rb_iseq_shared_exc_local_tbl[] = {idERROR_INFO};
128 
142 #ifndef CPDEBUG
143 #define CPDEBUG 0
144 #endif
145 
146 #if CPDEBUG >= 0
147 #define compile_debug CPDEBUG
148 #else
149 #define compile_debug ISEQ_COMPILE_DATA(iseq)->option->debug_level
150 #endif
151 
152 #if CPDEBUG
153 
154 #define compile_debug_print_indent(level) \
155  ruby_debug_print_indent((level), compile_debug, gl_node_level * 2)
156 
157 #define debugp(header, value) (void) \
158  (compile_debug_print_indent(1) && \
159  ruby_debug_print_value(1, compile_debug, (header), (value)))
160 
161 #define debugi(header, id) (void) \
162  (compile_debug_print_indent(1) && \
163  ruby_debug_print_id(1, compile_debug, (header), (id)))
164 
165 #define debugp_param(header, value) (void) \
166  (compile_debug_print_indent(1) && \
167  ruby_debug_print_value(1, compile_debug, (header), (value)))
168 
169 #define debugp_verbose(header, value) (void) \
170  (compile_debug_print_indent(2) && \
171  ruby_debug_print_value(2, compile_debug, (header), (value)))
172 
173 #define debugp_verbose_node(header, value) (void) \
174  (compile_debug_print_indent(10) && \
175  ruby_debug_print_value(10, compile_debug, (header), (value)))
176 
177 #define debug_node_start(node) ((void) \
178  (compile_debug_print_indent(1) && \
179  (ruby_debug_print_node(1, CPDEBUG, "", (const NODE *)(node)), gl_node_level)), \
180  gl_node_level++)
181 
182 #define debug_node_end() gl_node_level --
183 
184 #else
185 
186 #define debugi(header, id) ((void)0)
187 #define debugp(header, value) ((void)0)
188 #define debugp_verbose(header, value) ((void)0)
189 #define debugp_verbose_node(header, value) ((void)0)
190 #define debugp_param(header, value) ((void)0)
191 #define debug_node_start(node) ((void)0)
192 #define debug_node_end() ((void)0)
193 #endif
194 
195 #if CPDEBUG > 1 || CPDEBUG < 0
196 #undef printf
197 #define printf ruby_debug_printf
198 #define debugs if (compile_debug_print_indent(1)) ruby_debug_printf
199 #define debug_compile(msg, v) ((void)(compile_debug_print_indent(1) && fputs((msg), stderr)), (v))
200 #else
201 #define debugs if(0)printf
202 #define debug_compile(msg, v) (v)
203 #endif
204 
205 #define LVAR_ERRINFO (1)
206 
207 /* create new label */
208 #define NEW_LABEL(l) new_label_body(iseq, (l))
209 #define LABEL_FORMAT "<L%03d>"
210 
211 #define NEW_ISEQ(node, name, type, line_no) \
212  new_child_iseq(iseq, (node), rb_fstring(name), 0, (type), (line_no))
213 
214 #define NEW_CHILD_ISEQ(node, name, type, line_no) \
215  new_child_iseq(iseq, (node), rb_fstring(name), iseq, (type), (line_no))
216 
217 /* add instructions */
218 #define ADD_SEQ(seq1, seq2) \
219  APPEND_LIST((seq1), (seq2))
220 
221 /* add an instruction */
222 #define ADD_INSN(seq, line_node, insn) \
223  ADD_ELEM((seq), (LINK_ELEMENT *) new_insn_body(iseq, nd_line(line_node), nd_node_id(line_node), BIN(insn), 0))
224 
225 /* add an instruction with the given line number and node id */
226 #define ADD_SYNTHETIC_INSN(seq, line_no, node_id, insn) \
227  ADD_ELEM((seq), (LINK_ELEMENT *) new_insn_body(iseq, (line_no), (node_id), BIN(insn), 0))
228 
229 /* insert an instruction before next */
230 #define INSERT_BEFORE_INSN(next, line_no, node_id, insn) \
231  ELEM_INSERT_PREV(&(next)->link, (LINK_ELEMENT *) new_insn_body(iseq, line_no, node_id, BIN(insn), 0))
232 
233 /* insert an instruction after prev */
234 #define INSERT_AFTER_INSN(prev, line_no, node_id, insn) \
235  ELEM_INSERT_NEXT(&(prev)->link, (LINK_ELEMENT *) new_insn_body(iseq, line_no, node_id, BIN(insn), 0))
236 
237 /* add an instruction with some operands (1, 2, 3, 5) */
238 #define ADD_INSN1(seq, line_node, insn, op1) \
239  ADD_ELEM((seq), (LINK_ELEMENT *) \
240  new_insn_body(iseq, nd_line(line_node), nd_node_id(line_node), BIN(insn), 1, (VALUE)(op1)))
241 
242 /* insert an instruction with some operands (1, 2, 3, 5) before next */
243 #define INSERT_BEFORE_INSN1(next, line_no, node_id, insn, op1) \
244  ELEM_INSERT_PREV(&(next)->link, (LINK_ELEMENT *) \
245  new_insn_body(iseq, line_no, node_id, BIN(insn), 1, (VALUE)(op1)))
246 
247 /* insert an instruction with some operands (1, 2, 3, 5) after prev */
248 #define INSERT_AFTER_INSN1(prev, line_no, node_id, insn, op1) \
249  ELEM_INSERT_NEXT(&(prev)->link, (LINK_ELEMENT *) \
250  new_insn_body(iseq, line_no, node_id, BIN(insn), 1, (VALUE)(op1)))
251 
252 #define LABEL_REF(label) ((label)->refcnt++)
253 
254 /* add an instruction with label operand (alias of ADD_INSN1) */
255 #define ADD_INSNL(seq, line_node, insn, label) (ADD_INSN1(seq, line_node, insn, label), LABEL_REF(label))
256 
257 #define ADD_INSN2(seq, line_node, insn, op1, op2) \
258  ADD_ELEM((seq), (LINK_ELEMENT *) \
259  new_insn_body(iseq, nd_line(line_node), nd_node_id(line_node), BIN(insn), 2, (VALUE)(op1), (VALUE)(op2)))
260 
261 #define ADD_INSN3(seq, line_node, insn, op1, op2, op3) \
262  ADD_ELEM((seq), (LINK_ELEMENT *) \
263  new_insn_body(iseq, nd_line(line_node), nd_node_id(line_node), BIN(insn), 3, (VALUE)(op1), (VALUE)(op2), (VALUE)(op3)))
264 
265 /* Specific Insn factory */
266 #define ADD_SEND(seq, line_node, id, argc) \
267  ADD_SEND_R((seq), (line_node), (id), (argc), NULL, (VALUE)INT2FIX(0), NULL)
268 
269 #define ADD_SEND_WITH_FLAG(seq, line_node, id, argc, flag) \
270  ADD_SEND_R((seq), (line_node), (id), (argc), NULL, (VALUE)(flag), NULL)
271 
272 #define ADD_SEND_WITH_BLOCK(seq, line_node, id, argc, block) \
273  ADD_SEND_R((seq), (line_node), (id), (argc), (block), (VALUE)INT2FIX(0), NULL)
274 
275 #define ADD_CALL_RECEIVER(seq, line_node) \
276  ADD_INSN((seq), (line_node), putself)
277 
278 #define ADD_CALL(seq, line_node, id, argc) \
279  ADD_SEND_R((seq), (line_node), (id), (argc), NULL, (VALUE)INT2FIX(VM_CALL_FCALL), NULL)
280 
281 #define ADD_CALL_WITH_BLOCK(seq, line_node, id, argc, block) \
282  ADD_SEND_R((seq), (line_node), (id), (argc), (block), (VALUE)INT2FIX(VM_CALL_FCALL), NULL)
283 
284 #define ADD_SEND_R(seq, line_node, id, argc, block, flag, keywords) \
285  ADD_ELEM((seq), (LINK_ELEMENT *) new_insn_send(iseq, nd_line(line_node), nd_node_id(line_node), (id), (VALUE)(argc), (block), (VALUE)(flag), (keywords)))
286 
287 #define ADD_TRACE(seq, event) \
288  ADD_ELEM((seq), (LINK_ELEMENT *)new_trace_body(iseq, (event), 0))
289 #define ADD_TRACE_WITH_DATA(seq, event, data) \
290  ADD_ELEM((seq), (LINK_ELEMENT *)new_trace_body(iseq, (event), (data)))
291 
292 static void iseq_add_getlocal(rb_iseq_t *iseq, LINK_ANCHOR *const seq, const NODE *const line_node, int idx, int level);
293 static void iseq_add_setlocal(rb_iseq_t *iseq, LINK_ANCHOR *const seq, const NODE *const line_node, int idx, int level);
294 
295 #define ADD_GETLOCAL(seq, line_node, idx, level) iseq_add_getlocal(iseq, (seq), (line_node), (idx), (level))
296 #define ADD_SETLOCAL(seq, line_node, idx, level) iseq_add_setlocal(iseq, (seq), (line_node), (idx), (level))
297 
298 /* add label */
299 #define ADD_LABEL(seq, label) \
300  ADD_ELEM((seq), (LINK_ELEMENT *) (label))
301 
302 #define APPEND_LABEL(seq, before, label) \
303  APPEND_ELEM((seq), (before), (LINK_ELEMENT *) (label))
304 
305 #define ADD_ADJUST(seq, line_node, label) \
306  ADD_ELEM((seq), (LINK_ELEMENT *) new_adjust_body(iseq, (label), nd_line(line_node)))
307 
308 #define ADD_ADJUST_RESTORE(seq, label) \
309  ADD_ELEM((seq), (LINK_ELEMENT *) new_adjust_body(iseq, (label), -1))
310 
311 #define LABEL_UNREMOVABLE(label) \
312  ((label) ? (LABEL_REF(label), (label)->unremovable=1) : 0)
313 #define ADD_CATCH_ENTRY(type, ls, le, iseqv, lc) do { \
314  VALUE _e = rb_ary_new3(5, (type), \
315  (VALUE)(ls) | 1, (VALUE)(le) | 1, \
316  (VALUE)(iseqv), (VALUE)(lc) | 1); \
317  LABEL_UNREMOVABLE(ls); \
318  LABEL_REF(le); \
319  LABEL_REF(lc); \
320  if (NIL_P(ISEQ_COMPILE_DATA(iseq)->catch_table_ary)) \
321  RB_OBJ_WRITE(iseq, &ISEQ_COMPILE_DATA(iseq)->catch_table_ary, rb_ary_hidden_new(3)); \
322  rb_ary_push(ISEQ_COMPILE_DATA(iseq)->catch_table_ary, freeze_hide_obj(_e)); \
323 } while (0)
324 
325 /* compile node */
326 #define COMPILE(anchor, desc, node) \
327  (debug_compile("== " desc "\n", \
328  iseq_compile_each(iseq, (anchor), (node), 0)))
329 
330 /* compile node, this node's value will be popped */
331 #define COMPILE_POPPED(anchor, desc, node) \
332  (debug_compile("== " desc "\n", \
333  iseq_compile_each(iseq, (anchor), (node), 1)))
334 
335 /* compile node, which is popped when 'popped' is true */
336 #define COMPILE_(anchor, desc, node, popped) \
337  (debug_compile("== " desc "\n", \
338  iseq_compile_each(iseq, (anchor), (node), (popped))))
339 
340 #define COMPILE_RECV(anchor, desc, node, recv) \
341  (private_recv_p(node) ? \
342  (ADD_INSN(anchor, node, putself), VM_CALL_FCALL) : \
343  COMPILE(anchor, desc, recv) ? 0 : -1)
344 
345 #define OPERAND_AT(insn, idx) \
346  (((INSN*)(insn))->operands[(idx)])
347 
348 #define INSN_OF(insn) \
349  (((INSN*)(insn))->insn_id)
350 
351 #define IS_INSN(link) ((link)->type == ISEQ_ELEMENT_INSN)
352 #define IS_LABEL(link) ((link)->type == ISEQ_ELEMENT_LABEL)
353 #define IS_ADJUST(link) ((link)->type == ISEQ_ELEMENT_ADJUST)
354 #define IS_TRACE(link) ((link)->type == ISEQ_ELEMENT_TRACE)
355 #define IS_INSN_ID(iobj, insn) (INSN_OF(iobj) == BIN(insn))
356 #define IS_NEXT_INSN_ID(link, insn) \
357  ((link)->next && IS_INSN((link)->next) && IS_INSN_ID((link)->next, insn))
358 
359 /* error */
360 #if CPDEBUG > 0
362 #endif
RBIMPL_ATTR_FORMAT(RBIMPL_PRINTF_FORMAT, 3, 4)
/*
 * Record a compile error against iseq at the given source line.
 * err_info is tri-state: Qnil = no error recorded yet, a SyntaxError
 * object = first error already stored, Qtrue = error already consumed.
 * The first error is stored into compile data and set as errinfo;
 * subsequent errors after consumption just flip the flag back to Qtrue.
 * Under compile_debug the (possibly synthesized) error is raised fatally.
 */
static void
append_compile_error(const rb_iseq_t *iseq, int line, const char *fmt, ...)
{
    VALUE err_info = ISEQ_COMPILE_DATA(iseq)->err_info;
    VALUE file = rb_iseq_path(iseq);
    /* Qtrue is a marker, not an error object: start fresh in that case */
    VALUE err = err_info == Qtrue ? Qfalse : err_info;
    va_list args;

    va_start(args, fmt);
    err = rb_syntax_error_append(err, file, line, -1, NULL, fmt, args);
    va_end(args);
    if (NIL_P(err_info)) {
        /* first error: remember it and expose it via errinfo */
        RB_OBJ_WRITE(iseq, &ISEQ_COMPILE_DATA(iseq)->err_info, err);
        rb_set_errinfo(err);
    }
    else if (!err_info) {
        /* err_info == Qfalse: mark that an error occurred */
        RB_OBJ_WRITE(iseq, &ISEQ_COMPILE_DATA(iseq)->err_info, Qtrue);
    }
    if (compile_debug) {
        if (SPECIAL_CONST_P(err)) err = rb_eSyntaxError;
        rb_exc_fatal(err);
    }
}
387 
#if 0
/* Currently unused: report an internal compiler bug with the iseq's
 * source path/line, then abort.  Kept for debugging convenience. */
static void
compile_bug(rb_iseq_t *iseq, int line, const char *fmt, ...)
{
    va_list args;
    va_start(args, fmt);
    rb_report_bug_valist(rb_iseq_path(iseq), line, fmt, args);
    va_end(args);
    abort();
}
#endif
399 
400 #define COMPILE_ERROR append_compile_error
401 
402 #define ERROR_ARGS_AT(n) iseq, nd_line(n),
403 #define ERROR_ARGS ERROR_ARGS_AT(node)
404 
405 #define EXPECT_NODE(prefix, node, ndtype, errval) \
406 do { \
407  const NODE *error_node = (node); \
408  enum node_type error_type = nd_type(error_node); \
409  if (error_type != (ndtype)) { \
410  COMPILE_ERROR(ERROR_ARGS_AT(error_node) \
411  prefix ": " #ndtype " is expected, but %s", \
412  ruby_node_name(error_type)); \
413  return errval; \
414  } \
415 } while (0)
416 
417 #define EXPECT_NODE_NONULL(prefix, parent, ndtype, errval) \
418 do { \
419  COMPILE_ERROR(ERROR_ARGS_AT(parent) \
420  prefix ": must be " #ndtype ", but 0"); \
421  return errval; \
422 } while (0)
423 
424 #define UNKNOWN_NODE(prefix, node, errval) \
425 do { \
426  const NODE *error_node = (node); \
427  COMPILE_ERROR(ERROR_ARGS_AT(error_node) prefix ": unknown node (%s)", \
428  ruby_node_name(nd_type(error_node))); \
429  return errval; \
430 } while (0)
431 
432 #define COMPILE_OK 1
433 #define COMPILE_NG 0
434 
435 #define CHECK(sub) if (!(sub)) {BEFORE_RETURN;return COMPILE_NG;}
436 #define NO_CHECK(sub) (void)(sub)
437 #define BEFORE_RETURN
438 
439 /* leave name uninitialized so that compiler warn if INIT_ANCHOR is
440  * missing */
441 #define DECL_ANCHOR(name) \
442  LINK_ANCHOR name[1] = {{{ISEQ_ELEMENT_ANCHOR,},}}
443 #define INIT_ANCHOR(name) \
444  (name->last = &name->anchor)
445 
/* Freeze obj and clear its class pointer so it is hidden from Ruby code.
 * Used for internal bookkeeping objects (e.g. catch table entries). */
static inline VALUE
freeze_hide_obj(VALUE obj)
{
    OBJ_FREEZE(obj);
    RBASIC_CLEAR_CLASS(obj);
    return obj;
}
453 
454 #include "optinsn.inc"
455 #if OPT_INSTRUCTIONS_UNIFICATION
456 #include "optunifs.inc"
457 #endif
458 
459 /* for debug */
460 #if CPDEBUG < 0
461 #define ISEQ_ARG iseq,
462 #define ISEQ_ARG_DECLARE rb_iseq_t *iseq,
463 #else
464 #define ISEQ_ARG
465 #define ISEQ_ARG_DECLARE
466 #endif
467 
468 #if CPDEBUG
469 #define gl_node_level ISEQ_COMPILE_DATA(iseq)->node_level
470 #endif
471 
472 static void dump_disasm_list_with_cursor(const LINK_ELEMENT *link, const LINK_ELEMENT *curr, const LABEL *dest);
473 static void dump_disasm_list(const LINK_ELEMENT *elem);
474 
475 static int insn_data_length(INSN *iobj);
476 static int calc_sp_depth(int depth, INSN *iobj);
477 
478 static INSN *new_insn_body(rb_iseq_t *iseq, int line_no, int node_id, enum ruby_vminsn_type insn_id, int argc, ...);
479 static LABEL *new_label_body(rb_iseq_t *iseq, long line);
480 static ADJUST *new_adjust_body(rb_iseq_t *iseq, LABEL *label, int line);
481 static TRACE *new_trace_body(rb_iseq_t *iseq, rb_event_flag_t event, long data);
482 
483 
484 static int iseq_compile_each(rb_iseq_t *iseq, LINK_ANCHOR *anchor, const NODE *n, int);
485 static int iseq_setup(rb_iseq_t *iseq, LINK_ANCHOR *const anchor);
486 static int iseq_setup_insn(rb_iseq_t *iseq, LINK_ANCHOR *const anchor);
487 static int iseq_optimize(rb_iseq_t *iseq, LINK_ANCHOR *const anchor);
488 static int iseq_insns_unification(rb_iseq_t *iseq, LINK_ANCHOR *const anchor);
489 
490 static int iseq_set_local_table(rb_iseq_t *iseq, const rb_ast_id_table_t *tbl, const NODE *const node_args);
491 static int iseq_set_exception_local_table(rb_iseq_t *iseq);
492 static int iseq_set_arguments(rb_iseq_t *iseq, LINK_ANCHOR *const anchor, const NODE *const node);
493 
494 static int iseq_set_sequence(rb_iseq_t *iseq, LINK_ANCHOR *const anchor);
495 static int iseq_set_exception_table(rb_iseq_t *iseq);
496 static int iseq_set_optargs_table(rb_iseq_t *iseq);
497 
498 static int compile_defined_expr(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, VALUE needstr, bool ignore);
499 static int compile_hash(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *node, int method_call_keywords, int popped);
500 
501 /*
502  * To make Array to LinkedList, use link_anchor
503  */
504 
505 static void
506 verify_list(ISEQ_ARG_DECLARE const char *info, LINK_ANCHOR *const anchor)
507 {
508 #if CPDEBUG
509  int flag = 0;
510  LINK_ELEMENT *list, *plist;
511 
512  if (!compile_debug) return;
513 
514  list = anchor->anchor.next;
515  plist = &anchor->anchor;
516  while (list) {
517  if (plist != list->prev) {
518  flag += 1;
519  }
520  plist = list;
521  list = list->next;
522  }
523 
524  if (anchor->last != plist && anchor->last != 0) {
525  flag |= 0x70000;
526  }
527 
528  if (flag != 0) {
529  rb_bug("list verify error: %08x (%s)", flag, info);
530  }
531 #endif
532 }
533 #if CPDEBUG < 0
534 #define verify_list(info, anchor) verify_list(iseq, (info), (anchor))
535 #endif
536 
537 static void
538 verify_call_cache(rb_iseq_t *iseq)
539 {
540 #if CPDEBUG
541  VALUE *original = rb_iseq_original_iseq(iseq);
542  size_t i = 0;
543  while (i < ISEQ_BODY(iseq)->iseq_size) {
544  VALUE insn = original[i];
545  const char *types = insn_op_types(insn);
546 
547  for (int j=0; types[j]; j++) {
548  if (types[j] == TS_CALLDATA) {
549  struct rb_call_data *cd = (struct rb_call_data *)original[i+j+1];
550  const struct rb_callinfo *ci = cd->ci;
551  const struct rb_callcache *cc = cd->cc;
552  if (cc != vm_cc_empty()) {
553  vm_ci_dump(ci);
554  rb_bug("call cache is not initialized by vm_cc_empty()");
555  }
556  }
557  }
558  i += insn_len(insn);
559  }
560 
561  for (unsigned int i=0; i<ISEQ_BODY(iseq)->ci_size; i++) {
562  struct rb_call_data *cd = &ISEQ_BODY(iseq)->call_data[i];
563  const struct rb_callinfo *ci = cd->ci;
564  const struct rb_callcache *cc = cd->cc;
565  if (cc != NULL && cc != vm_cc_empty()) {
566  vm_ci_dump(ci);
567  rb_bug("call cache is not initialized by vm_cc_empty()");
568  }
569  }
570 #endif
571 }
572 
573 /*
574  * elem1, elem2 => elem1, elem2, elem
575  */
576 static void
577 ADD_ELEM(ISEQ_ARG_DECLARE LINK_ANCHOR *const anchor, LINK_ELEMENT *elem)
578 {
579  elem->prev = anchor->last;
580  anchor->last->next = elem;
581  anchor->last = elem;
582  verify_list("add", anchor);
583 }
584 
585 /*
586  * elem1, before, elem2 => elem1, before, elem, elem2
587  */
588 static void
589 APPEND_ELEM(ISEQ_ARG_DECLARE LINK_ANCHOR *const anchor, LINK_ELEMENT *before, LINK_ELEMENT *elem)
590 {
591  elem->prev = before;
592  elem->next = before->next;
593  elem->next->prev = elem;
594  before->next = elem;
595  if (before == anchor->last) anchor->last = elem;
596  verify_list("add", anchor);
597 }
598 #if CPDEBUG < 0
599 #define ADD_ELEM(anchor, elem) ADD_ELEM(iseq, (anchor), (elem))
600 #define APPEND_ELEM(anchor, before, elem) APPEND_ELEM(iseq, (anchor), (before), (elem))
601 #endif
602 
603 static int
604 branch_coverage_valid_p(rb_iseq_t *iseq, int first_line)
605 {
606  if (!ISEQ_COVERAGE(iseq)) return 0;
607  if (!ISEQ_BRANCH_COVERAGE(iseq)) return 0;
608  if (first_line <= 0) return 0;
609  return 1;
610 }
611 
612 #define PTR2NUM(x) (rb_int2inum((intptr_t)(void *)(x)))
613 
614 static VALUE
615 setup_branch(const rb_code_location_t *loc, const char *type, VALUE structure, VALUE key)
616 {
617  const int first_lineno = loc->beg_pos.lineno, first_column = loc->beg_pos.column;
618  const int last_lineno = loc->end_pos.lineno, last_column = loc->end_pos.column;
619  VALUE branch = rb_ary_hidden_new(6);
620 
621  rb_hash_aset(structure, key, branch);
622  rb_ary_push(branch, ID2SYM(rb_intern(type)));
623  rb_ary_push(branch, INT2FIX(first_lineno));
624  rb_ary_push(branch, INT2FIX(first_column));
625  rb_ary_push(branch, INT2FIX(last_lineno));
626  rb_ary_push(branch, INT2FIX(last_column));
627  return branch;
628 }
629 
630 static VALUE
631 decl_branch_base(rb_iseq_t *iseq, VALUE key, const rb_code_location_t *loc, const char *type)
632 {
633  if (!branch_coverage_valid_p(iseq, loc->beg_pos.lineno)) return Qundef;
634 
635  /*
636  * if !structure[node]
637  * structure[node] = [type, first_lineno, first_column, last_lineno, last_column, branches = {}]
638  * else
639  * branches = structure[node][5]
640  * end
641  */
642 
643  VALUE structure = RARRAY_AREF(ISEQ_BRANCH_COVERAGE(iseq), 0);
644  VALUE branch_base = rb_hash_aref(structure, key);
645  VALUE branches;
646 
647  if (NIL_P(branch_base)) {
648  branch_base = setup_branch(loc, type, structure, key);
649  branches = rb_hash_new();
650  rb_obj_hide(branches);
651  rb_ary_push(branch_base, branches);
652  }
653  else {
654  branches = RARRAY_AREF(branch_base, 5);
655  }
656 
657  return branches;
658 }
659 
660 static NODE
661 generate_dummy_line_node(int lineno, int node_id)
662 {
663  NODE dummy = { 0 };
664  nd_set_line(&dummy, lineno);
665  nd_set_node_id(&dummy, node_id);
666  return dummy;
667 }
668 
669 static void
670 add_trace_branch_coverage(rb_iseq_t *iseq, LINK_ANCHOR *const seq, const rb_code_location_t *loc, int node_id, int branch_id, const char *type, VALUE branches)
671 {
672  if (!branch_coverage_valid_p(iseq, loc->beg_pos.lineno)) return;
673 
674  /*
675  * if !branches[branch_id]
676  * branches[branch_id] = [type, first_lineno, first_column, last_lineno, last_column, counter_idx]
677  * else
678  * counter_idx= branches[branch_id][5]
679  * end
680  */
681 
682  VALUE key = INT2FIX(branch_id);
683  VALUE branch = rb_hash_aref(branches, key);
684  long counter_idx;
685 
686  if (NIL_P(branch)) {
687  branch = setup_branch(loc, type, branches, key);
688  VALUE counters = RARRAY_AREF(ISEQ_BRANCH_COVERAGE(iseq), 1);
689  counter_idx = RARRAY_LEN(counters);
690  rb_ary_push(branch, LONG2FIX(counter_idx));
691  rb_ary_push(counters, INT2FIX(0));
692  }
693  else {
694  counter_idx = FIX2LONG(RARRAY_AREF(branch, 5));
695  }
696 
697  ADD_TRACE_WITH_DATA(seq, RUBY_EVENT_COVERAGE_BRANCH, counter_idx);
698  ADD_SYNTHETIC_INSN(seq, loc->end_pos.lineno, node_id, nop);
699 }
700 
701 #define ISEQ_LAST_LINE(iseq) (ISEQ_COMPILE_DATA(iseq)->last_line)
702 
/* st_foreach callback: report any label that was referenced but never
 * placed in the instruction list (link.next stays unset in that case). */
static int
validate_label(st_data_t name, st_data_t label, st_data_t arg)
{
    rb_iseq_t *iseq = (rb_iseq_t *)arg;
    LABEL *lobj = (LABEL *)label;
    if (!lobj->link.next) {
        /* NOTE(review): the do-while looks redundant here; presumably it
         * guards a multi-statement COMPILE_ERROR expansion in debug
         * builds -- confirm before removing. */
        do {
            COMPILE_ERROR(iseq, lobj->position,
                          "%"PRIsVALUE": undefined label",
                          rb_sym2str((VALUE)name));
        } while (0);
    }
    return ST_CONTINUE;
}
717 
/* Report every undefined label in labels_table, then free the table. */
static void
validate_labels(rb_iseq_t *iseq, st_table *labels_table)
{
    st_foreach(labels_table, validate_label, (st_data_t)iseq);
    st_free_table(labels_table);
}
724 
/* Return the receiver node of a call-like node, or 0 for receiver-less
 * forms (FCALL/VCALL).  rb_bug on any other node type. */
static NODE *
get_nd_recv(const NODE *node)
{
    switch (nd_type(node)) {
      case NODE_CALL:
        return RNODE_CALL(node)->nd_recv;
      case NODE_OPCALL:
        return RNODE_OPCALL(node)->nd_recv;
      case NODE_FCALL:
        return 0;
      case NODE_QCALL:
        return RNODE_QCALL(node)->nd_recv;
      case NODE_VCALL:
        return 0;
      case NODE_ATTRASGN:
        return RNODE_ATTRASGN(node)->nd_recv;
      case NODE_OP_ASGN1:
        return RNODE_OP_ASGN1(node)->nd_recv;
      case NODE_OP_ASGN2:
        return RNODE_OP_ASGN2(node)->nd_recv;
      default:
        rb_bug("unexpected node: %s", ruby_node_name(nd_type(node)));
    }
}
749 
/* Return the method id of a call-like node.  rb_bug on any other type. */
static ID
get_node_call_nd_mid(const NODE *node)
{
    switch (nd_type(node)) {
      case NODE_CALL:
        return RNODE_CALL(node)->nd_mid;
      case NODE_OPCALL:
        return RNODE_OPCALL(node)->nd_mid;
      case NODE_FCALL:
        return RNODE_FCALL(node)->nd_mid;
      case NODE_QCALL:
        return RNODE_QCALL(node)->nd_mid;
      case NODE_VCALL:
        return RNODE_VCALL(node)->nd_mid;
      case NODE_ATTRASGN:
        return RNODE_ATTRASGN(node)->nd_mid;
      default:
        rb_bug("unexpected node: %s", ruby_node_name(nd_type(node)));
    }
}
770 
/* Return the argument list node of a call-like node, or 0 for VCALL
 * (which never has arguments).  rb_bug on any other node type. */
static NODE *
get_nd_args(const NODE *node)
{
    switch (nd_type(node)) {
      case NODE_CALL:
        return RNODE_CALL(node)->nd_args;
      case NODE_OPCALL:
        return RNODE_OPCALL(node)->nd_args;
      case NODE_FCALL:
        return RNODE_FCALL(node)->nd_args;
      case NODE_QCALL:
        return RNODE_QCALL(node)->nd_args;
      case NODE_VCALL:
        return 0;
      case NODE_ATTRASGN:
        return RNODE_ATTRASGN(node)->nd_args;
      default:
        rb_bug("unexpected node: %s", ruby_node_name(nd_type(node)));
    }
}
791 
/* Return the constant name id of a COLON2 (A::B) or COLON3 (::B) node.
 * rb_bug on any other node type. */
static ID
get_node_colon_nd_mid(const NODE *node)
{
    switch (nd_type(node)) {
      case NODE_COLON2:
        return RNODE_COLON2(node)->nd_mid;
      case NODE_COLON3:
        return RNODE_COLON3(node)->nd_mid;
      default:
        rb_bug("unexpected node: %s", ruby_node_name(nd_type(node)));
    }
}
804 
/* Return the variable id being assigned by a local/dynamic/instance/class
 * variable assignment node.  rb_bug on any other node type. */
static ID
get_nd_vid(const NODE *node)
{
    switch (nd_type(node)) {
      case NODE_LASGN:
        return RNODE_LASGN(node)->nd_vid;
      case NODE_DASGN:
        return RNODE_DASGN(node)->nd_vid;
      case NODE_IASGN:
        return RNODE_IASGN(node)->nd_vid;
      case NODE_CVASGN:
        return RNODE_CVASGN(node)->nd_vid;
      default:
        rb_bug("unexpected node: %s", ruby_node_name(nd_type(node)));
    }
}
821 
/* Return the value expression of an LASGN/DASGN assignment node.
 * rb_bug on any other node type. */
static NODE *
get_nd_value(const NODE *node)
{
    switch (nd_type(node)) {
      case NODE_LASGN:
        return RNODE_LASGN(node)->nd_value;
      case NODE_DASGN:
        return RNODE_DASGN(node)->nd_value;
      default:
        rb_bug("unexpected node: %s", ruby_node_name(nd_type(node)));
    }
}
834 
/* Return the string VALUE held by a STR literal or FILE (__FILE__) node.
 * rb_bug on any other node type. */
static VALUE
get_string_value(const NODE *node)
{
    switch (nd_type(node)) {
      case NODE_STR:
        return rb_node_str_string_val(node);
      case NODE_FILE:
        return rb_node_file_path_val(node);
      default:
        rb_bug("unexpected node: %s", ruby_node_name(nd_type(node)));
    }
}
847 
/*
 * Compile an iseq whose body is produced by a C callback rather than an
 * AST: the callback fills the instruction list, then a trailing `leave`
 * is appended and the sequence is assembled via iseq_setup.
 */
VALUE
rb_iseq_compile_callback(rb_iseq_t *iseq, const struct rb_iseq_new_with_callback_callback_func * ifunc)
{
    DECL_ANCHOR(ret);
    INIT_ANCHOR(ret);

    /* let the callback emit the instruction list */
    (*ifunc->func)(iseq, ret, ifunc->data);

    ADD_SYNTHETIC_INSN(ret, ISEQ_COMPILE_DATA(iseq)->last_line, -1, leave);

    CHECK(iseq_setup_insn(iseq, ret));
    return iseq_setup(iseq, ret);
}
861 
862 static bool drop_unreachable_return(LINK_ANCHOR *ret);
863 
/*
 * Compile the AST rooted at `node` into iseq.  For top/method/class/block
 * iseqs `node` is a NODE_SCOPE; rescue/ensure/plain iseqs receive the raw
 * body node.  A NULL node compiles to `nil`.  Returns the result of
 * iseq_setup (COMPILE_NG on failure).
 */
VALUE
rb_iseq_compile_node(rb_iseq_t *iseq, const NODE *node)
{
    DECL_ANCHOR(ret);
    INIT_ANCHOR(ret);

    if (node == 0) {
        /* empty program: just push nil (no locals) */
        NO_CHECK(COMPILE(ret, "nil", node));
        iseq_set_local_table(iseq, 0, 0);
    }
    /* assume node is T_NODE */
    else if (nd_type_p(node, NODE_SCOPE)) {
        /* iseq type of top, method, class, block */
        iseq_set_local_table(iseq, RNODE_SCOPE(node)->nd_tbl, (NODE *)RNODE_SCOPE(node)->nd_args);
        iseq_set_arguments(iseq, ret, (NODE *)RNODE_SCOPE(node)->nd_args);

        switch (ISEQ_BODY(iseq)->type) {
          case ISEQ_TYPE_BLOCK:
            {
                LABEL *start = ISEQ_COMPILE_DATA(iseq)->start_label = NEW_LABEL(0);
                LABEL *end = ISEQ_COMPILE_DATA(iseq)->end_label = NEW_LABEL(0);

                start->rescued = LABEL_RESCUE_BEG;
                end->rescued = LABEL_RESCUE_END;

                /* b_call/b_return trace events bracket the block body */
                ADD_TRACE(ret, RUBY_EVENT_B_CALL);
                ADD_SYNTHETIC_INSN(ret, ISEQ_BODY(iseq)->location.first_lineno, -1, nop);
                ADD_LABEL(ret, start);
                CHECK(COMPILE(ret, "block body", RNODE_SCOPE(node)->nd_body));
                ADD_LABEL(ret, end);
                ADD_TRACE(ret, RUBY_EVENT_B_RETURN);
                ISEQ_COMPILE_DATA(iseq)->last_line = ISEQ_BODY(iseq)->location.code_location.end_pos.lineno;

                /* wide range catch handler must put at last */
                ADD_CATCH_ENTRY(CATCH_TYPE_REDO, start, end, NULL, start);
                ADD_CATCH_ENTRY(CATCH_TYPE_NEXT, start, end, NULL, end);
                break;
            }
          case ISEQ_TYPE_CLASS:
            {
                ADD_TRACE(ret, RUBY_EVENT_CLASS);
                CHECK(COMPILE(ret, "scoped node", RNODE_SCOPE(node)->nd_body));
                ADD_TRACE(ret, RUBY_EVENT_END);
                ISEQ_COMPILE_DATA(iseq)->last_line = nd_line(node);
                break;
            }
          case ISEQ_TYPE_METHOD:
            {
                ISEQ_COMPILE_DATA(iseq)->root_node = RNODE_SCOPE(node)->nd_body;
                ADD_TRACE(ret, RUBY_EVENT_CALL);
                CHECK(COMPILE(ret, "scoped node", RNODE_SCOPE(node)->nd_body));
                ISEQ_COMPILE_DATA(iseq)->root_node = RNODE_SCOPE(node)->nd_body;
                ADD_TRACE(ret, RUBY_EVENT_RETURN);
                ISEQ_COMPILE_DATA(iseq)->last_line = nd_line(node);
                break;
            }
          default: {
            CHECK(COMPILE(ret, "scoped node", RNODE_SCOPE(node)->nd_body));
            break;
          }
        }
    }
    else {
        /* non-SCOPE root: only rescue/ensure/plain iseq types accept it */
        const char *m;
#define INVALID_ISEQ_TYPE(type) \
        ISEQ_TYPE_##type: m = #type; goto invalid_iseq_type
        switch (ISEQ_BODY(iseq)->type) {
          case INVALID_ISEQ_TYPE(METHOD);
          case INVALID_ISEQ_TYPE(CLASS);
          case INVALID_ISEQ_TYPE(BLOCK);
          case INVALID_ISEQ_TYPE(EVAL);
          case INVALID_ISEQ_TYPE(MAIN);
          case INVALID_ISEQ_TYPE(TOP);
#undef INVALID_ISEQ_TYPE /* invalid iseq types end */
          case ISEQ_TYPE_RESCUE:
            iseq_set_exception_local_table(iseq);
            CHECK(COMPILE(ret, "rescue", node));
            break;
          case ISEQ_TYPE_ENSURE:
            iseq_set_exception_local_table(iseq);
            CHECK(COMPILE_POPPED(ret, "ensure", node));
            break;
          case ISEQ_TYPE_PLAIN:
            CHECK(COMPILE(ret, "ensure", node));
            break;
          default:
            COMPILE_ERROR(ERROR_ARGS "unknown scope: %d", ISEQ_BODY(iseq)->type);
            return COMPILE_NG;
          invalid_iseq_type:
            COMPILE_ERROR(ERROR_ARGS "compile/ISEQ_TYPE_%s should not be reached", m);
            return COMPILE_NG;
        }
    }

    if (ISEQ_BODY(iseq)->type == ISEQ_TYPE_RESCUE || ISEQ_BODY(iseq)->type == ISEQ_TYPE_ENSURE) {
        /* rescue/ensure iseqs end by re-throwing the error-info local */
        NODE dummy_line_node = generate_dummy_line_node(0, -1);
        ADD_GETLOCAL(ret, &dummy_line_node, LVAR_ERRINFO, 0);
        ADD_INSN1(ret, &dummy_line_node, throw, INT2FIX(0) /* continue throw */ );
    }
    else if (!drop_unreachable_return(ret)) {
        /* normal iseqs end with an explicit leave */
        ADD_SYNTHETIC_INSN(ret, ISEQ_COMPILE_DATA(iseq)->last_line, -1, leave);
    }

#if OPT_SUPPORT_JOKE
    if (ISEQ_COMPILE_DATA(iseq)->labels_table) {
        st_table *labels_table = ISEQ_COMPILE_DATA(iseq)->labels_table;
        ISEQ_COMPILE_DATA(iseq)->labels_table = 0;
        validate_labels(iseq, labels_table);
    }
#endif
    CHECK(iseq_setup_insn(iseq, ret));
    return iseq_setup(iseq, ret);
}
977 
978 static int
979 rb_iseq_translate_threaded_code(rb_iseq_t *iseq)
980 {
981 #if OPT_DIRECT_THREADED_CODE || OPT_CALL_THREADED_CODE
982  const void * const *table = rb_vm_get_insns_address_table();
983  unsigned int i;
984  VALUE *encoded = (VALUE *)ISEQ_BODY(iseq)->iseq_encoded;
985 
986  for (i = 0; i < ISEQ_BODY(iseq)->iseq_size; /* */ ) {
987  int insn = (int)ISEQ_BODY(iseq)->iseq_encoded[i];
988  int len = insn_len(insn);
989  encoded[i] = (VALUE)table[insn];
990  i += len;
991  }
992  FL_SET((VALUE)iseq, ISEQ_TRANSLATED);
993 #endif
994 
995 #if USE_YJIT
996  rb_yjit_live_iseq_count++;
997  rb_yjit_iseq_alloc_count++;
998 #endif
999 
1000  return COMPILE_OK;
1001 }
1002 
/*
 * Return the iseq's instruction sequence with opcode *numbers* (not
 * handler addresses), materializing and caching a copy on first use.
 * Under threaded code the live sequence holds handler addresses, so the
 * copy is translated back via rb_vm_insn_addr2insn().  Cold path used by
 * disassembly/serialization.
 */
VALUE *
rb_iseq_original_iseq(const rb_iseq_t *iseq) /* cold path */
{
    VALUE *original_code;

    if (ISEQ_ORIGINAL_ISEQ(iseq)) return ISEQ_ORIGINAL_ISEQ(iseq);
    original_code = ISEQ_ORIGINAL_ISEQ_ALLOC(iseq, ISEQ_BODY(iseq)->iseq_size);
    MEMCPY(original_code, ISEQ_BODY(iseq)->iseq_encoded, VALUE, ISEQ_BODY(iseq)->iseq_size);

#if OPT_DIRECT_THREADED_CODE || OPT_CALL_THREADED_CODE
    {
        unsigned int i;

        /* map each handler address back to its opcode number; step by
         * instruction length to skip operand slots */
        for (i = 0; i < ISEQ_BODY(iseq)->iseq_size; /* */ ) {
            const void *addr = (const void *)original_code[i];
            const int insn = rb_vm_insn_addr2insn(addr);

            original_code[i] = insn;
            i += insn_len(insn);
        }
    }
#endif
    return original_code;
}
1027 
1028 /*********************************************/
1029 /* definition of data structure for compiler */
1030 /*********************************************/
1031 
1032 /*
1033  * On 32-bit SPARC, GCC by default generates SPARC V7 code that may require
1034  * 8-byte word alignment. On the other hand, Oracle Solaris Studio seems to
1035  * generate SPARCV8PLUS code with unaligned memory access instructions.
1036  * That is why the STRICT_ALIGNMENT is defined only with GCC.
1037  */
1038 #if defined(__sparc) && SIZEOF_VOIDP == 4 && defined(__GNUC__)
1039  #define STRICT_ALIGNMENT
1040 #endif
1041 
1042 /*
1043  * Some OpenBSD platforms (including sparc64) require strict alignment.
1044  */
1045 #if defined(__OpenBSD__)
1046  #include <sys/endian.h>
1047  #ifdef __STRICT_ALIGNMENT
1048  #define STRICT_ALIGNMENT
1049  #endif
1050 #endif
1051 
1052 #ifdef STRICT_ALIGNMENT
1053  #if defined(HAVE_TRUE_LONG_LONG) && SIZEOF_LONG_LONG > SIZEOF_VALUE
1054  #define ALIGNMENT_SIZE SIZEOF_LONG_LONG
1055  #else
1056  #define ALIGNMENT_SIZE SIZEOF_VALUE
1057  #endif
1058  #define PADDING_SIZE_MAX ((size_t)((ALIGNMENT_SIZE) - 1))
1059  #define ALIGNMENT_SIZE_MASK PADDING_SIZE_MAX
1060  /* Note: ALIGNMENT_SIZE == (2 ** N) is expected. */
1061 #else
1062  #define PADDING_SIZE_MAX 0
1063 #endif /* STRICT_ALIGNMENT */
1064 
#ifdef STRICT_ALIGNMENT
/*
 * Number of bytes to skip so that an allocation of `size` bytes starting
 * at `ptr` satisfies ALIGNMENT_SIZE alignment.
 */
static size_t
calc_padding(void *ptr, size_t size)
{
    const size_t misalign = (size_t)ptr & ALIGNMENT_SIZE_MASK;
    size_t padding = misalign ? ALIGNMENT_SIZE - misalign : 0;

/*
 * On 32-bit sparc or equivalents, when a single VALUE is requested
 * and padding == sizeof(VALUE), it is clear that no padding is needed.
 */
#if ALIGNMENT_SIZE > SIZEOF_VALUE
    if (size == sizeof(VALUE) && padding == sizeof(VALUE)) {
        padding = 0;
    }
#endif

    return padding;
}
#endif /* STRICT_ALIGNMENT */
1090 
/*
 * Bump allocator over the compile-data arena.  Hands out `size` bytes
 * from the current storage chunk, growing the chunk list (doubling the
 * chunk size) when the request does not fit.  Memory is only released
 * when the whole arena is torn down after compilation.
 */
static void *
compile_data_alloc_with_arena(struct iseq_compile_data_storage **arena, size_t size)
{
    void *ptr = 0;
    struct iseq_compile_data_storage *storage = *arena;
#ifdef STRICT_ALIGNMENT
    size_t padding = calc_padding((void *)&storage->buff[storage->pos], size);
#else
    const size_t padding = 0; /* expected to be optimized by compiler */
#endif /* STRICT_ALIGNMENT */

    if (size >= INT_MAX - padding) rb_memerror();
    if (storage->pos + size + padding > storage->size) {
        /* request does not fit: allocate a new chunk at least as large as
         * size plus worst-case padding, doubling until sufficient */
        unsigned int alloc_size = storage->size;

        while (alloc_size < size + PADDING_SIZE_MAX) {
            if (alloc_size >= INT_MAX / 2) rb_memerror();
            alloc_size *= 2;
        }
        storage->next = (void *)ALLOC_N(char, alloc_size +
                                        offsetof(struct iseq_compile_data_storage, buff));
        storage = *arena = storage->next;
        storage->next = 0;
        storage->pos = 0;
        storage->size = alloc_size;
#ifdef STRICT_ALIGNMENT
        /* the new chunk starts at a different address; recompute padding */
        padding = calc_padding((void *)&storage->buff[storage->pos], size);
#endif /* STRICT_ALIGNMENT */
    }

#ifdef STRICT_ALIGNMENT
    storage->pos += (int)padding;
#endif /* STRICT_ALIGNMENT */

    ptr = (void *)&storage->buff[storage->pos];
    storage->pos += (int)size;
    return ptr;
}
1129 
1130 static void *
1131 compile_data_alloc(rb_iseq_t *iseq, size_t size)
1132 {
1133  struct iseq_compile_data_storage ** arena = &ISEQ_COMPILE_DATA(iseq)->node.storage_current;
1134  return compile_data_alloc_with_arena(arena, size);
1135 }
1136 
1137 static inline void *
1138 compile_data_alloc2(rb_iseq_t *iseq, size_t x, size_t y)
1139 {
1140  size_t size = rb_size_mul_or_raise(x, y, rb_eRuntimeError);
1141  return compile_data_alloc(iseq, size);
1142 }
1143 
1144 static inline void *
1145 compile_data_calloc2(rb_iseq_t *iseq, size_t x, size_t y)
1146 {
1147  size_t size = rb_size_mul_or_raise(x, y, rb_eRuntimeError);
1148  void *p = compile_data_alloc(iseq, size);
1149  memset(p, 0, size);
1150  return p;
1151 }
1152 
1153 static INSN *
1154 compile_data_alloc_insn(rb_iseq_t *iseq)
1155 {
1156  struct iseq_compile_data_storage ** arena = &ISEQ_COMPILE_DATA(iseq)->insn.storage_current;
1157  return (INSN *)compile_data_alloc_with_arena(arena, sizeof(INSN));
1158 }
1159 
1160 static LABEL *
1161 compile_data_alloc_label(rb_iseq_t *iseq)
1162 {
1163  return (LABEL *)compile_data_alloc(iseq, sizeof(LABEL));
1164 }
1165 
1166 static ADJUST *
1167 compile_data_alloc_adjust(rb_iseq_t *iseq)
1168 {
1169  return (ADJUST *)compile_data_alloc(iseq, sizeof(ADJUST));
1170 }
1171 
1172 static TRACE *
1173 compile_data_alloc_trace(rb_iseq_t *iseq)
1174 {
1175  return (TRACE *)compile_data_alloc(iseq, sizeof(TRACE));
1176 }
1177 
1178 /*
1179  * elem1, elemX => elem1, elem2, elemX
1180  */
1181 static void
1182 ELEM_INSERT_NEXT(LINK_ELEMENT *elem1, LINK_ELEMENT *elem2)
1183 {
1184  elem2->next = elem1->next;
1185  elem2->prev = elem1;
1186  elem1->next = elem2;
1187  if (elem2->next) {
1188  elem2->next->prev = elem2;
1189  }
1190 }
1191 
1192 /*
1193  * elem1, elemX => elemX, elem2, elem1
1194  */
1195 static void
1196 ELEM_INSERT_PREV(LINK_ELEMENT *elem1, LINK_ELEMENT *elem2)
1197 {
1198  elem2->prev = elem1->prev;
1199  elem2->next = elem1;
1200  elem1->prev = elem2;
1201  if (elem2->prev) {
1202  elem2->prev->next = elem2;
1203  }
1204 }
1205 
1206 /*
1207  * elemX, elem1, elemY => elemX, elem2, elemY
1208  */
1209 static void
1210 ELEM_REPLACE(LINK_ELEMENT *elem1, LINK_ELEMENT *elem2)
1211 {
1212  elem2->prev = elem1->prev;
1213  elem2->next = elem1->next;
1214  if (elem1->prev) {
1215  elem1->prev->next = elem2;
1216  }
1217  if (elem1->next) {
1218  elem1->next->prev = elem2;
1219  }
1220 }
1221 
1222 static void
1223 ELEM_REMOVE(LINK_ELEMENT *elem)
1224 {
1225  elem->prev->next = elem->next;
1226  if (elem->next) {
1227  elem->next->prev = elem->prev;
1228  }
1229 }
1230 
/* First payload element of an anchored list, or NULL when the list is empty. */
static LINK_ELEMENT *
FIRST_ELEMENT(const LINK_ANCHOR *const anchor)
{
    return anchor->anchor.next;
}
1236 
/* Last payload element of an anchored list (the anchor caches its tail). */
static LINK_ELEMENT *
LAST_ELEMENT(LINK_ANCHOR *const anchor)
{
    return anchor->last;
}
1242 
1243 static LINK_ELEMENT *
1244 ELEM_FIRST_INSN(LINK_ELEMENT *elem)
1245 {
1246  while (elem) {
1247  switch (elem->type) {
1248  case ISEQ_ELEMENT_INSN:
1249  case ISEQ_ELEMENT_ADJUST:
1250  return elem;
1251  default:
1252  elem = elem->next;
1253  }
1254  }
1255  return NULL;
1256 }
1257 
1258 static int
1259 LIST_INSN_SIZE_ONE(const LINK_ANCHOR *const anchor)
1260 {
1261  LINK_ELEMENT *first_insn = ELEM_FIRST_INSN(FIRST_ELEMENT(anchor));
1262  if (first_insn != NULL &&
1263  ELEM_FIRST_INSN(first_insn->next) == NULL) {
1264  return TRUE;
1265  }
1266  else {
1267  return FALSE;
1268  }
1269 }
1270 
1271 static int
1272 LIST_INSN_SIZE_ZERO(const LINK_ANCHOR *const anchor)
1273 {
1274  if (ELEM_FIRST_INSN(FIRST_ELEMENT(anchor)) == NULL) {
1275  return TRUE;
1276  }
1277  else {
1278  return FALSE;
1279  }
1280 }
1281 
1282 /*
1283  * anc1: e1, e2, e3
1284  * anc2: e4, e5
1285  *#=>
1286  * anc1: e1, e2, e3, e4, e5
1287  * anc2: e4, e5 (broken)
1288  */
1289 static void
1290 APPEND_LIST(ISEQ_ARG_DECLARE LINK_ANCHOR *const anc1, LINK_ANCHOR *const anc2)
1291 {
1292  if (anc2->anchor.next) {
1293  anc1->last->next = anc2->anchor.next;
1294  anc2->anchor.next->prev = anc1->last;
1295  anc1->last = anc2->last;
1296  }
1297  verify_list("append", anc1);
1298 }
1299 #if CPDEBUG < 0
1300 #define APPEND_LIST(anc1, anc2) APPEND_LIST(iseq, (anc1), (anc2))
1301 #endif
1302 
#if CPDEBUG && 0
/* Debug-only dump of a link list: prints anchor bookkeeping, every
 * element's pointers and type, then a disassembly with `cur` highlighted.
 * Compiled out (the `&& 0` disables it even in CPDEBUG builds). */
static void
debug_list(ISEQ_ARG_DECLARE LINK_ANCHOR *const anchor, LINK_ELEMENT *cur)
{
    LINK_ELEMENT *list = FIRST_ELEMENT(anchor);
    printf("----\n");
    printf("anch: %p, frst: %p, last: %p\n", (void *)&anchor->anchor,
           (void *)anchor->anchor.next, (void *)anchor->last);
    while (list) {
        printf("curr: %p, next: %p, prev: %p, type: %d\n", (void *)list, (void *)list->next,
               (void *)list->prev, (int)list->type);
        list = list->next;
    }
    printf("----\n");

    dump_disasm_list_with_cursor(anchor->anchor.next, cur, 0);
    verify_list("debug list", anchor);
}
#if CPDEBUG < 0
#define debug_list(anc, cur) debug_list(iseq, (anc), (cur))
#endif
#else
#define debug_list(anc, cur) ((void)0)
#endif
1327 
1328 static TRACE *
1329 new_trace_body(rb_iseq_t *iseq, rb_event_flag_t event, long data)
1330 {
1331  TRACE *trace = compile_data_alloc_trace(iseq);
1332 
1333  trace->link.type = ISEQ_ELEMENT_TRACE;
1334  trace->link.next = NULL;
1335  trace->event = event;
1336  trace->data = data;
1337 
1338  return trace;
1339 }
1340 
1341 static LABEL *
1342 new_label_body(rb_iseq_t *iseq, long line)
1343 {
1344  LABEL *labelobj = compile_data_alloc_label(iseq);
1345 
1346  labelobj->link.type = ISEQ_ELEMENT_LABEL;
1347  labelobj->link.next = 0;
1348 
1349  labelobj->label_no = ISEQ_COMPILE_DATA(iseq)->label_no++;
1350  labelobj->sc_state = 0;
1351  labelobj->sp = -1;
1352  labelobj->refcnt = 0;
1353  labelobj->set = 0;
1354  labelobj->rescued = LABEL_RESCUE_NONE;
1355  labelobj->unremovable = 0;
1356  labelobj->position = -1;
1357  return labelobj;
1358 }
1359 
1360 static ADJUST *
1361 new_adjust_body(rb_iseq_t *iseq, LABEL *label, int line)
1362 {
1363  ADJUST *adjust = compile_data_alloc_adjust(iseq);
1364  adjust->link.type = ISEQ_ELEMENT_ADJUST;
1365  adjust->link.next = 0;
1366  adjust->label = label;
1367  adjust->line_no = line;
1368  LABEL_UNREMOVABLE(label);
1369  return adjust;
1370 }
1371 
1372 static void
1373 iseq_insn_each_markable_object(INSN *insn, void (*func)(VALUE, VALUE), VALUE data)
1374 {
1375  const char *types = insn_op_types(insn->insn_id);
1376  for (int j = 0; types[j]; j++) {
1377  char type = types[j];
1378  switch (type) {
1379  case TS_CDHASH:
1380  case TS_ISEQ:
1381  case TS_VALUE:
1382  case TS_IC: // constant path array
1383  case TS_CALLDATA: // ci is stored.
1384  func(OPERAND_AT(insn, j), data);
1385  break;
1386  default:
1387  break;
1388  }
1389  }
1390 }
1391 
/* Callback for iseq_insn_each_markable_object: record a GC write barrier
 * from the iseq to the operand object. */
static void
iseq_insn_each_object_write_barrier(VALUE obj, VALUE iseq)
{
    RB_OBJ_WRITTEN(iseq, Qundef, obj);
}
1397 
1398 static INSN *
1399 new_insn_core(rb_iseq_t *iseq, int line_no, int node_id, int insn_id, int argc, VALUE *argv)
1400 {
1401  INSN *iobj = compile_data_alloc_insn(iseq);
1402 
1403  /* printf("insn_id: %d, line: %d\n", insn_id, nd_line(line_node)); */
1404 
1405  iobj->link.type = ISEQ_ELEMENT_INSN;
1406  iobj->link.next = 0;
1407  iobj->insn_id = insn_id;
1408  iobj->insn_info.line_no = line_no;
1409  iobj->insn_info.node_id = node_id;
1410  iobj->insn_info.events = 0;
1411  iobj->operands = argv;
1412  iobj->operand_size = argc;
1413  iobj->sc_state = 0;
1414 
1415  iseq_insn_each_markable_object(iobj, iseq_insn_each_object_write_barrier, (VALUE)iseq);
1416 
1417  return iobj;
1418 }
1419 
1420 static INSN *
1421 new_insn_body(rb_iseq_t *iseq, int line_no, int node_id, enum ruby_vminsn_type insn_id, int argc, ...)
1422 {
1423  VALUE *operands = 0;
1424  va_list argv;
1425  if (argc > 0) {
1426  int i;
1427  va_start(argv, argc);
1428  operands = compile_data_alloc2(iseq, sizeof(VALUE), argc);
1429  for (i = 0; i < argc; i++) {
1430  VALUE v = va_arg(argv, VALUE);
1431  operands[i] = v;
1432  }
1433  va_end(argv);
1434  }
1435  return new_insn_core(iseq, line_no, node_id, insn_id, argc, operands);
1436 }
1437 
1438 static const struct rb_callinfo *
1439 new_callinfo(rb_iseq_t *iseq, ID mid, int argc, unsigned int flag, struct rb_callinfo_kwarg *kw_arg, int has_blockiseq)
1440 {
1441  VM_ASSERT(argc >= 0);
1442 
1443  if (kw_arg) {
1444  flag |= VM_CALL_KWARG;
1445  argc += kw_arg->keyword_len;
1446  }
1447 
1448  if (!(flag & (VM_CALL_ARGS_SPLAT | VM_CALL_ARGS_BLOCKARG | VM_CALL_KWARG | VM_CALL_KW_SPLAT | VM_CALL_FORWARDING))
1449  && !has_blockiseq) {
1450  flag |= VM_CALL_ARGS_SIMPLE;
1451  }
1452 
1453  ISEQ_BODY(iseq)->ci_size++;
1454  const struct rb_callinfo *ci = vm_ci_new(mid, flag, argc, kw_arg);
1455  RB_OBJ_WRITTEN(iseq, Qundef, ci);
1456  return ci;
1457 }
1458 
/* Build a send (or sendforward) instruction whose two operands are a
 * freshly built call-info and the optional block iseq.  Write barriers
 * are fired for both heap-allocated operands. */
static INSN *
new_insn_send(rb_iseq_t *iseq, int line_no, int node_id, ID id, VALUE argc, const rb_iseq_t *blockiseq, VALUE flag, struct rb_callinfo_kwarg *keywords)
{
    VALUE *operands = compile_data_calloc2(iseq, sizeof(VALUE), 2);
    VALUE ci = (VALUE)new_callinfo(iseq, id, FIX2INT(argc), FIX2INT(flag), keywords, blockiseq != NULL);
    operands[0] = ci;
    operands[1] = (VALUE)blockiseq;
    if (blockiseq) {
        RB_OBJ_WRITTEN(iseq, Qundef, blockiseq);
    }

    INSN *insn;

    /* `...` forwarding call sites use the dedicated sendforward opcode */
    if (vm_ci_flag((struct rb_callinfo *)ci) & VM_CALL_FORWARDING) {
        insn = new_insn_core(iseq, line_no, node_id, BIN(sendforward), 2, operands);
    }
    else {
        insn = new_insn_core(iseq, line_no, node_id, BIN(send), 2, operands);
    }

    /* keep ci alive and registered until it is safely reachable from insn */
    RB_OBJ_WRITTEN(iseq, Qundef, ci);
    RB_GC_GUARD(ci);
    return insn;
}
1483 
1484 static rb_iseq_t *
1485 new_child_iseq(rb_iseq_t *iseq, const NODE *const node,
1486  VALUE name, const rb_iseq_t *parent, enum rb_iseq_type type, int line_no)
1487 {
1488  rb_iseq_t *ret_iseq;
1489  VALUE ast_value = rb_ruby_ast_new(node);
1490 
1491  debugs("[new_child_iseq]> ---------------------------------------\n");
1492  int isolated_depth = ISEQ_COMPILE_DATA(iseq)->isolated_depth;
1493  ret_iseq = rb_iseq_new_with_opt(ast_value, name,
1494  rb_iseq_path(iseq), rb_iseq_realpath(iseq),
1495  line_no, parent,
1496  isolated_depth ? isolated_depth + 1 : 0,
1497  type, ISEQ_COMPILE_DATA(iseq)->option,
1498  ISEQ_BODY(iseq)->variable.script_lines);
1499  debugs("[new_child_iseq]< ---------------------------------------\n");
1500  return ret_iseq;
1501 }
1502 
1503 static rb_iseq_t *
1504 new_child_iseq_with_callback(rb_iseq_t *iseq, const struct rb_iseq_new_with_callback_callback_func *ifunc,
1505  VALUE name, const rb_iseq_t *parent, enum rb_iseq_type type, int line_no)
1506 {
1507  rb_iseq_t *ret_iseq;
1508 
1509  debugs("[new_child_iseq_with_callback]> ---------------------------------------\n");
1510  ret_iseq = rb_iseq_new_with_callback(ifunc, name,
1511  rb_iseq_path(iseq), rb_iseq_realpath(iseq),
1512  line_no, parent, type, ISEQ_COMPILE_DATA(iseq)->option);
1513  debugs("[new_child_iseq_with_callback]< ---------------------------------------\n");
1514  return ret_iseq;
1515 }
1516 
1517 static void
1518 set_catch_except_p(rb_iseq_t *iseq)
1519 {
1520  RUBY_ASSERT(ISEQ_COMPILE_DATA(iseq));
1521  ISEQ_COMPILE_DATA(iseq)->catch_except_p = true;
1522  if (ISEQ_BODY(iseq)->parent_iseq != NULL) {
1523  if (ISEQ_COMPILE_DATA(ISEQ_BODY(iseq)->parent_iseq)) {
1524  set_catch_except_p((rb_iseq_t *) ISEQ_BODY(iseq)->parent_iseq);
1525  }
1526  }
1527 }
1528 
/* Set body->catch_except_p to true if the ISeq may catch an exception. If it is false,
   JIT-ed code may be optimized. If we are extremely conservative, we should set true
   if catch table exists. But we want to optimize while loop, which always has catch
   table entries for break/next/redo.

   So this function sets true for limited ISeqs with break/next/redo catch table entries
   whose child ISeq would really raise an exception. */
static void
update_catch_except_flags(rb_iseq_t *iseq, struct rb_iseq_constant_body *body)
{
    unsigned int pos;
    size_t i;
    int insn;
    const struct iseq_catch_table *ct = body->catch_table;

    /* This assumes that a block has parent_iseq which may catch an exception from the block, and that
       BREAK/NEXT/REDO catch table entries are used only when `throw` insn is used in the block. */
    pos = 0;
    while (pos < body->iseq_size) {
        insn = rb_vm_insn_decode(body->iseq_encoded[pos]);
        if (insn == BIN(throw)) {
            /* a throw anywhere in the body taints this iseq and its ancestors */
            set_catch_except_p(iseq);
            break;
        }
        pos += insn_len(insn);
    }

    if (ct == NULL)
        return;

    /* any catch entry other than break/next/redo (i.e. rescue/ensure/retry)
       means an exception can genuinely be caught here */
    for (i = 0; i < ct->size; i++) {
        const struct iseq_catch_table_entry *entry =
            UNALIGNED_MEMBER_PTR(ct, entries[i]);
        if (entry->type != CATCH_TYPE_BREAK
            && entry->type != CATCH_TYPE_NEXT
            && entry->type != CATCH_TYPE_REDO) {
            RUBY_ASSERT(ISEQ_COMPILE_DATA(iseq));
            ISEQ_COMPILE_DATA(iseq)->catch_except_p = true;
            break;
        }
    }
}
1571 
/* For each exception-class catch entry (rescue/ensure etc., not
 * break/next/redo), insert a nop after the protected region's end label
 * when only labels/traces separate `end` from `cont`.  This keeps the
 * end and continuation offsets distinct in the final catch table. */
static void
iseq_insert_nop_between_end_and_cont(rb_iseq_t *iseq)
{
    VALUE catch_table_ary = ISEQ_COMPILE_DATA(iseq)->catch_table_ary;
    if (NIL_P(catch_table_ary)) return;
    unsigned int i, tlen = (unsigned int)RARRAY_LEN(catch_table_ary);
    const VALUE *tptr = RARRAY_CONST_PTR(catch_table_ary);
    for (i = 0; i < tlen; i++) {
        const VALUE *ptr = RARRAY_CONST_PTR(tptr[i]);
        /* entries store label pointers with the low bit set as a tag;
         * mask it off to recover the LINK_ELEMENT pointer */
        LINK_ELEMENT *end = (LINK_ELEMENT *)(ptr[2] & ~1);
        LINK_ELEMENT *cont = (LINK_ELEMENT *)(ptr[4] & ~1);
        LINK_ELEMENT *e;

        enum rb_catch_type ct = (enum rb_catch_type)(ptr[0] & 0xffff);

        if (ct != CATCH_TYPE_BREAK
            && ct != CATCH_TYPE_NEXT
            && ct != CATCH_TYPE_REDO) {

            /* scan forward over labels/traces only; if cont is reachable
             * that way, end and cont would encode to the same offset */
            for (e = end; e && (IS_LABEL(e) || IS_TRACE(e)); e = e->next) {
                if (e == cont) {
                    INSN *nop = new_insn_core(iseq, 0, -1, BIN(nop), 0, 0);
                    ELEM_INSERT_NEXT(end, &nop->link);
                    break;
                }
            }
        }
    }

    /* keep the hidden array alive across the raw pointer accesses above */
    RB_GC_GUARD(catch_table_ary);
}
1603 
/* Instruction-list phase of iseq finalization: run the peephole
 * optimizer, optional instruction unification, and nop insertion between
 * catch-table end/cont labels.  Returns COMPILE_NG if a compile error was
 * already recorded, COMPILE_OK otherwise. */
static int
iseq_setup_insn(rb_iseq_t *iseq, LINK_ANCHOR *const anchor)
{
    if (RTEST(ISEQ_COMPILE_DATA(iseq)->err_info))
        return COMPILE_NG;

    /* debugs("[compile step 2] (iseq_array_to_linkedlist)\n"); */

    if (compile_debug > 5)
        dump_disasm_list(FIRST_ELEMENT(anchor));

    debugs("[compile step 3.1 (iseq_optimize)]\n");
    iseq_optimize(iseq, anchor);

    if (compile_debug > 5)
        dump_disasm_list(FIRST_ELEMENT(anchor));

    if (ISEQ_COMPILE_DATA(iseq)->option->instructions_unification) {
        debugs("[compile step 3.2 (iseq_insns_unification)]\n");
        iseq_insns_unification(iseq, anchor);
        if (compile_debug > 5)
            dump_disasm_list(FIRST_ELEMENT(anchor));
    }

    debugs("[compile step 3.4 (iseq_insert_nop_between_end_and_cont)]\n");
    iseq_insert_nop_between_end_and_cont(iseq);
    if (compile_debug > 5)
        dump_disasm_list(FIRST_ELEMENT(anchor));

    return COMPILE_OK;
}
1635 
/* Final phase of iseq construction: serialize the link list into encoded
 * instructions, build exception/optarg tables, translate to threaded
 * code, compute catch_except_p, drop catch tables proven unnecessary,
 * and encode line-info positions.  Returns COMPILE_OK/COMPILE_NG. */
static int
iseq_setup(rb_iseq_t *iseq, LINK_ANCHOR *const anchor)
{
    if (RTEST(ISEQ_COMPILE_DATA(iseq)->err_info))
        return COMPILE_NG;

    debugs("[compile step 4.1 (iseq_set_sequence)]\n");
    if (!iseq_set_sequence(iseq, anchor)) return COMPILE_NG;
    if (compile_debug > 5)
        dump_disasm_list(FIRST_ELEMENT(anchor));

    debugs("[compile step 4.2 (iseq_set_exception_table)]\n");
    if (!iseq_set_exception_table(iseq)) return COMPILE_NG;

    debugs("[compile step 4.3 (set_optargs_table)] \n");
    if (!iseq_set_optargs_table(iseq)) return COMPILE_NG;

    debugs("[compile step 5 (iseq_translate_threaded_code)] \n");
    if (!rb_iseq_translate_threaded_code(iseq)) return COMPILE_NG;

    debugs("[compile step 6 (update_catch_except_flags)] \n");
    RUBY_ASSERT(ISEQ_COMPILE_DATA(iseq));
    update_catch_except_flags(iseq, ISEQ_BODY(iseq));

    /* if nothing can actually be caught, the catch table only served
     * break/next/redo bookkeeping and can be freed for JIT-friendliness */
    debugs("[compile step 6.1 (remove unused catch tables)] \n");
    RUBY_ASSERT(ISEQ_COMPILE_DATA(iseq));
    if (!ISEQ_COMPILE_DATA(iseq)->catch_except_p && ISEQ_BODY(iseq)->catch_table) {
        xfree(ISEQ_BODY(iseq)->catch_table);
        ISEQ_BODY(iseq)->catch_table = NULL;
    }

#if VM_INSN_INFO_TABLE_IMPL == 2
    if (ISEQ_BODY(iseq)->insns_info.succ_index_table == NULL) {
        debugs("[compile step 7 (rb_iseq_insns_info_encode_positions)] \n");
        rb_iseq_insns_info_encode_positions(iseq);
    }
#endif

    if (compile_debug > 1) {
        VALUE str = rb_iseq_disasm(iseq);
        printf("%s\n", StringValueCStr(str));
    }
    verify_call_cache(iseq);
    debugs("[compile step: finish]\n");

    return COMPILE_OK;
}
1683 
1684 static int
1685 iseq_set_exception_local_table(rb_iseq_t *iseq)
1686 {
1687  ISEQ_BODY(iseq)->local_table_size = numberof(rb_iseq_shared_exc_local_tbl);
1688  ISEQ_BODY(iseq)->local_table = rb_iseq_shared_exc_local_tbl;
1689  return COMPILE_OK;
1690 }
1691 
1692 static int
1693 get_lvar_level(const rb_iseq_t *iseq)
1694 {
1695  int lev = 0;
1696  while (iseq != ISEQ_BODY(iseq)->local_iseq) {
1697  lev++;
1698  iseq = ISEQ_BODY(iseq)->parent_iseq;
1699  }
1700  return lev;
1701 }
1702 
1703 static int
1704 get_dyna_var_idx_at_raw(const rb_iseq_t *iseq, ID id)
1705 {
1706  unsigned int i;
1707 
1708  for (i = 0; i < ISEQ_BODY(iseq)->local_table_size; i++) {
1709  if (ISEQ_BODY(iseq)->local_table[i] == id) {
1710  return (int)i;
1711  }
1712  }
1713  return -1;
1714 }
1715 
1716 static int
1717 get_local_var_idx(const rb_iseq_t *iseq, ID id)
1718 {
1719  int idx = get_dyna_var_idx_at_raw(ISEQ_BODY(iseq)->local_iseq, id);
1720 
1721  if (idx < 0) {
1722  COMPILE_ERROR(iseq, ISEQ_LAST_LINE(iseq),
1723  "get_local_var_idx: %d", idx);
1724  }
1725 
1726  return idx;
1727 }
1728 
1729 static int
1730 get_dyna_var_idx(const rb_iseq_t *iseq, ID id, int *level, int *ls)
1731 {
1732  int lv = 0, idx = -1;
1733  const rb_iseq_t *const topmost_iseq = iseq;
1734 
1735  while (iseq) {
1736  idx = get_dyna_var_idx_at_raw(iseq, id);
1737  if (idx >= 0) {
1738  break;
1739  }
1740  iseq = ISEQ_BODY(iseq)->parent_iseq;
1741  lv++;
1742  }
1743 
1744  if (idx < 0) {
1745  COMPILE_ERROR(topmost_iseq, ISEQ_LAST_LINE(topmost_iseq),
1746  "get_dyna_var_idx: -1");
1747  }
1748 
1749  *level = lv;
1750  *ls = ISEQ_BODY(iseq)->local_table_size;
1751  return idx;
1752 }
1753 
1754 static int
1755 iseq_local_block_param_p(const rb_iseq_t *iseq, unsigned int idx, unsigned int level)
1756 {
1757  const struct rb_iseq_constant_body *body;
1758  while (level > 0) {
1759  iseq = ISEQ_BODY(iseq)->parent_iseq;
1760  level--;
1761  }
1762  body = ISEQ_BODY(iseq);
1763  if (body->local_iseq == iseq && /* local variables */
1764  body->param.flags.has_block &&
1765  body->local_table_size - body->param.block_start == idx) {
1766  return TRUE;
1767  }
1768  else {
1769  return FALSE;
1770  }
1771 }
1772 
1773 static int
1774 iseq_block_param_id_p(const rb_iseq_t *iseq, ID id, int *pidx, int *plevel)
1775 {
1776  int level, ls;
1777  int idx = get_dyna_var_idx(iseq, id, &level, &ls);
1778  if (iseq_local_block_param_p(iseq, ls - idx, level)) {
1779  *pidx = ls - idx;
1780  *plevel = level;
1781  return TRUE;
1782  }
1783  else {
1784  return FALSE;
1785  }
1786 }
1787 
/* Record that this iseq reads (write == false) or writes (write == true)
 * a variable `id` living `level` scopes up.  Each intermediate iseq's
 * outer_variables table is updated; a read may later be upgraded to a
 * write but never downgraded.  Raises a compile error when the access
 * crosses an isolated-Proc boundary. */
static void
access_outer_variables(const rb_iseq_t *iseq, int level, ID id, bool write)
{
    int isolated_depth = ISEQ_COMPILE_DATA(iseq)->isolated_depth;

    if (isolated_depth && level >= isolated_depth) {
        if (id == rb_intern("yield")) {
            COMPILE_ERROR(iseq, ISEQ_LAST_LINE(iseq), "can not yield from isolated Proc");
        }
        else {
            COMPILE_ERROR(iseq, ISEQ_LAST_LINE(iseq), "can not access variable '%s' from isolated Proc", rb_id2name(id));
        }
    }

    for (int i=0; i<level; i++) {
        VALUE val;
        struct rb_id_table *ovs = ISEQ_BODY(iseq)->outer_variables;

        if (!ovs) {
            /* lazily create the per-iseq outer-variable table */
            ovs = ISEQ_BODY(iseq)->outer_variables = rb_id_table_create(8);
        }

        if (rb_id_table_lookup(ISEQ_BODY(iseq)->outer_variables, id, &val)) {
            /* already recorded as a read; upgrade to write if needed */
            if (write && !val) {
                rb_id_table_insert(ISEQ_BODY(iseq)->outer_variables, id, Qtrue);
            }
        }
        else {
            rb_id_table_insert(ISEQ_BODY(iseq)->outer_variables, id, RBOOL(write));
        }

        iseq = ISEQ_BODY(iseq)->parent_iseq;
    }
}
1822 
1823 static ID
1824 iseq_lvar_id(const rb_iseq_t *iseq, int idx, int level)
1825 {
1826  for (int i=0; i<level; i++) {
1827  iseq = ISEQ_BODY(iseq)->parent_iseq;
1828  }
1829 
1830  ID id = ISEQ_BODY(iseq)->local_table[ISEQ_BODY(iseq)->local_table_size - idx];
1831  // fprintf(stderr, "idx:%d level:%d ID:%s\n", idx, level, rb_id2name(id));
1832  return id;
1833 }
1834 
1835 static void
1836 iseq_add_getlocal(rb_iseq_t *iseq, LINK_ANCHOR *const seq, const NODE *const line_node, int idx, int level)
1837 {
1838  if (iseq_local_block_param_p(iseq, idx, level)) {
1839  ADD_INSN2(seq, line_node, getblockparam, INT2FIX((idx) + VM_ENV_DATA_SIZE - 1), INT2FIX(level));
1840  }
1841  else {
1842  ADD_INSN2(seq, line_node, getlocal, INT2FIX((idx) + VM_ENV_DATA_SIZE - 1), INT2FIX(level));
1843  }
1844  if (level > 0) access_outer_variables(iseq, level, iseq_lvar_id(iseq, idx, level), Qfalse);
1845 }
1846 
1847 static void
1848 iseq_add_setlocal(rb_iseq_t *iseq, LINK_ANCHOR *const seq, const NODE *const line_node, int idx, int level)
1849 {
1850  if (iseq_local_block_param_p(iseq, idx, level)) {
1851  ADD_INSN2(seq, line_node, setblockparam, INT2FIX((idx) + VM_ENV_DATA_SIZE - 1), INT2FIX(level));
1852  }
1853  else {
1854  ADD_INSN2(seq, line_node, setlocal, INT2FIX((idx) + VM_ENV_DATA_SIZE - 1), INT2FIX(level));
1855  }
1856  if (level > 0) access_outer_variables(iseq, level, iseq_lvar_id(iseq, idx, level), Qtrue);
1857 }
1858 
1859 
1860 
/* Compute body->param.size: one past the highest-numbered parameter
 * slot.  The branches are ordered by slot layout (block is last, then
 * keyword-rest, keyword bits, post, rest, optional), so the first flag
 * that matches determines the end of the parameter region. */
static void
iseq_calc_param_size(rb_iseq_t *iseq)
{
    struct rb_iseq_constant_body *const body = ISEQ_BODY(iseq);
    if (body->param.flags.has_opt ||
        body->param.flags.has_post ||
        body->param.flags.has_rest ||
        body->param.flags.has_block ||
        body->param.flags.has_kw ||
        body->param.flags.has_kwrest) {

        if (body->param.flags.has_block) {
            body->param.size = body->param.block_start + 1;
        }
        else if (body->param.flags.has_kwrest) {
            body->param.size = body->param.keyword->rest_start + 1;
        }
        else if (body->param.flags.has_kw) {
            body->param.size = body->param.keyword->bits_start + 1;
        }
        else if (body->param.flags.has_post) {
            body->param.size = body->param.post_start + body->param.post_num;
        }
        else if (body->param.flags.has_rest) {
            body->param.size = body->param.rest_start + 1;
        }
        else if (body->param.flags.has_opt) {
            body->param.size = body->param.lead_num + body->param.opt_num;
        }
        else {
            UNREACHABLE;
        }
    }
    else {
        /* only leading (required positional) parameters */
        body->param.size = body->param.lead_num;
    }
}
1898 
/* Set up keyword-argument metadata for an iseq: counts keywords, assigns
 * their slots (keywords, then the hidden "unspecified bits" slot, then an
 * optional kwrest slot), compiles non-literal defaults into `optargs`,
 * and records literal defaults in keyword->default_values (Qundef marks
 * a default that must be computed at call time).  Returns the updated
 * arg_size (next free local slot). */
static int
iseq_set_arguments_keywords(rb_iseq_t *iseq, LINK_ANCHOR *const optargs,
                            const struct rb_args_info *args, int arg_size)
{
    const rb_node_kw_arg_t *node = args->kw_args;
    struct rb_iseq_constant_body *const body = ISEQ_BODY(iseq);
    struct rb_iseq_param_keyword *keyword;
    const VALUE default_values = rb_ary_hidden_new(1);
    /* sentinel pushed for defaults that cannot be precomputed */
    const VALUE complex_mark = rb_str_tmp_new(0);
    int kw = 0, rkw = 0, di = 0, i;

    body->param.flags.has_kw = TRUE;
    body->param.keyword = keyword = ZALLOC_N(struct rb_iseq_param_keyword, 1);

    /* first pass: count keywords to place the bits slot after them */
    while (node) {
        kw++;
        node = node->nd_next;
    }
    arg_size += kw;
    keyword->bits_start = arg_size++;

    /* second pass: classify each keyword and collect default values */
    node = args->kw_args;
    while (node) {
        const NODE *val_node = get_nd_value(node->nd_body);
        VALUE dv;

        if (val_node == NODE_SPECIAL_REQUIRED_KEYWORD) {
            ++rkw;
        }
        else {
            switch (nd_type(val_node)) {
              case NODE_SYM:
                dv = rb_node_sym_string_val(val_node);
                break;
              case NODE_REGX:
                dv = rb_node_regx_string_val(val_node);
                break;
              case NODE_LINE:
                dv = rb_node_line_lineno_val(val_node);
                break;
              case NODE_INTEGER:
                dv = rb_node_integer_literal_val(val_node);
                break;
              case NODE_FLOAT:
                dv = rb_node_float_literal_val(val_node);
                break;
              case NODE_RATIONAL:
                dv = rb_node_rational_literal_val(val_node);
                break;
              case NODE_IMAGINARY:
                dv = rb_node_imaginary_literal_val(val_node);
                break;
              case NODE_ENCODING:
                dv = rb_node_encoding_val(val_node);
                break;
              case NODE_NIL:
                dv = Qnil;
                break;
              case NODE_TRUE:
                dv = Qtrue;
                break;
              case NODE_FALSE:
                dv = Qfalse;
                break;
              default:
                /* non-literal default: emit code for it and mark the slot */
                NO_CHECK(COMPILE_POPPED(optargs, "kwarg", RNODE(node))); /* nd_type_p(node, NODE_KW_ARG) */
                dv = complex_mark;
            }

            keyword->num = ++di;
            rb_ary_push(default_values, dv);
        }

        node = node->nd_next;
    }

    keyword->num = kw;

    if (RNODE_DVAR(args->kw_rest_arg)->nd_vid != 0) {
        ID kw_id = iseq->body->local_table[arg_size];
        keyword->rest_start = arg_size++;
        body->param.flags.has_kwrest = TRUE;

        /* anonymous `**` forwarding is flagged separately */
        if (kw_id == idPow) body->param.flags.anon_kwrest = TRUE;
    }
    keyword->required_num = rkw;
    keyword->table = &body->local_table[keyword->bits_start - keyword->num];

    if (RARRAY_LEN(default_values)) {
        VALUE *dvs = ALLOC_N(VALUE, RARRAY_LEN(default_values));

        for (i = 0; i < RARRAY_LEN(default_values); i++) {
            VALUE dv = RARRAY_AREF(default_values, i);
            /* Qundef signals "evaluate the compiled default at call time" */
            if (dv == complex_mark) dv = Qundef;
            RB_OBJ_WRITE(iseq, &dvs[i], dv);
        }

        keyword->default_values = dvs;
    }
    return arg_size;
}
2000 
2001 static void
2002 iseq_set_use_block(rb_iseq_t *iseq)
2003 {
2004  struct rb_iseq_constant_body *const body = ISEQ_BODY(iseq);
2005  if (!body->param.flags.use_block) {
2006  body->param.flags.use_block = 1;
2007 
2008  rb_vm_t *vm = GET_VM();
2009 
2010  if (!rb_warning_category_enabled_p(RB_WARN_CATEGORY_STRICT_UNUSED_BLOCK)) {
2011  st_data_t key = (st_data_t)rb_intern_str(body->location.label); // String -> ID
2012  st_insert(vm->unused_block_warning_table, key, 1);
2013  }
2014  }
2015 }
2016 
/*
 * Fill in ISEQ_BODY(iseq)->param (lead/opt/rest/post/kw/block counts,
 * start offsets and flags) from the NODE_ARGS node, compiling optional
 * argument default-value expressions into `optargs` along the way.
 *
 * `arg_size` accumulates the number of parameter locals as each kind of
 * parameter is processed, and ends up in body->param.size.
 *
 * Returns COMPILE_OK on success, COMPILE_NG on a malformed node.
 */
static int
iseq_set_arguments(rb_iseq_t *iseq, LINK_ANCHOR *const optargs, const NODE *const node_args)
{
    debugs("iseq_set_arguments: %s\n", node_args ? "" : "0");

    if (node_args) {
        struct rb_iseq_constant_body *const body = ISEQ_BODY(iseq);
        struct rb_args_info *args = &RNODE_ARGS(node_args)->nd_ainfo;
        ID rest_id = 0;
        int last_comma = 0;
        ID block_id = 0;
        int arg_size;

        EXPECT_NODE("iseq_set_arguments", node_args, NODE_ARGS, COMPILE_NG);

        body->param.flags.ruby2_keywords = args->ruby2_keywords;
        body->param.lead_num = arg_size = (int)args->pre_args_num;
        if (body->param.lead_num > 0) body->param.flags.has_lead = TRUE;
        debugs(" - argc: %d\n", body->param.lead_num);

        rest_id = args->rest_arg;
        /* `def m(a,)` — a trailing comma is marked with a sentinel, not a real rest arg */
        if (rest_id == NODE_SPECIAL_EXCESSIVE_COMMA) {
            last_comma = 1;
            rest_id = 0;
        }
        block_id = args->block_arg;

        /* `def foo(...)` with nothing else: forwarding can be optimized */
        bool optimized_forward = (args->forwarding && args->pre_args_num == 0 && !args->opt_args);

        if (optimized_forward) {
            rest_id = 0;
            block_id = 0;
        }

        if (args->opt_args) {
            const rb_node_opt_arg_t *node = args->opt_args;
            LABEL *label;
            VALUE labels = rb_ary_hidden_new(1);
            VALUE *opt_table;
            int i = 0, j;

            /* one entry label per default expression, compiled in order */
            while (node) {
                label = NEW_LABEL(nd_line(RNODE(node)));
                rb_ary_push(labels, (VALUE)label | 1); /* tag bit keeps GC from scanning it */
                ADD_LABEL(optargs, label);
                NO_CHECK(COMPILE_POPPED(optargs, "optarg", node->nd_body));
                node = node->nd_next;
                i += 1;
            }

            /* last label */
            label = NEW_LABEL(nd_line(node_args));
            rb_ary_push(labels, (VALUE)label | 1);
            ADD_LABEL(optargs, label);

            opt_table = ALLOC_N(VALUE, i+1);

            MEMCPY(opt_table, RARRAY_CONST_PTR(labels), VALUE, i+1);
            for (j = 0; j < i+1; j++) {
                opt_table[j] &= ~1; /* strip the tag bit back off */
            }
            rb_ary_clear(labels);

            body->param.flags.has_opt = TRUE;
            body->param.opt_num = i;
            body->param.opt_table = opt_table;
            arg_size += i;
        }

        if (rest_id) {
            body->param.rest_start = arg_size++;
            body->param.flags.has_rest = TRUE;
            if (rest_id == '*') body->param.flags.anon_rest = TRUE;
            RUBY_ASSERT(body->param.rest_start != -1);
        }

        if (args->first_post_arg) {
            body->param.post_start = arg_size;
            body->param.post_num = args->post_args_num;
            body->param.flags.has_post = TRUE;
            arg_size += args->post_args_num;

            if (body->param.flags.has_rest) { /* TODO: why that? */
                body->param.post_start = body->param.rest_start + 1;
            }
        }

        if (args->kw_args) {
            arg_size = iseq_set_arguments_keywords(iseq, optargs, args, arg_size);
        }
        else if (args->kw_rest_arg && !optimized_forward) {
            /* `**kw` only (no explicit keyword params) */
            ID kw_id = iseq->body->local_table[arg_size];
            struct rb_iseq_param_keyword *keyword = ZALLOC_N(struct rb_iseq_param_keyword, 1);
            keyword->rest_start = arg_size++;
            body->param.keyword = keyword;
            body->param.flags.has_kwrest = TRUE;

            static ID anon_kwrest = 0;
            if (!anon_kwrest) anon_kwrest = rb_intern("**");
            if (kw_id == anon_kwrest) body->param.flags.anon_kwrest = TRUE;
        }
        else if (args->no_kwarg) {
            /* `def m(**nil)` */
            body->param.flags.accepts_no_kwarg = TRUE;
        }

        if (block_id) {
            body->param.block_start = arg_size++;
            body->param.flags.has_block = TRUE;
            iseq_set_use_block(iseq);
        }

        // Only optimize specifically methods like this: `foo(...)`
        if (optimized_forward) {
            body->param.flags.use_block = 1;
            body->param.flags.forwardable = TRUE;
            arg_size = 1;
        }

        iseq_calc_param_size(iseq);
        body->param.size = arg_size;

        if (args->pre_init) { /* m_init */
            NO_CHECK(COMPILE_POPPED(optargs, "init arguments (m)", args->pre_init));
        }
        if (args->post_init) { /* p_init */
            NO_CHECK(COMPILE_POPPED(optargs, "init arguments (p)", args->post_init));
        }

        if (body->type == ISEQ_TYPE_BLOCK) {
            /* a block with exactly one required param and no trailing comma
             * gets the ambiguous-param0 auto-splat behavior */
            if (body->param.flags.has_opt == FALSE &&
                body->param.flags.has_post == FALSE &&
                body->param.flags.has_rest == FALSE &&
                body->param.flags.has_kw == FALSE &&
                body->param.flags.has_kwrest == FALSE) {

                if (body->param.lead_num == 1 && last_comma == 0) {
                    /* {|a|} */
                    body->param.flags.ambiguous_param0 = TRUE;
                }
            }
        }
    }

    return COMPILE_OK;
}
2162 
2163 static int
2164 iseq_set_local_table(rb_iseq_t *iseq, const rb_ast_id_table_t *tbl, const NODE *const node_args)
2165 {
2166  unsigned int size = tbl ? tbl->size : 0;
2167  unsigned int offset = 0;
2168 
2169  if (node_args) {
2170  struct rb_args_info *args = &RNODE_ARGS(node_args)->nd_ainfo;
2171 
2172  // If we have a function that only has `...` as the parameter,
2173  // then its local table should only be `...`
2174  // FIXME: I think this should be fixed in the AST rather than special case here.
2175  if (args->forwarding && args->pre_args_num == 0 && !args->opt_args) {
2176  size -= 3;
2177  offset += 3;
2178  }
2179  }
2180 
2181  if (size > 0) {
2182  ID *ids = (ID *)ALLOC_N(ID, size);
2183  MEMCPY(ids, tbl->ids + offset, ID, size);
2184  ISEQ_BODY(iseq)->local_table = ids;
2185  }
2186  ISEQ_BODY(iseq)->local_table_size = size;
2187 
2188  debugs("iseq_set_local_table: %u\n", ISEQ_BODY(iseq)->local_table_size);
2189  return COMPILE_OK;
2190 }
2191 
2192 int
2193 rb_iseq_cdhash_cmp(VALUE val, VALUE lit)
2194 {
2195  int tval, tlit;
2196 
2197  if (val == lit) {
2198  return 0;
2199  }
2200  else if ((tlit = OBJ_BUILTIN_TYPE(lit)) == -1) {
2201  return val != lit;
2202  }
2203  else if ((tval = OBJ_BUILTIN_TYPE(val)) == -1) {
2204  return -1;
2205  }
2206  else if (tlit != tval) {
2207  return -1;
2208  }
2209  else if (tlit == T_SYMBOL) {
2210  return val != lit;
2211  }
2212  else if (tlit == T_STRING) {
2213  return rb_str_hash_cmp(lit, val);
2214  }
2215  else if (tlit == T_BIGNUM) {
2216  long x = FIX2LONG(rb_big_cmp(lit, val));
2217 
2218  /* Given lit and val are both Bignum, x must be -1, 0, 1.
2219  * There is no need to call rb_fix2int here. */
2220  RUBY_ASSERT((x == 1) || (x == 0) || (x == -1));
2221  return (int)x;
2222  }
2223  else if (tlit == T_FLOAT) {
2224  return rb_float_cmp(lit, val);
2225  }
2226  else if (tlit == T_RATIONAL) {
2227  const struct RRational *rat1 = RRATIONAL(val);
2228  const struct RRational *rat2 = RRATIONAL(lit);
2229  return rb_iseq_cdhash_cmp(rat1->num, rat2->num) || rb_iseq_cdhash_cmp(rat1->den, rat2->den);
2230  }
2231  else if (tlit == T_COMPLEX) {
2232  const struct RComplex *comp1 = RCOMPLEX(val);
2233  const struct RComplex *comp2 = RCOMPLEX(lit);
2234  return rb_iseq_cdhash_cmp(comp1->real, comp2->real) || rb_iseq_cdhash_cmp(comp1->imag, comp2->imag);
2235  }
2236  else if (tlit == T_REGEXP) {
2237  return rb_reg_equal(val, lit) ? 0 : -1;
2238  }
2239  else {
2240  UNREACHABLE_RETURN(-1);
2241  }
2242 }
2243 
2244 st_index_t
2245 rb_iseq_cdhash_hash(VALUE a)
2246 {
2247  switch (OBJ_BUILTIN_TYPE(a)) {
2248  case -1:
2249  case T_SYMBOL:
2250  return (st_index_t)a;
2251  case T_STRING:
2252  return rb_str_hash(a);
2253  case T_BIGNUM:
2254  return FIX2LONG(rb_big_hash(a));
2255  case T_FLOAT:
2256  return rb_dbl_long_hash(RFLOAT_VALUE(a));
2257  case T_RATIONAL:
2258  return rb_rational_hash(a);
2259  case T_COMPLEX:
2260  return rb_complex_hash(a);
2261  case T_REGEXP:
2262  return NUM2LONG(rb_reg_hash(a));
2263  default:
2264  UNREACHABLE_RETURN(0);
2265  }
2266 }
2267 
/* st_table hash type for CDHASH dispatch tables: compares and hashes
 * literal keys by value using the two functions above. */
static const struct st_hash_type cdhash_type = {
    rb_iseq_cdhash_cmp,
    rb_iseq_cdhash_hash,
};
2272 
2274  VALUE hash;
2275  int pos;
2276  int len;
2277 };
2278 
2279 static int
2280 cdhash_set_label_i(VALUE key, VALUE val, VALUE ptr)
2281 {
2282  struct cdhash_set_label_struct *data = (struct cdhash_set_label_struct *)ptr;
2283  LABEL *lobj = (LABEL *)(val & ~1);
2284  rb_hash_aset(data->hash, key, INT2FIX(lobj->position - (data->pos+data->len)));
2285  return ST_CONTINUE;
2286 }
2287 
2288 
/* Allocate a fresh inline instance-variable cache slot index for this
 * iseq. `id` is intentionally unused: every ivar access site gets its
 * own slot. */
static inline VALUE
get_ivar_ic_value(rb_iseq_t *iseq,ID id)
{
    return INT2FIX(ISEQ_BODY(iseq)->ivc_size++);
}
2294 
2295 static inline VALUE
2296 get_cvar_ic_value(rb_iseq_t *iseq,ID id)
2297 {
2298  VALUE val;
2299  struct rb_id_table *tbl = ISEQ_COMPILE_DATA(iseq)->ivar_cache_table;
2300  if (tbl) {
2301  if (rb_id_table_lookup(tbl,id,&val)) {
2302  return val;
2303  }
2304  }
2305  else {
2306  tbl = rb_id_table_create(1);
2307  ISEQ_COMPILE_DATA(iseq)->ivar_cache_table = tbl;
2308  }
2309  val = INT2FIX(ISEQ_BODY(iseq)->icvarc_size++);
2310  rb_id_table_insert(tbl,id,val);
2311  return val;
2312 }
2313 
/* Dump the disassembled instruction list with a cursor at `list`, for
 * diagnosing a malformed sequence. */
#define BADINSN_DUMP(anchor, list, dest) \
    dump_disasm_list_with_cursor(FIRST_ELEMENT(anchor), list, dest)

/* Free the partially built buffers, dump the list, then raise a compile
 * error; relies on `generated_iseq`, `insns_info`, `anchor` and `list`
 * being in the caller's scope, and is invoked as BADINSN_ERROR(iseq,
 * line, fmt, ...) so the trailing COMPILE_ERROR receives the args. */
#define BADINSN_ERROR \
    (xfree(generated_iseq), \
     xfree(insns_info), \
     BADINSN_DUMP(anchor, list, NULL), \
     COMPILE_ERROR)
2322 
/*
 * Walk the linked instruction list, simulating stack-pointer depth for
 * every element, assigning each label its sp, and validating operand
 * counts and label references.
 *
 * Returns the maximum stack depth seen, or -1 after reporting a compile
 * error (underflow, bad operand count, unknown label, or adjust bug).
 */
static int
fix_sp_depth(rb_iseq_t *iseq, LINK_ANCHOR *const anchor)
{
    int stack_max = 0, sp = 0, line = 0;
    LINK_ELEMENT *list;

    /* first pass: mark every label present in the list as `set` so that
     * dangling label operands can be detected below */
    for (list = FIRST_ELEMENT(anchor); list; list = list->next) {
        if (IS_LABEL(list)) {
            LABEL *lobj = (LABEL *)list;
            lobj->set = TRUE;
        }
    }

    for (list = FIRST_ELEMENT(anchor); list; list = list->next) {
        switch (list->type) {
          case ISEQ_ELEMENT_INSN:
            {
                int j, len, insn;
                const char *types;
                VALUE *operands;
                INSN *iobj = (INSN *)list;

                /* update sp */
                sp = calc_sp_depth(sp, iobj);
                if (sp < 0) {
                    BADINSN_DUMP(anchor, list, NULL);
                    COMPILE_ERROR(iseq, iobj->insn_info.line_no,
                                  "argument stack underflow (%d)", sp);
                    return -1;
                }
                if (sp > stack_max) {
                    stack_max = sp;
                }

                line = iobj->insn_info.line_no;
                /* fprintf(stderr, "insn: %-16s, sp: %d\n", insn_name(iobj->insn_id), sp); */
                operands = iobj->operands;
                insn = iobj->insn_id;
                types = insn_op_types(insn);
                len = insn_len(insn);

                /* operand check */
                if (iobj->operand_size != len - 1) {
                    /* printf("operand size miss! (%d, %d)\n", iobj->operand_size, len); */
                    BADINSN_DUMP(anchor, list, NULL);
                    COMPILE_ERROR(iseq, iobj->insn_info.line_no,
                                  "operand size miss! (%d for %d)",
                                  iobj->operand_size, len - 1);
                    return -1;
                }

                for (j = 0; types[j]; j++) {
                    if (types[j] == TS_OFFSET) {
                        /* label(destination position) */
                        LABEL *lobj = (LABEL *)operands[j];
                        if (!lobj->set) {
                            /* jump target label never appeared in the list */
                            BADINSN_DUMP(anchor, list, NULL);
                            COMPILE_ERROR(iseq, iobj->insn_info.line_no,
                                          "unknown label: "LABEL_FORMAT, lobj->label_no);
                            return -1;
                        }
                        if (lobj->sp == -1) {
                            /* first arrival at this label fixes its sp */
                            lobj->sp = sp;
                        }
                        else if (lobj->sp != sp) {
                            debugs("%s:%d: sp inconsistency found but ignored (" LABEL_FORMAT " sp: %d, calculated sp: %d)\n",
                                   RSTRING_PTR(rb_iseq_path(iseq)), line,
                                   lobj->label_no, lobj->sp, sp);
                        }
                    }
                }
                break;
            }
          case ISEQ_ELEMENT_LABEL:
            {
                LABEL *lobj = (LABEL *)list;
                if (lobj->sp == -1) {
                    lobj->sp = sp;
                }
                else {
                    if (lobj->sp != sp) {
                        debugs("%s:%d: sp inconsistency found but ignored (" LABEL_FORMAT " sp: %d, calculated sp: %d)\n",
                               RSTRING_PTR(rb_iseq_path(iseq)), line,
                               lobj->label_no, lobj->sp, sp);
                    }
                    /* the label's recorded sp wins over the simulated one */
                    sp = lobj->sp;
                }
                break;
            }
          case ISEQ_ELEMENT_TRACE:
            {
                /* ignore */
                break;
            }
          case ISEQ_ELEMENT_ADJUST:
            {
                ADJUST *adjust = (ADJUST *)list;
                int orig_sp = sp;

                sp = adjust->label ? adjust->label->sp : 0;
                /* an adjust may only pop (never push) stack entries */
                if (adjust->line_no != -1 && orig_sp - sp < 0) {
                    BADINSN_DUMP(anchor, list, NULL);
                    COMPILE_ERROR(iseq, adjust->line_no,
                                  "iseq_set_sequence: adjust bug %d < %d",
                                  orig_sp, sp);
                    return -1;
                }
                break;
            }
          default:
            BADINSN_DUMP(anchor, list, NULL);
            COMPILE_ERROR(iseq, line, "unknown list type: %d", list->type);
            return -1;
        }
    }
    return stack_max;
}
2440 
2441 static int
2442 add_insn_info(struct iseq_insn_info_entry *insns_info, unsigned int *positions,
2443  int insns_info_index, int code_index, const INSN *iobj)
2444 {
2445  if (insns_info_index == 0 ||
2446  insns_info[insns_info_index-1].line_no != iobj->insn_info.line_no ||
2447 #ifdef USE_ISEQ_NODE_ID
2448  insns_info[insns_info_index-1].node_id != iobj->insn_info.node_id ||
2449 #endif
2450  insns_info[insns_info_index-1].events != iobj->insn_info.events) {
2451  insns_info[insns_info_index].line_no = iobj->insn_info.line_no;
2452 #ifdef USE_ISEQ_NODE_ID
2453  insns_info[insns_info_index].node_id = iobj->insn_info.node_id;
2454 #endif
2455  insns_info[insns_info_index].events = iobj->insn_info.events;
2456  positions[insns_info_index] = code_index;
2457  return TRUE;
2458  }
2459  return FALSE;
2460 }
2461 
2462 static int
2463 add_adjust_info(struct iseq_insn_info_entry *insns_info, unsigned int *positions,
2464  int insns_info_index, int code_index, const ADJUST *adjust)
2465 {
2466  insns_info[insns_info_index].line_no = adjust->line_no;
2467  insns_info[insns_info_index].node_id = -1;
2468  insns_info[insns_info_index].events = 0;
2469  positions[insns_info_index] = code_index;
2470  return TRUE;
2471 }
2472 
2473 static ID *
2474 array_to_idlist(VALUE arr)
2475 {
2476  RUBY_ASSERT(RB_TYPE_P(arr, T_ARRAY));
2477  long size = RARRAY_LEN(arr);
2478  ID *ids = (ID *)ALLOC_N(ID, size + 1);
2479  for (int i = 0; i < size; i++) {
2480  VALUE sym = RARRAY_AREF(arr, i);
2481  ids[i] = SYM2ID(sym);
2482  }
2483  ids[size] = 0;
2484  return ids;
2485 }
2486 
2487 static VALUE
2488 idlist_to_array(const ID *ids)
2489 {
2490  VALUE arr = rb_ary_new();
2491  while (*ids) {
2492  rb_ary_push(arr, ID2SYM(*ids++));
2493  }
2494  return arr;
2495 }
2496 
/*
 * Serialize the linked instruction-object list into the raw bytecode
 * buffer (body->iseq_encoded). Runs two passes:
 *   1. assign final bytecode positions to labels and count instructions
 *      (also filling line/branch coverage bookkeeping);
 *   2. emit each instruction and resolve every operand type (labels to
 *      offsets, inline caches, call data, CDHASHes, ...), building the
 *      GC mark bitmap for VALUE operands along the way.
 *
 * Returns COMPILE_OK / COMPILE_NG.
 */
static int
iseq_set_sequence(rb_iseq_t *iseq, LINK_ANCHOR *const anchor)
{
    struct iseq_insn_info_entry *insns_info;
    struct rb_iseq_constant_body *const body = ISEQ_BODY(iseq);
    unsigned int *positions;
    LINK_ELEMENT *list;
    VALUE *generated_iseq;
    rb_event_flag_t events = 0;
    long data = 0;

    int insn_num, code_index, insns_info_index, sp = 0;
    int stack_max = fix_sp_depth(iseq, anchor);

    if (stack_max < 0) return COMPILE_NG;

    /* fix label position */
    insn_num = code_index = 0;
    for (list = FIRST_ELEMENT(anchor); list; list = list->next) {
        switch (list->type) {
          case ISEQ_ELEMENT_INSN:
            {
                INSN *iobj = (INSN *)list;
                /* update sp */
                sp = calc_sp_depth(sp, iobj);
                insn_num++;
                /* pending trace events stick to the next real instruction */
                events = iobj->insn_info.events |= events;
                if (ISEQ_COVERAGE(iseq)) {
                    if (ISEQ_LINE_COVERAGE(iseq) && (events & RUBY_EVENT_COVERAGE_LINE) &&
                        !(rb_get_coverage_mode() & COVERAGE_TARGET_ONESHOT_LINES)) {
                        int line = iobj->insn_info.line_no - 1;
                        if (line >= 0 && line < RARRAY_LEN(ISEQ_LINE_COVERAGE(iseq))) {
                            RARRAY_ASET(ISEQ_LINE_COVERAGE(iseq), line, INT2FIX(0));
                        }
                    }
                    if (ISEQ_BRANCH_COVERAGE(iseq) && (events & RUBY_EVENT_COVERAGE_BRANCH)) {
                        while (RARRAY_LEN(ISEQ_PC2BRANCHINDEX(iseq)) <= code_index) {
                            rb_ary_push(ISEQ_PC2BRANCHINDEX(iseq), Qnil);
                        }
                        RARRAY_ASET(ISEQ_PC2BRANCHINDEX(iseq), code_index, INT2FIX(data));
                    }
                }
                code_index += insn_data_length(iobj);
                events = 0;
                data = 0;
                break;
            }
          case ISEQ_ELEMENT_LABEL:
            {
                LABEL *lobj = (LABEL *)list;
                lobj->position = code_index;
                if (lobj->sp != sp) {
                    debugs("%s: sp inconsistency found but ignored (" LABEL_FORMAT " sp: %d, calculated sp: %d)\n",
                           RSTRING_PTR(rb_iseq_path(iseq)),
                           lobj->label_no, lobj->sp, sp);
                }
                sp = lobj->sp;
                break;
            }
          case ISEQ_ELEMENT_TRACE:
            {
                TRACE *trace = (TRACE *)list;
                events |= trace->event;
                if (trace->event & RUBY_EVENT_COVERAGE_BRANCH) data = trace->data;
                break;
            }
          case ISEQ_ELEMENT_ADJUST:
            {
                ADJUST *adjust = (ADJUST *)list;
                if (adjust->line_no != -1) {
                    int orig_sp = sp;
                    sp = adjust->label ? adjust->label->sp : 0;
                    /* an adjust emits either `pop` or `adjuststack n` */
                    if (orig_sp - sp > 0) {
                        if (orig_sp - sp > 1) code_index++; /* 1 operand */
                        code_index++; /* insn */
                        insn_num++;
                    }
                }
                break;
            }
          default: break;
        }
    }

    /* make instruction sequence */
    generated_iseq = ALLOC_N(VALUE, code_index);
    insns_info = ALLOC_N(struct iseq_insn_info_entry, insn_num);
    positions = ALLOC_N(unsigned int, insn_num);
    if (ISEQ_IS_SIZE(body)) {
        body->is_entries = ZALLOC_N(union iseq_inline_storage_entry, ISEQ_IS_SIZE(body));
    }
    else {
        body->is_entries = NULL;
    }
    body->call_data = ZALLOC_N(struct rb_call_data, body->ci_size);
    ISEQ_COMPILE_DATA(iseq)->ci_index = 0;

    // Calculate the bitmask buffer size.
    // Round the generated_iseq size up to the nearest multiple
    // of the number of bits in an unsigned long.

    // Allocate enough room for the bitmask list
    iseq_bits_t * mark_offset_bits;
    int code_size = code_index;

    iseq_bits_t tmp[1] = {0};
    bool needs_bitmap = false;

    if (ISEQ_MBITS_BUFLEN(code_index) == 1) {
        /* small iseq: a single word on the stack is enough for the bitmap */
        mark_offset_bits = tmp;
    }
    else {
        mark_offset_bits = ZALLOC_N(iseq_bits_t, ISEQ_MBITS_BUFLEN(code_index));
    }

    list = FIRST_ELEMENT(anchor);
    insns_info_index = code_index = sp = 0;

    while (list) {
        switch (list->type) {
          case ISEQ_ELEMENT_INSN:
            {
                int j, len, insn;
                const char *types;
                VALUE *operands;
                INSN *iobj = (INSN *)list;

                /* update sp */
                sp = calc_sp_depth(sp, iobj);
                /* fprintf(stderr, "insn: %-16s, sp: %d\n", insn_name(iobj->insn_id), sp); */
                operands = iobj->operands;
                insn = iobj->insn_id;
                generated_iseq[code_index] = insn;
                types = insn_op_types(insn);
                len = insn_len(insn);

                for (j = 0; types[j]; j++) {
                    char type = types[j];

                    /* printf("--> [%c - (%d-%d)]\n", type, k, j); */
                    switch (type) {
                      case TS_OFFSET:
                        {
                            /* label(destination position) */
                            LABEL *lobj = (LABEL *)operands[j];
                            generated_iseq[code_index + 1 + j] = lobj->position - (code_index + len);
                            break;
                        }
                      case TS_CDHASH:
                        {
                            VALUE map = operands[j];
                            struct cdhash_set_label_struct data;
                            data.hash = map;
                            data.pos = code_index;
                            data.len = len;
                            /* rewrite LABEL-pointer values into relative offsets */
                            rb_hash_foreach(map, cdhash_set_label_i, (VALUE)&data);

                            rb_hash_rehash(map);
                            freeze_hide_obj(map);
                            generated_iseq[code_index + 1 + j] = map;
                            ISEQ_MBITS_SET(mark_offset_bits, code_index + 1 + j);
                            RB_OBJ_WRITTEN(iseq, Qundef, map);
                            needs_bitmap = true;
                            break;
                        }
                      case TS_LINDEX:
                      case TS_NUM: /* ulong */
                        generated_iseq[code_index + 1 + j] = FIX2INT(operands[j]);
                        break;
                      case TS_ISEQ: /* iseq */
                      case TS_VALUE: /* VALUE */
                        {
                            VALUE v = operands[j];
                            generated_iseq[code_index + 1 + j] = v;
                            /* to mark ruby object */
                            if (!SPECIAL_CONST_P(v)) {
                                RB_OBJ_WRITTEN(iseq, Qundef, v);
                                ISEQ_MBITS_SET(mark_offset_bits, code_index + 1 + j);
                                needs_bitmap = true;
                            }
                            break;
                        }
                      /* [ TS_IVC | TS_ICVARC | TS_ISE | TS_IC ] */
                      case TS_IC: /* inline cache: constants */
                        {
                            unsigned int ic_index = ISEQ_COMPILE_DATA(iseq)->ic_index++;
                            IC ic = &ISEQ_IS_ENTRY_START(body, type)[ic_index].ic_cache;
                            if (UNLIKELY(ic_index >= body->ic_size)) {
                                BADINSN_DUMP(anchor, &iobj->link, 0);
                                COMPILE_ERROR(iseq, iobj->insn_info.line_no,
                                              "iseq_set_sequence: ic_index overflow: index: %d, size: %d",
                                              ic_index, ISEQ_IS_SIZE(body));
                            }

                            ic->segments = array_to_idlist(operands[j]);

                            generated_iseq[code_index + 1 + j] = (VALUE)ic;
                        }
                        break;
                      case TS_IVC: /* inline ivar cache */
                        {
                            unsigned int ic_index = FIX2UINT(operands[j]);

                            IVC cache = ((IVC)&body->is_entries[ic_index]);

                            if (insn == BIN(setinstancevariable)) {
                                /* operand j-1 is the ivar name symbol */
                                cache->iv_set_name = SYM2ID(operands[j - 1]);
                            }
                            else {
                                cache->iv_set_name = 0;
                            }

                            vm_ic_attr_index_initialize(cache, INVALID_SHAPE_ID);
                        }
                        /* fall through: IVC operands also get the generic
                         * inline-storage slot pointer written below */
                      case TS_ISE: /* inline storage entry: `once` insn */
                      case TS_ICVARC: /* inline cvar cache */
                        {
                            unsigned int ic_index = FIX2UINT(operands[j]);
                            IC ic = &ISEQ_IS_ENTRY_START(body, type)[ic_index].ic_cache;
                            if (UNLIKELY(ic_index >= ISEQ_IS_SIZE(body))) {
                                BADINSN_DUMP(anchor, &iobj->link, 0);
                                COMPILE_ERROR(iseq, iobj->insn_info.line_no,
                                              "iseq_set_sequence: ic_index overflow: index: %d, size: %d",
                                              ic_index, ISEQ_IS_SIZE(body));
                            }
                            generated_iseq[code_index + 1 + j] = (VALUE)ic;

                            break;
                        }
                      case TS_CALLDATA:
                        {
                            const struct rb_callinfo *source_ci = (const struct rb_callinfo *)operands[j];
                            RUBY_ASSERT(ISEQ_COMPILE_DATA(iseq)->ci_index <= body->ci_size);
                            struct rb_call_data *cd = &body->call_data[ISEQ_COMPILE_DATA(iseq)->ci_index++];
                            cd->ci = source_ci;
                            cd->cc = vm_cc_empty();
                            generated_iseq[code_index + 1 + j] = (VALUE)cd;
                            break;
                        }
                      case TS_ID: /* ID */
                        generated_iseq[code_index + 1 + j] = SYM2ID(operands[j]);
                        break;
                      case TS_FUNCPTR:
                        generated_iseq[code_index + 1 + j] = operands[j];
                        break;
                      case TS_BUILTIN:
                        generated_iseq[code_index + 1 + j] = operands[j];
                        break;
                      default:
                        BADINSN_ERROR(iseq, iobj->insn_info.line_no,
                                      "unknown operand type: %c", type);
                        return COMPILE_NG;
                    }
                }
                if (add_insn_info(insns_info, positions, insns_info_index, code_index, iobj)) insns_info_index++;
                code_index += len;
                break;
            }
          case ISEQ_ELEMENT_LABEL:
            {
                LABEL *lobj = (LABEL *)list;
                if (lobj->sp != sp) {
                    debugs("%s: sp inconsistency found but ignored (" LABEL_FORMAT " sp: %d, calculated sp: %d)\n",
                           RSTRING_PTR(rb_iseq_path(iseq)),
                           lobj->label_no, lobj->sp, sp);
                }
                sp = lobj->sp;
                break;
            }
          case ISEQ_ELEMENT_ADJUST:
            {
                ADJUST *adjust = (ADJUST *)list;
                int orig_sp = sp;

                if (adjust->label) {
                    sp = adjust->label->sp;
                }
                else {
                    sp = 0;
                }

                if (adjust->line_no != -1) {
                    const int diff = orig_sp - sp;
                    if (diff > 0) {
                        if (insns_info_index == 0) {
                            COMPILE_ERROR(iseq, adjust->line_no,
                                          "iseq_set_sequence: adjust bug (ISEQ_ELEMENT_ADJUST must not be the first in iseq)");
                        }
                        if (add_adjust_info(insns_info, positions, insns_info_index, code_index, adjust)) insns_info_index++;
                    }
                    if (diff > 1) {
                        generated_iseq[code_index++] = BIN(adjuststack);
                        generated_iseq[code_index++] = orig_sp - sp;
                    }
                    else if (diff == 1) {
                        generated_iseq[code_index++] = BIN(pop);
                    }
                    else if (diff < 0) {
                        /* negative adjust should have been caught by fix_sp_depth */
                        int label_no = adjust->label ? adjust->label->label_no : -1;
                        xfree(generated_iseq);
                        xfree(insns_info);
                        xfree(positions);
                        if (ISEQ_MBITS_BUFLEN(code_size) > 1) {
                            xfree(mark_offset_bits);
                        }
                        debug_list(anchor, list);
                        COMPILE_ERROR(iseq, adjust->line_no,
                                      "iseq_set_sequence: adjust bug to %d %d < %d",
                                      label_no, orig_sp, sp);
                        return COMPILE_NG;
                    }
                }
                break;
            }
          default:
            /* ignore */
            break;
        }
        list = list->next;
    }

    body->iseq_encoded = (void *)generated_iseq;
    body->iseq_size = code_index;
    body->stack_max = stack_max;

    if (ISEQ_MBITS_BUFLEN(body->iseq_size) == 1) {
        body->mark_bits.single = mark_offset_bits[0];
    }
    else {
        if (needs_bitmap) {
            body->mark_bits.list = mark_offset_bits;
        }
        else {
            body->mark_bits.list = 0;
            ruby_xfree(mark_offset_bits);
        }
    }

    /* get rid of memory leak when REALLOC failed */
    body->insns_info.body = insns_info;
    body->insns_info.positions = positions;

    REALLOC_N(insns_info, struct iseq_insn_info_entry, insns_info_index);
    body->insns_info.body = insns_info;
    REALLOC_N(positions, unsigned int, insns_info_index);
    body->insns_info.positions = positions;
    body->insns_info.size = insns_info_index;

    return COMPILE_OK;
}
2850 
/* Bytecode position assigned to the label (set during iseq_set_sequence). */
static int
label_get_position(LABEL *lobj)
{
    return lobj->position;
}
2856 
/* Stack depth recorded for the label (set during fix_sp_depth). */
static int
label_get_sp(LABEL *lobj)
{
    return lobj->sp;
}
2862 
/*
 * Build the iseq's catch (exception) table from the compile-data array,
 * resolving tagged LABEL pointers into bytecode positions and stack
 * depths. Each array entry holds: [type, start-label, end-label,
 * handler-iseq, cont-label].
 */
static int
iseq_set_exception_table(rb_iseq_t *iseq)
{
    const VALUE *tptr, *ptr;
    unsigned int tlen, i;
    struct iseq_catch_table_entry *entry;

    ISEQ_BODY(iseq)->catch_table = NULL;

    VALUE catch_table_ary = ISEQ_COMPILE_DATA(iseq)->catch_table_ary;
    if (NIL_P(catch_table_ary)) return COMPILE_OK;
    tlen = (int)RARRAY_LEN(catch_table_ary);
    tptr = RARRAY_CONST_PTR(catch_table_ary);

    if (tlen > 0) {
        struct iseq_catch_table *table = xmalloc(iseq_catch_table_bytes(tlen));
        table->size = tlen;

        for (i = 0; i < table->size; i++) {
            int pos;
            ptr = RARRAY_CONST_PTR(tptr[i]);
            entry = UNALIGNED_MEMBER_PTR(table, entries[i]);
            entry->type = (enum rb_catch_type)(ptr[0] & 0xffff);
            /* labels are stored with bit 0 tagged; mask it off */
            pos = label_get_position((LABEL *)(ptr[1] & ~1));
            RUBY_ASSERT(pos >= 0);
            entry->start = (unsigned int)pos;
            pos = label_get_position((LABEL *)(ptr[2] & ~1));
            RUBY_ASSERT(pos >= 0);
            entry->end = (unsigned int)pos;
            entry->iseq = (rb_iseq_t *)ptr[3];
            RB_OBJ_WRITTEN(iseq, Qundef, entry->iseq);

            /* stack depth */
            if (ptr[4]) {
                LABEL *lobj = (LABEL *)(ptr[4] & ~1);
                entry->cont = label_get_position(lobj);
                entry->sp = label_get_sp(lobj);

                /* TODO: Dirty Hack! Fix me */
                if (entry->type == CATCH_TYPE_RESCUE ||
                    entry->type == CATCH_TYPE_BREAK ||
                    entry->type == CATCH_TYPE_NEXT) {
                    RUBY_ASSERT(entry->sp > 0);
                    entry->sp--;
                }
            }
            else {
                entry->cont = 0;
            }
        }
        ISEQ_BODY(iseq)->catch_table = table;
        RB_OBJ_WRITE(iseq, &ISEQ_COMPILE_DATA(iseq)->catch_table_ary, 0); /* free */
    }

    RB_GC_GUARD(catch_table_ary);

    return COMPILE_OK;
}
2921 
2922 /*
2923  * set optional argument table
2924  * def foo(a, b=expr1, c=expr2)
2925  * =>
2926  * b:
2927  * expr1
2928  * c:
2929  * expr2
2930  */
2931 static int
2932 iseq_set_optargs_table(rb_iseq_t *iseq)
2933 {
2934  int i;
2935  VALUE *opt_table = (VALUE *)ISEQ_BODY(iseq)->param.opt_table;
2936 
2937  if (ISEQ_BODY(iseq)->param.flags.has_opt) {
2938  for (i = 0; i < ISEQ_BODY(iseq)->param.opt_num + 1; i++) {
2939  opt_table[i] = label_get_position((LABEL *)opt_table[i]);
2940  }
2941  }
2942  return COMPILE_OK;
2943 }
2944 
2945 static LINK_ELEMENT *
2946 get_destination_insn(INSN *iobj)
2947 {
2948  LABEL *lobj = (LABEL *)OPERAND_AT(iobj, 0);
2949  LINK_ELEMENT *list;
2950  rb_event_flag_t events = 0;
2951 
2952  list = lobj->link.next;
2953  while (list) {
2954  switch (list->type) {
2955  case ISEQ_ELEMENT_INSN:
2956  case ISEQ_ELEMENT_ADJUST:
2957  goto found;
2958  case ISEQ_ELEMENT_LABEL:
2959  /* ignore */
2960  break;
2961  case ISEQ_ELEMENT_TRACE:
2962  {
2963  TRACE *trace = (TRACE *)list;
2964  events |= trace->event;
2965  }
2966  break;
2967  default: break;
2968  }
2969  list = list->next;
2970  }
2971  found:
2972  if (list && IS_INSN(list)) {
2973  INSN *iobj = (INSN *)list;
2974  iobj->insn_info.events |= events;
2975  }
2976  return list;
2977 }
2978 
2979 static LINK_ELEMENT *
2980 get_next_insn(INSN *iobj)
2981 {
2982  LINK_ELEMENT *list = iobj->link.next;
2983 
2984  while (list) {
2985  if (IS_INSN(list) || IS_ADJUST(list)) {
2986  return list;
2987  }
2988  list = list->next;
2989  }
2990  return 0;
2991 }
2992 
2993 static LINK_ELEMENT *
2994 get_prev_insn(INSN *iobj)
2995 {
2996  LINK_ELEMENT *list = iobj->link.prev;
2997 
2998  while (list) {
2999  if (IS_INSN(list) || IS_ADJUST(list)) {
3000  return list;
3001  }
3002  list = list->prev;
3003  }
3004  return 0;
3005 }
3006 
3007 static void
3008 unref_destination(INSN *iobj, int pos)
3009 {
3010  LABEL *lobj = (LABEL *)OPERAND_AT(iobj, pos);
3011  --lobj->refcnt;
3012  if (!lobj->refcnt) ELEM_REMOVE(&lobj->link);
3013 }
3014 
3015 static bool
3016 replace_destination(INSN *dobj, INSN *nobj)
3017 {
3018  VALUE n = OPERAND_AT(nobj, 0);
3019  LABEL *dl = (LABEL *)OPERAND_AT(dobj, 0);
3020  LABEL *nl = (LABEL *)n;
3021  if (dl == nl) return false;
3022  --dl->refcnt;
3023  ++nl->refcnt;
3024  OPERAND_AT(dobj, 0) = n;
3025  if (!dl->refcnt) ELEM_REMOVE(&dl->link);
3026  return true;
3027 }
3028 
3029 static LABEL*
3030 find_destination(INSN *i)
3031 {
3032  int pos, len = insn_len(i->insn_id);
3033  for (pos = 0; pos < len; ++pos) {
3034  if (insn_op_types(i->insn_id)[pos] == TS_OFFSET) {
3035  return (LABEL *)OPERAND_AT(i, pos);
3036  }
3037  }
3038  return 0;
3039 }
3040 
/*
 * Starting at `i`, delete the run of elements that can never be reached,
 * up to (and including) a `leave`, or up to a label that is still
 * referenced from outside the chunk. Two phases: first scan forward
 * counting, per label, how many references originate inside the chunk
 * itself; then remove the elements, unreferencing jump targets and
 * giving back call-data slots. Returns 1 if anything was removed.
 */
static int
remove_unreachable_chunk(rb_iseq_t *iseq, LINK_ELEMENT *i)
{
    LINK_ELEMENT *first = i, *end;
    int *unref_counts = 0, nlabels = ISEQ_COMPILE_DATA(iseq)->label_no;

    if (!i) return 0;
    unref_counts = ALLOCA_N(int, nlabels);
    MEMZERO(unref_counts, int, nlabels);
    end = i;
    do {
        LABEL *lab;
        if (IS_INSN(i)) {
            if (IS_INSN_ID(i, leave)) {
                /* chunk ends with (and includes) the leave */
                end = i;
                break;
            }
            else if ((lab = find_destination((INSN *)i)) != 0) {
                /* a jump inside the chunk: its target loses this ref too */
                unref_counts[lab->label_no]++;
            }
        }
        else if (IS_LABEL(i)) {
            lab = (LABEL *)i;
            if (lab->unremovable) return 0;
            if (lab->refcnt > unref_counts[lab->label_no]) {
                /* label is reachable from outside the chunk: stop before it */
                if (i == first) return 0;
                break;
            }
            continue; /* label is only referenced from within; keep scanning */
        }
        else if (IS_TRACE(i)) {
            /* do nothing */
        }
        else if (IS_ADJUST(i)) {
            /* adjusts carry sp bookkeeping we must not discard */
            return 0;
        }
        end = i;
    } while ((i = i->next) != 0);
    i = first;
    do {
        if (IS_INSN(i)) {
            struct rb_iseq_constant_body *body = ISEQ_BODY(iseq);
            VALUE insn = INSN_OF(i);
            int pos, len = insn_len(insn);
            for (pos = 0; pos < len; ++pos) {
                switch (insn_op_types(insn)[pos]) {
                  case TS_OFFSET:
                    unref_destination((INSN *)i, pos);
                    break;
                  case TS_CALLDATA:
                    /* this call site vanishes: shrink the call-data count */
                    --(body->ci_size);
                    break;
                }
            }
        }
        ELEM_REMOVE(i);
    } while ((i != end) && (i = i->next) != 0);
    return 1;
}
3100 
3101 static int
3102 iseq_pop_newarray(rb_iseq_t *iseq, INSN *iobj)
3103 {
3104  switch (OPERAND_AT(iobj, 0)) {
3105  case INT2FIX(0): /* empty array */
3106  ELEM_REMOVE(&iobj->link);
3107  return TRUE;
3108  case INT2FIX(1): /* single element array */
3109  ELEM_REMOVE(&iobj->link);
3110  return FALSE;
3111  default:
3112  iobj->insn_id = BIN(adjuststack);
3113  return TRUE;
3114  }
3115 }
3116 
3117 static int
3118 is_frozen_putstring(INSN *insn, VALUE *op)
3119 {
3120  if (IS_INSN_ID(insn, putstring) || IS_INSN_ID(insn, putchilledstring)) {
3121  *op = OPERAND_AT(insn, 0);
3122  return 1;
3123  }
3124  else if (IS_INSN_ID(insn, putobject)) { /* frozen_string_literal */
3125  *op = OPERAND_AT(insn, 0);
3126  return RB_TYPE_P(*op, T_STRING);
3127  }
3128  return 0;
3129 }
3130 
/*
 * Peephole: when the pushed literal's type is statically known, a
 * following `dup; checktype T; branchif/branchunless` can be folded
 * into an unconditional jump. Returns TRUE when rewritten.
 */
static int
optimize_checktype(rb_iseq_t *iseq, INSN *iobj)
{
    /*
     *   putobject obj
     *   dup
     *   checktype T_XXX
     *   branchif l1
     * l2:
     *   ...
     * l1:
     *
     * => obj is a T_XXX
     *
     *   putobject obj (T_XXX)
     *   jump L1
     * L1:
     *
     * => obj is not a T_XXX
     *
     *   putobject obj (T_XXX)
     *   jump L2
     * L2:
     */
    int line, node_id;
    INSN *niobj, *ciobj, *dup = 0;
    LABEL *dest = 0;
    VALUE type;

    /* the pushing instruction must have a statically known type */
    switch (INSN_OF(iobj)) {
      case BIN(putstring):
      case BIN(putchilledstring):
        type = INT2FIX(T_STRING);
        break;
      case BIN(putnil):
        type = INT2FIX(T_NIL);
        break;
      case BIN(putobject):
        type = INT2FIX(TYPE(OPERAND_AT(iobj, 0)));
        break;
      default: return FALSE;
    }

    ciobj = (INSN *)get_next_insn(iobj);
    if (IS_INSN_ID(ciobj, jump)) {
        /* follow one unconditional jump to the real successor */
        ciobj = (INSN *)get_next_insn((INSN*)OPERAND_AT(ciobj, 0));
    }
    if (IS_INSN_ID(ciobj, dup)) {
        ciobj = (INSN *)get_next_insn(dup = ciobj);
    }
    if (!ciobj || !IS_INSN_ID(ciobj, checktype)) return FALSE;
    niobj = (INSN *)get_next_insn(ciobj);
    if (!niobj) {
        /* TODO: putobject true/false */
        return FALSE;
    }
    /* decide which way the branch statically resolves */
    switch (INSN_OF(niobj)) {
      case BIN(branchif):
        if (OPERAND_AT(ciobj, 0) == type) {
            dest = (LABEL *)OPERAND_AT(niobj, 0);
        }
        break;
      case BIN(branchunless):
        if (OPERAND_AT(ciobj, 0) != type) {
            dest = (LABEL *)OPERAND_AT(niobj, 0);
        }
        break;
      default:
        return FALSE;
    }
    line = ciobj->insn_info.line_no;
    node_id = ciobj->insn_info.node_id;
    if (!dest) {
        /* branch falls through: jump to just after the branch insn */
        if (niobj->link.next && IS_LABEL(niobj->link.next)) {
            dest = (LABEL *)niobj->link.next; /* reuse label */
        }
        else {
            dest = NEW_LABEL(line);
            ELEM_INSERT_NEXT(&niobj->link, &dest->link);
        }
    }
    INSERT_AFTER_INSN1(iobj, line, node_id, jump, dest);
    LABEL_REF(dest);
    /* no dup means the branch consumed the value; compensate with pop */
    if (!dup) INSERT_AFTER_INSN(iobj, line, node_id, pop);
    return TRUE;
}
3217 
3218 static const struct rb_callinfo *
3219 ci_flag_set(const rb_iseq_t *iseq, const struct rb_callinfo *ci, unsigned int add)
3220 {
3221  const struct rb_callinfo *nci = vm_ci_new(vm_ci_mid(ci),
3222  vm_ci_flag(ci) | add,
3223  vm_ci_argc(ci),
3224  vm_ci_kwarg(ci));
3225  RB_OBJ_WRITTEN(iseq, ci, nci);
3226  return nci;
3227 }
3228 
3229 static const struct rb_callinfo *
3230 ci_argc_set(const rb_iseq_t *iseq, const struct rb_callinfo *ci, int argc)
3231 {
3232  const struct rb_callinfo *nci = vm_ci_new(vm_ci_mid(ci),
3233  vm_ci_flag(ci),
3234  argc,
3235  vm_ci_kwarg(ci));
3236  RB_OBJ_WRITTEN(iseq, ci, nci);
3237  return nci;
3238 }
3239 
/* Non-zero iff CI carries the VM_CALL_ARGS_SIMPLE flag. */
#define vm_ci_simple(ci) (vm_ci_flag(ci) & VM_CALL_ARGS_SIMPLE)
3241 
3242 static int
3243 iseq_peephole_optimize(rb_iseq_t *iseq, LINK_ELEMENT *list, const int do_tailcallopt)
3244 {
3245  INSN *const iobj = (INSN *)list;
3246 
3247  again:
3248  optimize_checktype(iseq, iobj);
3249 
3250  if (IS_INSN_ID(iobj, jump)) {
3251  INSN *niobj, *diobj, *piobj;
3252  diobj = (INSN *)get_destination_insn(iobj);
3253  niobj = (INSN *)get_next_insn(iobj);
3254 
3255  if (diobj == niobj) {
3256  /*
3257  * jump LABEL
3258  * LABEL:
3259  * =>
3260  * LABEL:
3261  */
3262  unref_destination(iobj, 0);
3263  ELEM_REMOVE(&iobj->link);
3264  return COMPILE_OK;
3265  }
3266  else if (iobj != diobj && IS_INSN(&diobj->link) &&
3267  IS_INSN_ID(diobj, jump) &&
3268  OPERAND_AT(iobj, 0) != OPERAND_AT(diobj, 0) &&
3269  diobj->insn_info.events == 0) {
3270  /*
3271  * useless jump elimination:
3272  * jump LABEL1
3273  * ...
3274  * LABEL1:
3275  * jump LABEL2
3276  *
3277  * => in this case, first jump instruction should jump to
3278  * LABEL2 directly
3279  */
3280  if (replace_destination(iobj, diobj)) {
3281  remove_unreachable_chunk(iseq, iobj->link.next);
3282  goto again;
3283  }
3284  }
3285  else if (IS_INSN_ID(diobj, leave)) {
3286  /*
3287  * jump LABEL
3288  * ...
3289  * LABEL:
3290  * leave
3291  * =>
3292  * leave
3293  * ...
3294  * LABEL:
3295  * leave
3296  */
3297  /* replace */
3298  unref_destination(iobj, 0);
3299  iobj->insn_id = BIN(leave);
3300  iobj->operand_size = 0;
3301  iobj->insn_info = diobj->insn_info;
3302  goto again;
3303  }
3304  else if (IS_INSN(iobj->link.prev) &&
3305  (piobj = (INSN *)iobj->link.prev) &&
3306  (IS_INSN_ID(piobj, branchif) ||
3307  IS_INSN_ID(piobj, branchunless))) {
3308  INSN *pdiobj = (INSN *)get_destination_insn(piobj);
3309  if (niobj == pdiobj) {
3310  int refcnt = IS_LABEL(piobj->link.next) ?
3311  ((LABEL *)piobj->link.next)->refcnt : 0;
3312  /*
3313  * useless jump elimination (if/unless destination):
3314  * if L1
3315  * jump L2
3316  * L1:
3317  * ...
3318  * L2:
3319  *
3320  * ==>
3321  * unless L2
3322  * L1:
3323  * ...
3324  * L2:
3325  */
3326  piobj->insn_id = (IS_INSN_ID(piobj, branchif))
3327  ? BIN(branchunless) : BIN(branchif);
3328  if (replace_destination(piobj, iobj) && refcnt <= 1) {
3329  ELEM_REMOVE(&iobj->link);
3330  }
3331  else {
3332  /* TODO: replace other branch destinations too */
3333  }
3334  return COMPILE_OK;
3335  }
3336  else if (diobj == pdiobj) {
3337  /*
3338  * useless jump elimination (if/unless before jump):
3339  * L1:
3340  * ...
3341  * if L1
3342  * jump L1
3343  *
3344  * ==>
3345  * L1:
3346  * ...
3347  * pop
3348  * jump L1
3349  */
3350  INSN *popiobj = new_insn_core(iseq, iobj->insn_info.line_no, iobj->insn_info.node_id, BIN(pop), 0, 0);
3351  ELEM_REPLACE(&piobj->link, &popiobj->link);
3352  }
3353  }
3354  if (remove_unreachable_chunk(iseq, iobj->link.next)) {
3355  goto again;
3356  }
3357  }
3358 
3359  /*
3360  * putstring "beg"
3361  * putstring "end"
3362  * newrange excl
3363  *
3364  * ==>
3365  *
3366  * putobject "beg".."end"
3367  */
3368  if (IS_INSN_ID(iobj, newrange)) {
3369  INSN *const range = iobj;
3370  INSN *beg, *end;
3371  VALUE str_beg, str_end;
3372 
3373  if ((end = (INSN *)get_prev_insn(range)) != 0 &&
3374  is_frozen_putstring(end, &str_end) &&
3375  (beg = (INSN *)get_prev_insn(end)) != 0 &&
3376  is_frozen_putstring(beg, &str_beg)) {
3377  int excl = FIX2INT(OPERAND_AT(range, 0));
3378  VALUE lit_range = rb_range_new(str_beg, str_end, excl);
3379 
3380  ELEM_REMOVE(&beg->link);
3381  ELEM_REMOVE(&end->link);
3382  range->insn_id = BIN(putobject);
3383  OPERAND_AT(range, 0) = lit_range;
3384  RB_OBJ_WRITTEN(iseq, Qundef, lit_range);
3385  }
3386  }
3387 
3388  if (IS_INSN_ID(iobj, leave)) {
3389  remove_unreachable_chunk(iseq, iobj->link.next);
3390  }
3391 
3392  /*
3393  * ...
3394  * duparray [...]
3395  * concatarray | concattoarray
3396  * =>
3397  * ...
3398  * putobject [...]
3399  * concatarray | concattoarray
3400  */
3401  if (IS_INSN_ID(iobj, duparray)) {
3402  LINK_ELEMENT *next = iobj->link.next;
3403  if (IS_INSN(next) && (IS_INSN_ID(next, concatarray) || IS_INSN_ID(next, concattoarray))) {
3404  iobj->insn_id = BIN(putobject);
3405  }
3406  }
3407 
3408  /*
3409  * duparray [...]
3410  * send <calldata!mid:freeze, argc:0, ARGS_SIMPLE>, nil
3411  * =>
3412  * opt_ary_freeze [...], <calldata!mid:freeze, argc:0, ARGS_SIMPLE>
3413  */
3414  if (IS_INSN_ID(iobj, duparray)) {
3415  LINK_ELEMENT *next = iobj->link.next;
3416  if (IS_INSN(next) && (IS_INSN_ID(next, send))) {
3417  const struct rb_callinfo *ci = (struct rb_callinfo *)OPERAND_AT(next, 0);
3418  const rb_iseq_t *blockiseq = (rb_iseq_t *)OPERAND_AT(next, 1);
3419 
3420  if (vm_ci_simple(ci) && vm_ci_argc(ci) == 0 && blockiseq == NULL && vm_ci_mid(ci) == idFreeze) {
3421  VALUE ary = iobj->operands[0];
3422  rb_obj_reveal(ary, rb_cArray);
3423 
3424  iobj->insn_id = BIN(opt_ary_freeze);
3425  iobj->operand_size = 2;
3426  iobj->operands = compile_data_calloc2(iseq, iobj->operand_size, sizeof(VALUE));
3427  iobj->operands[0] = ary;
3428  iobj->operands[1] = (VALUE)ci;
3429  ELEM_REMOVE(next);
3430  }
3431  }
3432  }
3433 
3434  /*
3435  * duphash {...}
3436  * send <calldata!mid:freeze, argc:0, ARGS_SIMPLE>, nil
3437  * =>
3438  * opt_hash_freeze {...}, <calldata!mid:freeze, argc:0, ARGS_SIMPLE>
3439  */
3440  if (IS_INSN_ID(iobj, duphash)) {
3441  LINK_ELEMENT *next = iobj->link.next;
3442  if (IS_INSN(next) && (IS_INSN_ID(next, send))) {
3443  const struct rb_callinfo *ci = (struct rb_callinfo *)OPERAND_AT(next, 0);
3444  const rb_iseq_t *blockiseq = (rb_iseq_t *)OPERAND_AT(next, 1);
3445 
3446  if (vm_ci_simple(ci) && vm_ci_argc(ci) == 0 && blockiseq == NULL && vm_ci_mid(ci) == idFreeze) {
3447  VALUE hash = iobj->operands[0];
3448  rb_obj_reveal(hash, rb_cHash);
3449 
3450  iobj->insn_id = BIN(opt_hash_freeze);
3451  iobj->operand_size = 2;
3452  iobj->operands = compile_data_calloc2(iseq, iobj->operand_size, sizeof(VALUE));
3453  iobj->operands[0] = hash;
3454  iobj->operands[1] = (VALUE)ci;
3455  ELEM_REMOVE(next);
3456  }
3457  }
3458  }
3459 
3460  /*
3461  * newarray 0
3462  * send <calldata!mid:freeze, argc:0, ARGS_SIMPLE>, nil
3463  * =>
3464  * opt_ary_freeze [], <calldata!mid:freeze, argc:0, ARGS_SIMPLE>
3465  */
3466  if (IS_INSN_ID(iobj, newarray) && iobj->operands[0] == INT2FIX(0)) {
3467  LINK_ELEMENT *next = iobj->link.next;
3468  if (IS_INSN(next) && (IS_INSN_ID(next, send))) {
3469  const struct rb_callinfo *ci = (struct rb_callinfo *)OPERAND_AT(next, 0);
3470  const rb_iseq_t *blockiseq = (rb_iseq_t *)OPERAND_AT(next, 1);
3471 
3472  if (vm_ci_simple(ci) && vm_ci_argc(ci) == 0 && blockiseq == NULL && vm_ci_mid(ci) == idFreeze) {
3473  iobj->insn_id = BIN(opt_ary_freeze);
3474  iobj->operand_size = 2;
3475  iobj->operands = compile_data_calloc2(iseq, iobj->operand_size, sizeof(VALUE));
3476  iobj->operands[0] = rb_cArray_empty_frozen;
3477  iobj->operands[1] = (VALUE)ci;
3478  ELEM_REMOVE(next);
3479  }
3480  }
3481  }
3482 
3483  /*
3484  * newhash 0
3485  * send <calldata!mid:freeze, argc:0, ARGS_SIMPLE>, nil
3486  * =>
3487  * opt_hash_freeze {}, <calldata!mid:freeze, argc:0, ARGS_SIMPLE>
3488  */
3489  if (IS_INSN_ID(iobj, newhash) && iobj->operands[0] == INT2FIX(0)) {
3490  LINK_ELEMENT *next = iobj->link.next;
3491  if (IS_INSN(next) && (IS_INSN_ID(next, send))) {
3492  const struct rb_callinfo *ci = (struct rb_callinfo *)OPERAND_AT(next, 0);
3493  const rb_iseq_t *blockiseq = (rb_iseq_t *)OPERAND_AT(next, 1);
3494 
3495  if (vm_ci_simple(ci) && vm_ci_argc(ci) == 0 && blockiseq == NULL && vm_ci_mid(ci) == idFreeze) {
3496  iobj->insn_id = BIN(opt_hash_freeze);
3497  iobj->operand_size = 2;
3498  iobj->operands = compile_data_calloc2(iseq, iobj->operand_size, sizeof(VALUE));
3499  iobj->operands[0] = rb_cHash_empty_frozen;
3500  iobj->operands[1] = (VALUE)ci;
3501  ELEM_REMOVE(next);
3502  }
3503  }
3504  }
3505 
3506  if (IS_INSN_ID(iobj, branchif) ||
3507  IS_INSN_ID(iobj, branchnil) ||
3508  IS_INSN_ID(iobj, branchunless)) {
3509  /*
3510  * if L1
3511  * ...
3512  * L1:
3513  * jump L2
3514  * =>
3515  * if L2
3516  */
3517  INSN *nobj = (INSN *)get_destination_insn(iobj);
3518 
3519  /* This is super nasty hack!!!
3520  *
3521  * This jump-jump optimization may ignore event flags of the jump
3522  * instruction being skipped. Actually, Line 2 TracePoint event
3523  * is never fired in the following code:
3524  *
3525  * 1: raise if 1 == 2
3526  * 2: while true
3527  * 3: break
3528  * 4: end
3529  *
3530  * This is critical for coverage measurement. [Bug #15980]
3531  *
3532  * This is a stopgap measure: stop the jump-jump optimization if
3533  * coverage measurement is enabled and if the skipped instruction
3534  * has any event flag.
3535  *
3536  * Note that, still, TracePoint Line event does not occur on Line 2.
3537  * This should be fixed in future.
3538  */
3539  int stop_optimization =
3540  ISEQ_COVERAGE(iseq) && ISEQ_LINE_COVERAGE(iseq) &&
3541  nobj->link.type == ISEQ_ELEMENT_INSN &&
3542  nobj->insn_info.events;
3543  if (!stop_optimization) {
3544  INSN *pobj = (INSN *)iobj->link.prev;
3545  int prev_dup = 0;
3546  if (pobj) {
3547  if (!IS_INSN(&pobj->link))
3548  pobj = 0;
3549  else if (IS_INSN_ID(pobj, dup))
3550  prev_dup = 1;
3551  }
3552 
3553  for (;;) {
3554  if (IS_INSN(&nobj->link) && IS_INSN_ID(nobj, jump)) {
3555  if (!replace_destination(iobj, nobj)) break;
3556  }
3557  else if (prev_dup && IS_INSN_ID(nobj, dup) &&
3558  !!(nobj = (INSN *)nobj->link.next) &&
3559  /* basic blocks, with no labels in the middle */
3560  nobj->insn_id == iobj->insn_id) {
3561  /*
3562  * dup
3563  * if L1
3564  * ...
3565  * L1:
3566  * dup
3567  * if L2
3568  * =>
3569  * dup
3570  * if L2
3571  * ...
3572  * L1:
3573  * dup
3574  * if L2
3575  */
3576  if (!replace_destination(iobj, nobj)) break;
3577  }
3578  else if (pobj) {
3579  /*
3580  * putnil
3581  * if L1
3582  * =>
3583  * # nothing
3584  *
3585  * putobject true
3586  * if L1
3587  * =>
3588  * jump L1
3589  *
3590  * putstring ".."
3591  * if L1
3592  * =>
3593  * jump L1
3594  *
3595  * putstring ".."
3596  * dup
3597  * if L1
3598  * =>
3599  * putstring ".."
3600  * jump L1
3601  *
3602  */
3603  int cond;
3604  if (prev_dup && IS_INSN(pobj->link.prev)) {
3605  pobj = (INSN *)pobj->link.prev;
3606  }
3607  if (IS_INSN_ID(pobj, putobject)) {
3608  cond = (IS_INSN_ID(iobj, branchif) ?
3609  OPERAND_AT(pobj, 0) != Qfalse :
3610  IS_INSN_ID(iobj, branchunless) ?
3611  OPERAND_AT(pobj, 0) == Qfalse :
3612  FALSE);
3613  }
3614  else if (IS_INSN_ID(pobj, putstring) ||
3615  IS_INSN_ID(pobj, duparray) ||
3616  IS_INSN_ID(pobj, newarray)) {
3617  cond = IS_INSN_ID(iobj, branchif);
3618  }
3619  else if (IS_INSN_ID(pobj, putnil)) {
3620  cond = !IS_INSN_ID(iobj, branchif);
3621  }
3622  else break;
3623  if (prev_dup || !IS_INSN_ID(pobj, newarray)) {
3624  ELEM_REMOVE(iobj->link.prev);
3625  }
3626  else if (!iseq_pop_newarray(iseq, pobj)) {
3627  pobj = new_insn_core(iseq, pobj->insn_info.line_no, pobj->insn_info.node_id, BIN(pop), 0, NULL);
3628  ELEM_INSERT_PREV(&iobj->link, &pobj->link);
3629  }
3630  if (cond) {
3631  if (prev_dup) {
3632  pobj = new_insn_core(iseq, pobj->insn_info.line_no, pobj->insn_info.node_id, BIN(putnil), 0, NULL);
3633  ELEM_INSERT_NEXT(&iobj->link, &pobj->link);
3634  }
3635  iobj->insn_id = BIN(jump);
3636  goto again;
3637  }
3638  else {
3639  unref_destination(iobj, 0);
3640  ELEM_REMOVE(&iobj->link);
3641  }
3642  break;
3643  }
3644  else break;
3645  nobj = (INSN *)get_destination_insn(nobj);
3646  }
3647  }
3648  }
3649 
3650  if (IS_INSN_ID(iobj, pop)) {
3651  /*
3652  * putself / putnil / putobject obj / putstring "..."
3653  * pop
3654  * =>
3655  * # do nothing
3656  */
3657  LINK_ELEMENT *prev = iobj->link.prev;
3658  if (IS_INSN(prev)) {
3659  enum ruby_vminsn_type previ = ((INSN *)prev)->insn_id;
3660  if (previ == BIN(putobject) || previ == BIN(putnil) ||
3661  previ == BIN(putself) || previ == BIN(putstring) ||
3662  previ == BIN(putchilledstring) ||
3663  previ == BIN(dup) ||
3664  previ == BIN(getlocal) ||
3665  previ == BIN(getblockparam) ||
3666  previ == BIN(getblockparamproxy) ||
3667  previ == BIN(getinstancevariable) ||
3668  previ == BIN(duparray)) {
3669  /* just push operand or static value and pop soon, no
3670  * side effects */
3671  ELEM_REMOVE(prev);
3672  ELEM_REMOVE(&iobj->link);
3673  }
3674  else if (previ == BIN(newarray) && iseq_pop_newarray(iseq, (INSN*)prev)) {
3675  ELEM_REMOVE(&iobj->link);
3676  }
3677  else if (previ == BIN(concatarray)) {
3678  INSN *piobj = (INSN *)prev;
3679  INSERT_BEFORE_INSN1(piobj, piobj->insn_info.line_no, piobj->insn_info.node_id, splatarray, Qfalse);
3680  INSN_OF(piobj) = BIN(pop);
3681  }
3682  else if (previ == BIN(concatstrings)) {
3683  if (OPERAND_AT(prev, 0) == INT2FIX(1)) {
3684  ELEM_REMOVE(prev);
3685  }
3686  else {
3687  ELEM_REMOVE(&iobj->link);
3688  INSN_OF(prev) = BIN(adjuststack);
3689  }
3690  }
3691  }
3692  }
3693 
3694  if (IS_INSN_ID(iobj, newarray) ||
3695  IS_INSN_ID(iobj, duparray) ||
3696  IS_INSN_ID(iobj, concatarray) ||
3697  IS_INSN_ID(iobj, splatarray) ||
3698  0) {
3699  /*
3700  * newarray N
3701  * splatarray
3702  * =>
3703  * newarray N
3704  * newarray always puts an array
3705  */
3706  LINK_ELEMENT *next = iobj->link.next;
3707  if (IS_INSN(next) && IS_INSN_ID(next, splatarray)) {
3708  /* remove splatarray following always-array insn */
3709  ELEM_REMOVE(next);
3710  }
3711  }
3712 
3713  if (IS_INSN_ID(iobj, newarray)) {
3714  LINK_ELEMENT *next = iobj->link.next;
3715  if (IS_INSN(next) && IS_INSN_ID(next, expandarray) &&
3716  OPERAND_AT(next, 1) == INT2FIX(0)) {
3717  VALUE op1, op2;
3718  op1 = OPERAND_AT(iobj, 0);
3719  op2 = OPERAND_AT(next, 0);
3720  ELEM_REMOVE(next);
3721 
3722  if (op1 == op2) {
3723  /*
3724  * newarray 2
3725  * expandarray 2, 0
3726  * =>
3727  * swap
3728  */
3729  if (op1 == INT2FIX(2)) {
3730  INSN_OF(iobj) = BIN(swap);
3731  iobj->operand_size = 0;
3732  }
3733  /*
3734  * newarray X
3735  * expandarray X, 0
3736  * =>
3737  * opt_reverse X
3738  */
3739  else {
3740  INSN_OF(iobj) = BIN(opt_reverse);
3741  }
3742  }
3743  else {
3744  long diff = FIX2LONG(op1) - FIX2LONG(op2);
3745  INSN_OF(iobj) = BIN(opt_reverse);
3746  OPERAND_AT(iobj, 0) = OPERAND_AT(next, 0);
3747 
3748  if (op1 > op2) {
3749  /* X > Y
3750  * newarray X
3751  * expandarray Y, 0
3752  * =>
3753  * pop * (Y-X)
3754  * opt_reverse Y
3755  */
3756  for (; diff > 0; diff--) {
3757  INSERT_BEFORE_INSN(iobj, iobj->insn_info.line_no, iobj->insn_info.node_id, pop);
3758  }
3759  }
3760  else { /* (op1 < op2) */
3761  /* X < Y
3762  * newarray X
3763  * expandarray Y, 0
3764  * =>
3765  * putnil * (Y-X)
3766  * opt_reverse Y
3767  */
3768  for (; diff < 0; diff++) {
3769  INSERT_BEFORE_INSN(iobj, iobj->insn_info.line_no, iobj->insn_info.node_id, putnil);
3770  }
3771  }
3772  }
3773  }
3774  }
3775 
3776  if (IS_INSN_ID(iobj, duparray)) {
3777  LINK_ELEMENT *next = iobj->link.next;
3778  /*
3779  * duparray obj
3780  * expandarray X, 0
3781  * =>
3782  * putobject obj
3783  * expandarray X, 0
3784  */
3785  if (IS_INSN(next) && IS_INSN_ID(next, expandarray)) {
3786  INSN_OF(iobj) = BIN(putobject);
3787  }
3788  }
3789 
3790  if (IS_INSN_ID(iobj, anytostring)) {
3791  LINK_ELEMENT *next = iobj->link.next;
3792  /*
3793  * anytostring
3794  * concatstrings 1
3795  * =>
3796  * anytostring
3797  */
3798  if (IS_INSN(next) && IS_INSN_ID(next, concatstrings) &&
3799  OPERAND_AT(next, 0) == INT2FIX(1)) {
3800  ELEM_REMOVE(next);
3801  }
3802  }
3803 
3804  if (IS_INSN_ID(iobj, putstring) || IS_INSN_ID(iobj, putchilledstring) ||
3805  (IS_INSN_ID(iobj, putobject) && RB_TYPE_P(OPERAND_AT(iobj, 0), T_STRING))) {
3806  /*
3807  * putstring ""
3808  * concatstrings N
3809  * =>
3810  * concatstrings N-1
3811  */
3812  if (IS_NEXT_INSN_ID(&iobj->link, concatstrings) &&
3813  RSTRING_LEN(OPERAND_AT(iobj, 0)) == 0) {
3814  INSN *next = (INSN *)iobj->link.next;
3815  if ((OPERAND_AT(next, 0) = FIXNUM_INC(OPERAND_AT(next, 0), -1)) == INT2FIX(1)) {
3816  ELEM_REMOVE(&next->link);
3817  }
3818  ELEM_REMOVE(&iobj->link);
3819  }
3820  }
3821 
3822  if (IS_INSN_ID(iobj, concatstrings)) {
3823  /*
3824  * concatstrings N
3825  * concatstrings M
3826  * =>
3827  * concatstrings N+M-1
3828  */
3829  LINK_ELEMENT *next = iobj->link.next;
3830  INSN *jump = 0;
3831  if (IS_INSN(next) && IS_INSN_ID(next, jump))
3832  next = get_destination_insn(jump = (INSN *)next);
3833  if (IS_INSN(next) && IS_INSN_ID(next, concatstrings)) {
3834  int n = FIX2INT(OPERAND_AT(iobj, 0)) + FIX2INT(OPERAND_AT(next, 0)) - 1;
3835  OPERAND_AT(iobj, 0) = INT2FIX(n);
3836  if (jump) {
3837  LABEL *label = ((LABEL *)OPERAND_AT(jump, 0));
3838  if (!--label->refcnt) {
3839  ELEM_REMOVE(&label->link);
3840  }
3841  else {
3842  label = NEW_LABEL(0);
3843  OPERAND_AT(jump, 0) = (VALUE)label;
3844  }
3845  label->refcnt++;
3846  ELEM_INSERT_NEXT(next, &label->link);
3847  CHECK(iseq_peephole_optimize(iseq, get_next_insn(jump), do_tailcallopt));
3848  }
3849  else {
3850  ELEM_REMOVE(next);
3851  }
3852  }
3853  }
3854 
3855  if (do_tailcallopt &&
3856  (IS_INSN_ID(iobj, send) ||
3857  IS_INSN_ID(iobj, opt_aref_with) ||
3858  IS_INSN_ID(iobj, opt_aset_with) ||
3859  IS_INSN_ID(iobj, invokesuper))) {
3860  /*
3861  * send ...
3862  * leave
3863  * =>
3864  * send ..., ... | VM_CALL_TAILCALL, ...
3865  * leave # unreachable
3866  */
3867  INSN *piobj = NULL;
3868  if (iobj->link.next) {
3869  LINK_ELEMENT *next = iobj->link.next;
3870  do {
3871  if (!IS_INSN(next)) {
3872  next = next->next;
3873  continue;
3874  }
3875  switch (INSN_OF(next)) {
3876  case BIN(nop):
3877  next = next->next;
3878  break;
3879  case BIN(jump):
3880  /* if cond
3881  * return tailcall
3882  * end
3883  */
3884  next = get_destination_insn((INSN *)next);
3885  break;
3886  case BIN(leave):
3887  piobj = iobj;
3888  /* fall through */
3889  default:
3890  next = NULL;
3891  break;
3892  }
3893  } while (next);
3894  }
3895 
3896  if (piobj) {
3897  const struct rb_callinfo *ci = (struct rb_callinfo *)OPERAND_AT(piobj, 0);
3898  if (IS_INSN_ID(piobj, send) ||
3899  IS_INSN_ID(piobj, invokesuper)) {
3900  if (OPERAND_AT(piobj, 1) == 0) { /* no blockiseq */
3901  ci = ci_flag_set(iseq, ci, VM_CALL_TAILCALL);
3902  OPERAND_AT(piobj, 0) = (VALUE)ci;
3903  RB_OBJ_WRITTEN(iseq, Qundef, ci);
3904  }
3905  }
3906  else {
3907  ci = ci_flag_set(iseq, ci, VM_CALL_TAILCALL);
3908  OPERAND_AT(piobj, 0) = (VALUE)ci;
3909  RB_OBJ_WRITTEN(iseq, Qundef, ci);
3910  }
3911  }
3912  }
3913 
3914  if (IS_INSN_ID(iobj, dup)) {
3915  if (IS_NEXT_INSN_ID(&iobj->link, setlocal)) {
3916  LINK_ELEMENT *set1 = iobj->link.next, *set2 = NULL;
3917 
3918  /*
3919  * dup
3920  * setlocal x, y
3921  * setlocal x, y
3922  * =>
3923  * dup
3924  * setlocal x, y
3925  */
3926  if (IS_NEXT_INSN_ID(set1, setlocal)) {
3927  set2 = set1->next;
3928  if (OPERAND_AT(set1, 0) == OPERAND_AT(set2, 0) &&
3929  OPERAND_AT(set1, 1) == OPERAND_AT(set2, 1)) {
3930  ELEM_REMOVE(set1);
3931  ELEM_REMOVE(&iobj->link);
3932  }
3933  }
3934 
3935  /*
3936  * dup
3937  * setlocal x, y
3938  * dup
3939  * setlocal x, y
3940  * =>
3941  * dup
3942  * setlocal x, y
3943  */
3944  else if (IS_NEXT_INSN_ID(set1, dup) &&
3945  IS_NEXT_INSN_ID(set1->next, setlocal)) {
3946  set2 = set1->next->next;
3947  if (OPERAND_AT(set1, 0) == OPERAND_AT(set2, 0) &&
3948  OPERAND_AT(set1, 1) == OPERAND_AT(set2, 1)) {
3949  ELEM_REMOVE(set1->next);
3950  ELEM_REMOVE(set2);
3951  }
3952  }
3953  }
3954  }
3955 
3956  /*
3957  * getlocal x, y
3958  * dup
3959  * setlocal x, y
3960  * =>
3961  * dup
3962  */
3963  if (IS_INSN_ID(iobj, getlocal)) {
3964  LINK_ELEMENT *niobj = &iobj->link;
3965  if (IS_NEXT_INSN_ID(niobj, dup)) {
3966  niobj = niobj->next;
3967  }
3968  if (IS_NEXT_INSN_ID(niobj, setlocal)) {
3969  LINK_ELEMENT *set1 = niobj->next;
3970  if (OPERAND_AT(iobj, 0) == OPERAND_AT(set1, 0) &&
3971  OPERAND_AT(iobj, 1) == OPERAND_AT(set1, 1)) {
3972  ELEM_REMOVE(set1);
3973  ELEM_REMOVE(niobj);
3974  }
3975  }
3976  }
3977 
3978  /*
3979  * opt_invokebuiltin_delegate
3980  * trace
3981  * leave
3982  * =>
3983  * opt_invokebuiltin_delegate_leave
3984  * trace
3985  * leave
3986  */
3987  if (IS_INSN_ID(iobj, opt_invokebuiltin_delegate)) {
3988  if (IS_TRACE(iobj->link.next)) {
3989  if (IS_NEXT_INSN_ID(iobj->link.next, leave)) {
3990  iobj->insn_id = BIN(opt_invokebuiltin_delegate_leave);
3991  const struct rb_builtin_function *bf = (const struct rb_builtin_function *)iobj->operands[0];
3992  if (iobj == (INSN *)list && bf->argc == 0 && (iseq->body->builtin_attrs & BUILTIN_ATTR_LEAF)) {
3993  iseq->body->builtin_attrs |= BUILTIN_ATTR_SINGLE_NOARG_LEAF;
3994  }
3995  }
3996  }
3997  }
3998 
3999  /*
4000  * getblockparam
4001  * branchif / branchunless
4002  * =>
4003  * getblockparamproxy
4004  * branchif / branchunless
4005  */
4006  if (IS_INSN_ID(iobj, getblockparam)) {
4007  if (IS_NEXT_INSN_ID(&iobj->link, branchif) || IS_NEXT_INSN_ID(&iobj->link, branchunless)) {
4008  iobj->insn_id = BIN(getblockparamproxy);
4009  }
4010  }
4011 
4012  if (IS_INSN_ID(iobj, splatarray) && OPERAND_AT(iobj, 0) == false) {
4013  LINK_ELEMENT *niobj = &iobj->link;
4014  if (IS_NEXT_INSN_ID(niobj, duphash)) {
4015  niobj = niobj->next;
4016  LINK_ELEMENT *siobj;
4017  unsigned int set_flags = 0, unset_flags = 0;
4018 
4019  /*
4020  * Eliminate hash allocation for f(*a, kw: 1)
4021  *
4022  * splatarray false
4023  * duphash
4024  * send ARGS_SPLAT|KW_SPLAT|KW_SPLAT_MUT and not ARGS_BLOCKARG
4025  * =>
4026  * splatarray false
4027  * putobject
4028  * send ARGS_SPLAT|KW_SPLAT
4029  */
4030  if (IS_NEXT_INSN_ID(niobj, send)) {
4031  siobj = niobj->next;
4032  set_flags = VM_CALL_ARGS_SPLAT|VM_CALL_KW_SPLAT|VM_CALL_KW_SPLAT_MUT;
4033  unset_flags = VM_CALL_ARGS_BLOCKARG;
4034  }
4035  /*
4036  * Eliminate hash allocation for f(*a, kw: 1, &{arg,lvar,@iv})
4037  *
4038  * splatarray false
4039  * duphash
4040  * getlocal / getinstancevariable / getblockparamproxy
4041  * send ARGS_SPLAT|KW_SPLAT|KW_SPLAT_MUT|ARGS_BLOCKARG
4042  * =>
4043  * splatarray false
4044  * putobject
4045  * getlocal / getinstancevariable / getblockparamproxy
4046  * send ARGS_SPLAT|KW_SPLAT|ARGS_BLOCKARG
4047  */
4048  else if ((IS_NEXT_INSN_ID(niobj, getlocal) || IS_NEXT_INSN_ID(niobj, getinstancevariable) ||
4049  IS_NEXT_INSN_ID(niobj, getblockparamproxy)) && (IS_NEXT_INSN_ID(niobj->next, send))) {
4050  siobj = niobj->next->next;
4051  set_flags = VM_CALL_ARGS_SPLAT|VM_CALL_KW_SPLAT|VM_CALL_KW_SPLAT_MUT|VM_CALL_ARGS_BLOCKARG;
4052  }
4053 
4054  if (set_flags) {
4055  const struct rb_callinfo *ci = (const struct rb_callinfo *)OPERAND_AT(siobj, 0);
4056  unsigned int flags = vm_ci_flag(ci);
4057  if ((flags & set_flags) == set_flags && !(flags & unset_flags)) {
4058  ((INSN*)niobj)->insn_id = BIN(putobject);
4059  OPERAND_AT(niobj, 0) = rb_hash_freeze(rb_hash_resurrect(OPERAND_AT(niobj, 0)));
4060 
4061  const struct rb_callinfo *nci = vm_ci_new(vm_ci_mid(ci),
4062  flags & ~VM_CALL_KW_SPLAT_MUT, vm_ci_argc(ci), vm_ci_kwarg(ci));
4063  RB_OBJ_WRITTEN(iseq, ci, nci);
4064  OPERAND_AT(siobj, 0) = (VALUE)nci;
4065  }
4066  }
4067  }
4068  }
4069 
4070  return COMPILE_OK;
4071 }
4072 
4073 static int
4074 insn_set_specialized_instruction(rb_iseq_t *iseq, INSN *iobj, int insn_id)
4075 {
4076  iobj->insn_id = insn_id;
4077  iobj->operand_size = insn_len(insn_id) - 1;
4078  iobj->insn_info.events |= RUBY_EVENT_C_CALL | RUBY_EVENT_C_RETURN;
4079 
4080  if (insn_id == BIN(opt_neq)) {
4081  VALUE original_ci = iobj->operands[0];
4082  iobj->operand_size = 2;
4083  iobj->operands = compile_data_calloc2(iseq, iobj->operand_size, sizeof(VALUE));
4084  iobj->operands[0] = (VALUE)new_callinfo(iseq, idEq, 1, 0, NULL, FALSE);
4085  iobj->operands[1] = original_ci;
4086  }
4087 
4088  return COMPILE_OK;
4089 }
4090 
/*
 * Replace generic instructions with specialized ones:
 *  - newarray followed by certain sends becomes opt_newarray_send
 *    (max/min/hash/pack, avoiding the intermediate array allocation);
 *  - simple sends of well-known operators become their opt_* forms;
 *  - remaining blockless, non-forwarding sends become
 *    opt_send_without_block.
 * Always returns COMPILE_OK.
 */
static int
iseq_specialized_instruction(rb_iseq_t *iseq, INSN *iobj)
{
    if (IS_INSN_ID(iobj, newarray) && iobj->link.next &&
        IS_INSN(iobj->link.next)) {
        /*
         *   [a, b, ...].max/min -> a, b, c, opt_newarray_send max/min
         */
        INSN *niobj = (INSN *)iobj->link.next;
        if (IS_INSN_ID(niobj, send)) {
            const struct rb_callinfo *ci = (struct rb_callinfo *)OPERAND_AT(niobj, 0);
            if (vm_ci_simple(ci) && vm_ci_argc(ci) == 0) {
                VALUE method = INT2FIX(0);
                switch (vm_ci_mid(ci)) {
                  case idMax:
                    method = INT2FIX(VM_OPT_NEWARRAY_SEND_MAX);
                    break;
                  case idMin:
                    method = INT2FIX(VM_OPT_NEWARRAY_SEND_MIN);
                    break;
                  case idHash:
                    method = INT2FIX(VM_OPT_NEWARRAY_SEND_HASH);
                    break;
                }

                if (method != INT2FIX(0)) {
                    VALUE num = iobj->operands[0];
                    int operand_len = insn_len(BIN(opt_newarray_send)) - 1;
                    iobj->insn_id = BIN(opt_newarray_send);
                    iobj->operands = compile_data_calloc2(iseq, operand_len, sizeof(VALUE));
                    iobj->operands[0] = num;
                    iobj->operands[1] = method;
                    iobj->operand_size = operand_len;
                    ELEM_REMOVE(&niobj->link);
                    return COMPILE_OK;
                }
            }
        }
        /* newarray n, putstring "fmt", send :pack
         * -> putstring "fmt", opt_newarray_send n+1, :pack
         * (the format string becomes part of the counted operands) */
        else if ((IS_INSN_ID(niobj, putstring) || IS_INSN_ID(niobj, putchilledstring) ||
                  (IS_INSN_ID(niobj, putobject) && RB_TYPE_P(OPERAND_AT(niobj, 0), T_STRING))) &&
                 IS_NEXT_INSN_ID(&niobj->link, send)) {
            const struct rb_callinfo *ci = (struct rb_callinfo *)OPERAND_AT((INSN *)niobj->link.next, 0);
            if (vm_ci_simple(ci) && vm_ci_argc(ci) == 1 && vm_ci_mid(ci) == idPack) {
                VALUE num = iobj->operands[0];
                int operand_len = insn_len(BIN(opt_newarray_send)) - 1;
                iobj->insn_id = BIN(opt_newarray_send);
                iobj->operands = compile_data_calloc2(iseq, operand_len, sizeof(VALUE));
                iobj->operands[0] = FIXNUM_INC(num, 1);
                iobj->operands[1] = INT2FIX(VM_OPT_NEWARRAY_SEND_PACK);
                iobj->operand_size = operand_len;
                ELEM_REMOVE(&iobj->link);
                ELEM_REMOVE(niobj->link.next);
                ELEM_INSERT_NEXT(&niobj->link, &iobj->link);
                return COMPILE_OK;
            }
        }
        // newarray n, putchilledstring "E", getlocal b, send :pack with {buffer: b}
        // -> putchilledstring "E", getlocal b, opt_newarray_send n+2, :pack, :buffer
        else if ((IS_INSN_ID(niobj, putstring) || IS_INSN_ID(niobj, putchilledstring) ||
                  (IS_INSN_ID(niobj, putobject) && RB_TYPE_P(OPERAND_AT(niobj, 0), T_STRING))) &&
                 IS_NEXT_INSN_ID(&niobj->link, getlocal) &&
                 (niobj->link.next && IS_NEXT_INSN_ID(niobj->link.next, send))) {
            const struct rb_callinfo *ci = (struct rb_callinfo *)OPERAND_AT((INSN *)(niobj->link.next)->next, 0);
            const struct rb_callinfo_kwarg *kwarg = vm_ci_kwarg(ci);
            if (vm_ci_mid(ci) == idPack && vm_ci_argc(ci) == 2 &&
                (kwarg && kwarg->keyword_len == 1 && kwarg->keywords[0] == rb_id2sym(idBuffer))) {
                VALUE num = iobj->operands[0];
                int operand_len = insn_len(BIN(opt_newarray_send)) - 1;
                iobj->insn_id = BIN(opt_newarray_send);
                iobj->operands = compile_data_calloc2(iseq, operand_len, sizeof(VALUE));
                iobj->operands[0] = FIXNUM_INC(num, 2);
                iobj->operands[1] = INT2FIX(VM_OPT_NEWARRAY_SEND_PACK_BUFFER);
                iobj->operand_size = operand_len;
                // Remove the "send" insn.
                ELEM_REMOVE((niobj->link.next)->next);
                // Remove the modified insn from its original "newarray" position...
                ELEM_REMOVE(&iobj->link);
                // and insert it after the buffer insn.
                ELEM_INSERT_NEXT(niobj->link.next, &iobj->link);
                return COMPILE_OK;
            }
        }
    }

    if (IS_INSN_ID(iobj, send)) {
        const struct rb_callinfo *ci = (struct rb_callinfo *)OPERAND_AT(iobj, 0);
        const rb_iseq_t *blockiseq = (rb_iseq_t *)OPERAND_AT(iobj, 1);

#define SP_INSN(opt) insn_set_specialized_instruction(iseq, iobj, BIN(opt_##opt))
        if (vm_ci_simple(ci)) {
            switch (vm_ci_argc(ci)) {
              case 0:
                switch (vm_ci_mid(ci)) {
                  case idLength: SP_INSN(length); return COMPILE_OK;
                  case idSize:   SP_INSN(size);   return COMPILE_OK;
                  case idEmptyP: SP_INSN(empty_p);return COMPILE_OK;
                  case idNilP:   SP_INSN(nil_p);  return COMPILE_OK;
                  case idSucc:   SP_INSN(succ);   return COMPILE_OK;
                  case idNot:    SP_INSN(not);    return COMPILE_OK;
                }
                break;
              case 1:
                switch (vm_ci_mid(ci)) {
                  case idPLUS:   SP_INSN(plus);   return COMPILE_OK;
                  case idMINUS:  SP_INSN(minus);  return COMPILE_OK;
                  case idMULT:   SP_INSN(mult);   return COMPILE_OK;
                  case idDIV:    SP_INSN(div);    return COMPILE_OK;
                  case idMOD:    SP_INSN(mod);    return COMPILE_OK;
                  case idEq:     SP_INSN(eq);     return COMPILE_OK;
                  case idNeq:    SP_INSN(neq);    return COMPILE_OK;
                  case idEqTilde:SP_INSN(regexpmatch2);return COMPILE_OK;
                  case idLT:     SP_INSN(lt);     return COMPILE_OK;
                  case idLE:     SP_INSN(le);     return COMPILE_OK;
                  case idGT:     SP_INSN(gt);     return COMPILE_OK;
                  case idGE:     SP_INSN(ge);     return COMPILE_OK;
                  case idLTLT:   SP_INSN(ltlt);   return COMPILE_OK;
                  case idAREF:   SP_INSN(aref);   return COMPILE_OK;
                  case idAnd:    SP_INSN(and);    return COMPILE_OK;
                  case idOr:     SP_INSN(or);     return COMPILE_OK;
                }
                break;
              case 2:
                switch (vm_ci_mid(ci)) {
                  case idASET:   SP_INSN(aset);   return COMPILE_OK;
                }
                break;
            }
        }

        if ((vm_ci_flag(ci) & (VM_CALL_ARGS_BLOCKARG | VM_CALL_FORWARDING)) == 0 && blockiseq == NULL) {
            iobj->insn_id = BIN(opt_send_without_block);
            iobj->operand_size = insn_len(iobj->insn_id) - 1;
        }
    }
#undef SP_INSN

    return COMPILE_OK;
}
4229 
/* Whether frames of this iseq type are eligible for tail-call
 * optimization at all (see iseq_optimize). */
static inline int
tailcallable_p(rb_iseq_t *iseq)
{
    switch (ISEQ_BODY(iseq)->type) {
      case ISEQ_TYPE_TOP:
      case ISEQ_TYPE_EVAL:
      case ISEQ_TYPE_MAIN:
        /* not tail callable because cfp will be over popped */
      case ISEQ_TYPE_RESCUE:
      case ISEQ_TYPE_ENSURE:
        /* rescue block can't tail call because of errinfo */
        return FALSE;
      default:
        return TRUE;
    }
}
4246 
/*
 * Drive the per-instruction optimization passes over every element of
 * ANCHOR, as enabled by the compile options: peephole optimization,
 * specialized instructions, and operand unification.  Tail-call
 * optimization is suspended while inside a rescued region (tracked by
 * LABEL_RESCUE_BEG/LABEL_RESCUE_END label markers).  As a final step,
 * a jump-free block iseq that cannot raise may have its leading nop
 * (presumably a handler anchor; confirm against the emitter) removed.
 */
static int
iseq_optimize(rb_iseq_t *iseq, LINK_ANCHOR *const anchor)
{
    LINK_ELEMENT *list;
    const int do_peepholeopt = ISEQ_COMPILE_DATA(iseq)->option->peephole_optimization;
    const int do_tailcallopt = tailcallable_p(iseq) &&
        ISEQ_COMPILE_DATA(iseq)->option->tailcall_optimization;
    const int do_si = ISEQ_COMPILE_DATA(iseq)->option->specialized_instruction;
    const int do_ou = ISEQ_COMPILE_DATA(iseq)->option->operands_unification;
    int rescue_level = 0;                 /* nesting depth of rescue regions */
    int tailcallopt = do_tailcallopt;     /* effective flag for the current region */

    list = FIRST_ELEMENT(anchor);

    int do_block_optimization = 0;

    if (ISEQ_BODY(iseq)->type == ISEQ_TYPE_BLOCK && !ISEQ_COMPILE_DATA(iseq)->catch_except_p) {
        do_block_optimization = 1;
    }

    while (list) {
        if (IS_INSN(list)) {
            if (do_peepholeopt) {
                iseq_peephole_optimize(iseq, list, tailcallopt);
            }
            if (do_si) {
                iseq_specialized_instruction(iseq, (INSN *)list);
            }
            if (do_ou) {
                insn_operands_unification((INSN *)list);
            }

            /* any jump disqualifies the block-level nop removal below */
            if (do_block_optimization) {
                INSN * item = (INSN *)list;
                if (IS_INSN_ID(item, jump)) {
                    do_block_optimization = 0;
                }
            }
        }
        if (IS_LABEL(list)) {
            switch (((LABEL *)list)->rescued) {
              case LABEL_RESCUE_BEG:
                rescue_level++;
                tailcallopt = FALSE;
                break;
              case LABEL_RESCUE_END:
                if (!--rescue_level) tailcallopt = do_tailcallopt;
                break;
            }
        }
        list = list->next;
    }

    if (do_block_optimization) {
        LINK_ELEMENT * le = FIRST_ELEMENT(anchor)->next;
        if (IS_INSN(le) && IS_INSN_ID((INSN *)le, nop)) {
            ELEM_REMOVE(le);
        }
    }
    return COMPILE_OK;
}
4308 
#if OPT_INSTRUCTIONS_UNIFICATION
static INSN *
new_unified_insn(rb_iseq_t *iseq,
                 int insn_id, int size, LINK_ELEMENT *seq_list)
{
    /* Build one unified instruction (insn_id) replacing the `size`
     * consecutive instructions starting at seq_list; their operands are
     * concatenated in order.  Source location info is taken from the
     * last replaced instruction. */
    INSN *iobj = 0;
    LINK_ELEMENT *list = seq_list;
    int i, argc = 0;
    VALUE *operands = 0, *ptr = 0;


    /* count argc */
    for (i = 0; i < size; i++) {
        iobj = (INSN *)list;
        argc += iobj->operand_size;
        list = list->next;
    }

    if (argc > 0) {
        ptr = operands = compile_data_alloc2(iseq, sizeof(VALUE), argc);
    }

    /* copy operands */
    list = seq_list;
    for (i = 0; i < size; i++) {
        iobj = (INSN *)list;
        MEMCPY(ptr, iobj->operands, VALUE, iobj->operand_size);
        ptr += iobj->operand_size;
        list = list->next;
    }

    /* iobj points at the last visited instruction here */
    return new_insn_core(iseq, iobj->insn_info.line_no, iobj->insn_info.node_id, insn_id, argc, operands);
}
#endif
4343 
4344 /*
4345  * This scheme can get more performance if do this optimize with
4346  * label address resolving.
4347  * It's future work (if compile time was bottle neck).
4348  */
4349 static int
4350 iseq_insns_unification(rb_iseq_t *iseq, LINK_ANCHOR *const anchor)
4351 {
4352 #if OPT_INSTRUCTIONS_UNIFICATION
4353  LINK_ELEMENT *list;
4354  INSN *iobj, *niobj;
4355  int id, k;
4356  intptr_t j;
4357 
4358  list = FIRST_ELEMENT(anchor);
4359  while (list) {
4360  if (IS_INSN(list)) {
4361  iobj = (INSN *)list;
4362  id = iobj->insn_id;
4363  if (unified_insns_data[id] != 0) {
4364  const int *const *entry = unified_insns_data[id];
4365  for (j = 1; j < (intptr_t)entry[0]; j++) {
4366  const int *unified = entry[j];
4367  LINK_ELEMENT *li = list->next;
4368  for (k = 2; k < unified[1]; k++) {
4369  if (!IS_INSN(li) ||
4370  ((INSN *)li)->insn_id != unified[k]) {
4371  goto miss;
4372  }
4373  li = li->next;
4374  }
4375  /* matched */
4376  niobj =
4377  new_unified_insn(iseq, unified[0], unified[1] - 1,
4378  list);
4379 
4380  /* insert to list */
4381  niobj->link.prev = (LINK_ELEMENT *)iobj->link.prev;
4382  niobj->link.next = li;
4383  if (li) {
4384  li->prev = (LINK_ELEMENT *)niobj;
4385  }
4386 
4387  list->prev->next = (LINK_ELEMENT *)niobj;
4388  list = (LINK_ELEMENT *)niobj;
4389  break;
4390  miss:;
4391  }
4392  }
4393  }
4394  list = list->next;
4395  }
4396 #endif
4397  return COMPILE_OK;
4398 }
4399 
4400 static int
4401 all_string_result_p(const NODE *node)
4402 {
4403  if (!node) return FALSE;
4404  switch (nd_type(node)) {
4405  case NODE_STR: case NODE_DSTR: case NODE_FILE:
4406  return TRUE;
4407  case NODE_IF: case NODE_UNLESS:
4408  if (!RNODE_IF(node)->nd_body || !RNODE_IF(node)->nd_else) return FALSE;
4409  if (all_string_result_p(RNODE_IF(node)->nd_body))
4410  return all_string_result_p(RNODE_IF(node)->nd_else);
4411  return FALSE;
4412  case NODE_AND: case NODE_OR:
4413  if (!RNODE_AND(node)->nd_2nd)
4414  return all_string_result_p(RNODE_AND(node)->nd_1st);
4415  if (!all_string_result_p(RNODE_AND(node)->nd_1st))
4416  return FALSE;
4417  return all_string_result_p(RNODE_AND(node)->nd_2nd);
4418  default:
4419  return FALSE;
4420  }
4421 }
4422 
static int
compile_dstr_fragments(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, int *cntp)
{
    /* Push every fragment of an interpolated string (dstr/dregx) onto
     * the stack, storing the number of pushed values in *cntp.  The
     * leading literal, if empty, may be removed again at the end when
     * another plain string literal fragment was pushed (it is then
     * redundant). */
    const struct RNode_LIST *list = RNODE_DSTR(node)->nd_next;
    VALUE lit = rb_node_dstr_string_val(node);
    LINK_ELEMENT *first_lit = 0;
    int cnt = 0;

    debugp_param("nd_lit", lit);
    if (!NIL_P(lit)) {
        cnt++;
        if (!RB_TYPE_P(lit, T_STRING)) {
            COMPILE_ERROR(ERROR_ARGS "dstr: must be string: %s",
                          rb_builtin_type_name(TYPE(lit)));
            return COMPILE_NG;
        }
        lit = rb_fstring(lit);
        ADD_INSN1(ret, node, putobject, lit);
        RB_OBJ_WRITTEN(iseq, Qundef, lit);
        /* remember the element for the possible removal below */
        if (RSTRING_LEN(lit) == 0) first_lit = LAST_ELEMENT(ret);
    }

    while (list) {
        const NODE *const head = list->nd_head;
        if (nd_type_p(head, NODE_STR)) {
            lit = rb_node_str_string_val(head);
            ADD_INSN1(ret, head, putobject, lit);
            RB_OBJ_WRITTEN(iseq, Qundef, lit);
            /* mark that a plain string literal fragment was seen */
            lit = Qnil;
        }
        else {
            CHECK(COMPILE(ret, "each string", head));
        }
        cnt++;
        list = (struct RNode_LIST *)list->nd_next;
    }
    /* lit is Qnil here iff some fragment was a plain string literal */
    if (NIL_P(lit) && first_lit) {
        ELEM_REMOVE(first_lit);
        --cnt;
    }
    *cntp = cnt;

    return COMPILE_OK;
}
4467 
4468 static int
4469 compile_block(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *node, int popped)
4470 {
4471  while (node && nd_type_p(node, NODE_BLOCK)) {
4472  CHECK(COMPILE_(ret, "BLOCK body", RNODE_BLOCK(node)->nd_head,
4473  (RNODE_BLOCK(node)->nd_next ? 1 : popped)));
4474  node = RNODE_BLOCK(node)->nd_next;
4475  }
4476  if (node) {
4477  CHECK(COMPILE_(ret, "BLOCK next", RNODE_BLOCK(node)->nd_next, popped));
4478  }
4479  return COMPILE_OK;
4480 }
4481 
4482 static int
4483 compile_dstr(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node)
4484 {
4485  int cnt;
4486  if (!RNODE_DSTR(node)->nd_next) {
4487  VALUE lit = rb_node_dstr_string_val(node);
4488  ADD_INSN1(ret, node, putstring, lit);
4489  RB_OBJ_WRITTEN(iseq, Qundef, lit);
4490  }
4491  else {
4492  CHECK(compile_dstr_fragments(iseq, ret, node, &cnt));
4493  ADD_INSN1(ret, node, concatstrings, INT2FIX(cnt));
4494  }
4495  return COMPILE_OK;
4496 }
4497 
4498 static int
4499 compile_dregx(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, int popped)
4500 {
4501  int cnt;
4502  int cflag = (int)RNODE_DREGX(node)->as.nd_cflag;
4503 
4504  if (!RNODE_DREGX(node)->nd_next) {
4505  if (!popped) {
4506  VALUE src = rb_node_dregx_string_val(node);
4507  VALUE match = rb_reg_compile(src, cflag, NULL, 0);
4508  ADD_INSN1(ret, node, putobject, match);
4509  RB_OBJ_WRITTEN(iseq, Qundef, match);
4510  }
4511  return COMPILE_OK;
4512  }
4513 
4514  CHECK(compile_dstr_fragments(iseq, ret, node, &cnt));
4515  ADD_INSN2(ret, node, toregexp, INT2FIX(cflag), INT2FIX(cnt));
4516 
4517  if (popped) {
4518  ADD_INSN(ret, node, pop);
4519  }
4520 
4521  return COMPILE_OK;
4522 }
4523 
static int
compile_flip_flop(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, int again,
                  LABEL *then_label, LABEL *else_label)
{
    /* Compile a flip-flop condition (`beg..end` / `beg...end`).
     * The on/off state lives in a special-variable slot of the local
     * iseq, allocated here via ISEQ_FLIP_CNT_INCREMENT.
     * `again` is TRUE for the two-dot form (NODE_FLIP2): after the begin
     * expression turns the state on, control falls through to lend so the
     * end expression is also tested in the same evaluation. */
    const int line = nd_line(node);
    LABEL *lend = NEW_LABEL(line);
    rb_num_t cnt = ISEQ_FLIP_CNT_INCREMENT(ISEQ_BODY(iseq)->local_iseq)
        + VM_SVAR_FLIPFLOP_START;
    VALUE key = INT2FIX(cnt);

    ADD_INSN2(ret, node, getspecial, key, INT2FIX(0));
    ADD_INSNL(ret, node, branchif, lend);

    /* *flip == 0 */
    CHECK(COMPILE(ret, "flip2 beg", RNODE_FLIP2(node)->nd_beg));
    ADD_INSNL(ret, node, branchunless, else_label);
    ADD_INSN1(ret, node, putobject, Qtrue);
    ADD_INSN1(ret, node, setspecial, key);
    if (!again) {
        ADD_INSNL(ret, node, jump, then_label);
    }

    /* *flip == 1 */
    ADD_LABEL(ret, lend);
    CHECK(COMPILE(ret, "flip2 end", RNODE_FLIP2(node)->nd_end));
    ADD_INSNL(ret, node, branchunless, then_label);
    ADD_INSN1(ret, node, putobject, Qfalse);
    ADD_INSN1(ret, node, setspecial, key);
    ADD_INSNL(ret, node, jump, then_label);

    return COMPILE_OK;
}
4556 
4557 static int
4558 compile_branch_condition(rb_iseq_t *iseq, LINK_ANCHOR *ret, const NODE *cond,
4559  LABEL *then_label, LABEL *else_label);
4560 
4561 #define COMPILE_SINGLE 2
static int
compile_logical(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *cond,
                LABEL *then_label, LABEL *else_label)
{
    /* Compile one operand of a short-circuit (&&/||) condition.
     * Exactly one of then_label/else_label is NULL; a fresh label is
     * created to stand in for the missing target and is placed after the
     * operand's code.  Returns COMPILE_SINGLE when the operand compiled
     * to code that never falls through to that label (the caller then
     * redirects the rest of the condition to a discarded anchor). */
    DECL_ANCHOR(seq);
    INIT_ANCHOR(seq);
    LABEL *label = NEW_LABEL(nd_line(cond));
    if (!then_label) then_label = label;
    else if (!else_label) else_label = label;

    CHECK(compile_branch_condition(iseq, seq, cond, then_label, else_label));

    if (LIST_INSN_SIZE_ONE(seq)) {
        INSN *insn = (INSN *)ELEM_FIRST_INSN(FIRST_ELEMENT(seq));
        /* a lone jump to our own fall-through label is a no-op */
        if (insn->insn_id == BIN(jump) && (LABEL *)(insn->operands[0]) == label)
            return COMPILE_OK;
    }
    if (!label->refcnt) {
        return COMPILE_SINGLE;
    }
    ADD_LABEL(seq, label);
    ADD_SEQ(ret, seq);
    return COMPILE_OK;
}
4586 
static int
compile_branch_condition(rb_iseq_t *iseq, LINK_ANCHOR *ret, const NODE *cond,
                         LABEL *then_label, LABEL *else_label)
{
    /* Emit code that evaluates `cond` and jumps to then_label when it is
     * truthy, and to else_label otherwise.  Statically-known conditions
     * are folded into a single unconditional jump. */
    int ok;
    DECL_ANCHOR(ignore);

  again:
    switch (nd_type(cond)) {
      case NODE_AND:
        CHECK(ok = compile_logical(iseq, ret, RNODE_AND(cond)->nd_1st, NULL, else_label));
        cond = RNODE_AND(cond)->nd_2nd;
        if (ok == COMPILE_SINGLE) {
            /* the first operand decided the branch by itself; compile the
             * remainder into a discarded anchor (for error checking) */
            INIT_ANCHOR(ignore);
            ret = ignore;
            then_label = NEW_LABEL(nd_line(cond));
        }
        goto again;
      case NODE_OR:
        CHECK(ok = compile_logical(iseq, ret, RNODE_OR(cond)->nd_1st, then_label, NULL));
        cond = RNODE_OR(cond)->nd_2nd;
        if (ok == COMPILE_SINGLE) {
            INIT_ANCHOR(ignore);
            ret = ignore;
            else_label = NEW_LABEL(nd_line(cond));
        }
        goto again;
      case NODE_SYM:
      case NODE_LINE:
      case NODE_FILE:
      case NODE_ENCODING:
      case NODE_INTEGER:    /* NODE_INTEGER is always true */
      case NODE_FLOAT:      /* NODE_FLOAT is always true */
      case NODE_RATIONAL:   /* NODE_RATIONAL is always true */
      case NODE_IMAGINARY:  /* NODE_IMAGINARY is always true */
      case NODE_TRUE:
      case NODE_STR:
      case NODE_REGX:
      case NODE_ZLIST:
      case NODE_LAMBDA:
        /* printf("useless condition eliminate (%s)\n", ruby_node_name(nd_type(cond))); */
        ADD_INSNL(ret, cond, jump, then_label);
        return COMPILE_OK;
      case NODE_FALSE:
      case NODE_NIL:
        /* printf("useless condition eliminate (%s)\n", ruby_node_name(nd_type(cond))); */
        ADD_INSNL(ret, cond, jump, else_label);
        return COMPILE_OK;
      case NODE_LIST:
      case NODE_ARGSCAT:
      case NODE_DREGX:
      case NODE_DSTR:
        /* always truthy, but may have side effects: evaluate popped */
        CHECK(COMPILE_POPPED(ret, "branch condition", cond));
        ADD_INSNL(ret, cond, jump, then_label);
        return COMPILE_OK;
      case NODE_FLIP2:
        CHECK(compile_flip_flop(iseq, ret, cond, TRUE, then_label, else_label));
        return COMPILE_OK;
      case NODE_FLIP3:
        CHECK(compile_flip_flop(iseq, ret, cond, FALSE, then_label, else_label));
        return COMPILE_OK;
      case NODE_DEFINED:
        CHECK(compile_defined_expr(iseq, ret, cond, Qfalse, ret == ignore));
        break;
      default:
        {
            DECL_ANCHOR(cond_seq);
            INIT_ANCHOR(cond_seq);

            CHECK(COMPILE(cond_seq, "branch condition", cond));

            if (LIST_INSN_SIZE_ONE(cond_seq)) {
                INSN *insn = (INSN *)ELEM_FIRST_INSN(FIRST_ELEMENT(cond_seq));
                /* fold a lone putobject into an unconditional jump */
                if (insn->insn_id == BIN(putobject)) {
                    if (RTEST(insn->operands[0])) {
                        ADD_INSNL(ret, cond, jump, then_label);
                        // maybe unreachable
                        return COMPILE_OK;
                    }
                    else {
                        ADD_INSNL(ret, cond, jump, else_label);
                        return COMPILE_OK;
                    }
                }
            }
            ADD_SEQ(ret, cond_seq);
        }
        break;
    }

    /* generic case: the condition value is on the stack */
    ADD_INSNL(ret, cond, branchunless, else_label);
    ADD_INSNL(ret, cond, jump, then_label);
    return COMPILE_OK;
}
4681 
4682 #define HASH_BRACE 1
4683 
4684 static int
4685 keyword_node_p(const NODE *const node)
4686 {
4687  return nd_type_p(node, NODE_HASH) && (RNODE_HASH(node)->nd_brace & HASH_BRACE) != HASH_BRACE;
4688 }
4689 
4690 static VALUE
4691 get_symbol_value(rb_iseq_t *iseq, const NODE *node)
4692 {
4693  switch (nd_type(node)) {
4694  case NODE_SYM:
4695  return rb_node_sym_string_val(node);
4696  default:
4697  UNKNOWN_NODE("get_symbol_value", node, Qnil);
4698  }
4699 }
4700 
4701 static VALUE
4702 node_hash_unique_key_index(rb_iseq_t *iseq, rb_node_hash_t *node_hash, int *count_ptr)
4703 {
4704  NODE *node = node_hash->nd_head;
4705  VALUE hash = rb_hash_new();
4706  VALUE ary = rb_ary_new();
4707 
4708  for (int i = 0; node != NULL; i++, node = RNODE_LIST(RNODE_LIST(node)->nd_next)->nd_next) {
4709  VALUE key = get_symbol_value(iseq, RNODE_LIST(node)->nd_head);
4710  VALUE idx = rb_hash_aref(hash, key);
4711  if (!NIL_P(idx)) {
4712  rb_ary_store(ary, FIX2INT(idx), Qfalse);
4713  (*count_ptr)--;
4714  }
4715  rb_hash_aset(hash, key, INT2FIX(i));
4716  rb_ary_store(ary, i, Qtrue);
4717  (*count_ptr)++;
4718  }
4719 
4720  return ary;
4721 }
4722 
static int
compile_keyword_arg(rb_iseq_t *iseq, LINK_ANCHOR *const ret,
                    const NODE *const root_node,
                    struct rb_callinfo_kwarg **const kw_arg_ptr,
                    unsigned int *flag)
{
    /* Try to compile a bare hash node as static keyword arguments.
     * On success: compiles the values, fills *kw_arg_ptr with the symbol
     * keys (duplicates resolved in favor of the last occurrence) and
     * returns TRUE.  Returns FALSE — possibly setting VM_CALL_KW_SPLAT
     * flags — when some key is not a static symbol; the caller then
     * compiles the hash as a keyword splat instead. */
    RUBY_ASSERT(nd_type_p(root_node, NODE_HASH));
    RUBY_ASSERT(kw_arg_ptr != NULL);
    RUBY_ASSERT(flag != NULL);

    if (RNODE_HASH(root_node)->nd_head && nd_type_p(RNODE_HASH(root_node)->nd_head, NODE_LIST)) {
        const NODE *node = RNODE_HASH(root_node)->nd_head;
        int seen_nodes = 0;

        /* first pass: every key must be a static symbol */
        while (node) {
            const NODE *key_node = RNODE_LIST(node)->nd_head;
            seen_nodes++;

            RUBY_ASSERT(nd_type_p(node, NODE_LIST));
            if (key_node && nd_type_p(key_node, NODE_SYM)) {
                /* can be keywords */
            }
            else {
                if (flag) {
                    *flag |= VM_CALL_KW_SPLAT;
                    if (seen_nodes > 1 || RNODE_LIST(RNODE_LIST(node)->nd_next)->nd_next) {
                        /* A new hash will be created for the keyword arguments
                         * in this case, so mark the method as passing mutable
                         * keyword splat.
                         */
                        *flag |= VM_CALL_KW_SPLAT_MUT;
                    }
                }
                return FALSE;
            }
            node = RNODE_LIST(node)->nd_next; /* skip value node */
            node = RNODE_LIST(node)->nd_next;
        }

        /* may be keywords */
        node = RNODE_HASH(root_node)->nd_head;
        {
            int len = 0;
            VALUE key_index = node_hash_unique_key_index(iseq, RNODE_HASH(root_node), &len);
            struct rb_callinfo_kwarg *kw_arg =
                rb_xmalloc_mul_add(len, sizeof(VALUE), sizeof(struct rb_callinfo_kwarg));
            VALUE *keywords = kw_arg->keywords;
            int i = 0;
            int j = 0;
            kw_arg->references = 0;
            kw_arg->keyword_len = len;

            *kw_arg_ptr = kw_arg;

            /* second pass: record unique keys and compile the values;
             * values of shadowed duplicate keys are compiled popped
             * (evaluated for effect but excluded from the call) */
            for (i=0; node != NULL; i++, node = RNODE_LIST(RNODE_LIST(node)->nd_next)->nd_next) {
                const NODE *key_node = RNODE_LIST(node)->nd_head;
                const NODE *val_node = RNODE_LIST(RNODE_LIST(node)->nd_next)->nd_head;
                int popped = TRUE;
                if (rb_ary_entry(key_index, i)) {
                    keywords[j] = get_symbol_value(iseq, key_node);
                    j++;
                    popped = FALSE;
                }
                NO_CHECK(COMPILE_(ret, "keyword values", val_node, popped));
            }
            RUBY_ASSERT(j == len);
            return TRUE;
        }
    }
    return FALSE;
}
4794 
4795 static int
4796 compile_args(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *node, NODE **kwnode_ptr)
4797 {
4798  int len = 0;
4799 
4800  for (; node; len++, node = RNODE_LIST(node)->nd_next) {
4801  if (CPDEBUG > 0) {
4802  EXPECT_NODE("compile_args", node, NODE_LIST, -1);
4803  }
4804 
4805  if (RNODE_LIST(node)->nd_next == NULL && keyword_node_p(RNODE_LIST(node)->nd_head)) { /* last node is kwnode */
4806  *kwnode_ptr = RNODE_LIST(node)->nd_head;
4807  }
4808  else {
4809  RUBY_ASSERT(!keyword_node_p(RNODE_LIST(node)->nd_head));
4810  NO_CHECK(COMPILE_(ret, "array element", RNODE_LIST(node)->nd_head, FALSE));
4811  }
4812  }
4813 
4814  return len;
4815 }
4816 
4817 static inline bool
4818 frozen_string_literal_p(const rb_iseq_t *iseq)
4819 {
4820  return ISEQ_COMPILE_DATA(iseq)->option->frozen_string_literal > 0;
4821 }
4822 
4823 static inline bool
4824 static_literal_node_p(const NODE *node, const rb_iseq_t *iseq, bool hash_key)
4825 {
4826  switch (nd_type(node)) {
4827  case NODE_SYM:
4828  case NODE_REGX:
4829  case NODE_LINE:
4830  case NODE_ENCODING:
4831  case NODE_INTEGER:
4832  case NODE_FLOAT:
4833  case NODE_RATIONAL:
4834  case NODE_IMAGINARY:
4835  case NODE_NIL:
4836  case NODE_TRUE:
4837  case NODE_FALSE:
4838  return TRUE;
4839  case NODE_STR:
4840  case NODE_FILE:
4841  return hash_key || frozen_string_literal_p(iseq);
4842  default:
4843  return FALSE;
4844  }
4845 }
4846 
static inline VALUE
static_literal_value(const NODE *node, rb_iseq_t *iseq)
{
    /* Return the Ruby object for a node accepted by static_literal_node_p.
     * Must be kept in sync with that predicate; any other node is a bug. */
    switch (nd_type(node)) {
      case NODE_INTEGER:
        return rb_node_integer_literal_val(node);
      case NODE_FLOAT:
        return rb_node_float_literal_val(node);
      case NODE_RATIONAL:
        return rb_node_rational_literal_val(node);
      case NODE_IMAGINARY:
        return rb_node_imaginary_literal_val(node);
      case NODE_NIL:
        return Qnil;
      case NODE_TRUE:
        return Qtrue;
      case NODE_FALSE:
        return Qfalse;
      case NODE_SYM:
        return rb_node_sym_string_val(node);
      case NODE_REGX:
        return rb_node_regx_string_val(node);
      case NODE_LINE:
        return rb_node_line_lineno_val(node);
      case NODE_ENCODING:
        return rb_node_encoding_val(node);
      case NODE_FILE:
      case NODE_STR:
        /* attach creation info (path and line) when debugging of frozen
         * string literals is requested */
        if (ISEQ_COMPILE_DATA(iseq)->option->debug_frozen_string_literal || RTEST(ruby_debug)) {
            VALUE lit = get_string_value(node);
            return rb_str_with_debug_created_info(lit, rb_iseq_path(iseq), (int)nd_line(node));
        }
        else {
            return get_string_value(node);
        }
      default:
        rb_bug("unexpected node: %s", ruby_node_name(nd_type(node)));
    }
}
4886 
static int
compile_array(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *node, int popped, bool first_chunk)
{
    /* Compile an array literal (NODE_LIST / NODE_ZLIST).
     * first_chunk: TRUE when this call creates the array (emits newarray),
     * FALSE when the elements are appended to an array already on the
     * stack (emits pushtoarray/concattoarray).
     * Returns 0 for an empty literal, 1 otherwise (-1 on unexpected node). */
    const NODE *line_node = node;

    if (nd_type_p(node, NODE_ZLIST)) {
        if (!popped) {
            ADD_INSN1(ret, line_node, newarray, INT2FIX(0));
        }
        return 0;
    }

    EXPECT_NODE("compile_array", node, NODE_LIST, -1);

    if (popped) {
        /* result unused: evaluate the elements for effect only */
        for (; node; node = RNODE_LIST(node)->nd_next) {
            NO_CHECK(COMPILE_(ret, "array element", RNODE_LIST(node)->nd_head, popped));
        }
        return 1;
    }

    /* Compilation of an array literal.
     * The following code is essentially the same as:
     *
     *   for (int count = 0; node; count++; node->nd_next) {
     *     compile(node->nd_head);
     *   }
     *   ADD_INSN(newarray, count);
     *
     * However, there are three points.
     *
     * - The code above causes stack overflow for a big string literal.
     *   The following limits the stack length up to max_stack_len.
     *
     *   [x1,x2,...,x10000] =>
     *     push x1  ; push x2  ; ...; push x256; newarray 256;
     *     push x257; push x258; ...; push x512; pushtoarray 256;
     *     push x513; push x514; ...; push x768; pushtoarray 256;
     *     ...
     *
     * - Long subarray can be optimized by pre-allocating a hidden array.
     *
     *   [1,2,3,...,100] =>
     *     duparray [1,2,3,...,100]
     *
     *   [x, 1,2,3,...,100, z] =>
     *     push x; newarray 1;
     *     putobject [1,2,3,...,100] (<- hidden array); concattoarray;
     *     push z; pushtoarray 1;
     *
     * - If the last element is a keyword, pushtoarraykwsplat should be emitted
     *   to only push it onto the array if it is not empty
     *   (Note: a keyword is NODE_HASH which is not static_literal_node_p.)
     *
     *   [1,2,3,**kw] =>
     *     putobject 1; putobject 2; putobject 3; newarray 3; ...; pushtoarraykwsplat kw
     */

    const int max_stack_len = 0x100;
    const int min_tmp_ary_len = 0x40;
    int stack_len = 0;

    /* Either create a new array, or push to the existing array */
#define FLUSH_CHUNK \
    if (stack_len) { \
        if (first_chunk) ADD_INSN1(ret, line_node, newarray, INT2FIX(stack_len)); \
        else ADD_INSN1(ret, line_node, pushtoarray, INT2FIX(stack_len)); \
        first_chunk = FALSE; \
        stack_len = 0; \
    }

    while (node) {
        int count = 1;

        /* pre-allocation check (this branch can be omittable) */
        if (static_literal_node_p(RNODE_LIST(node)->nd_head, iseq, false)) {
            /* count the elements that are optimizable */
            const NODE *node_tmp = RNODE_LIST(node)->nd_next;
            for (; node_tmp && static_literal_node_p(RNODE_LIST(node_tmp)->nd_head, iseq, false); node_tmp = RNODE_LIST(node_tmp)->nd_next)
                count++;

            if ((first_chunk && stack_len == 0 && !node_tmp) || count >= min_tmp_ary_len) {
                /* The literal contains only optimizable elements, or the subarray is long enough */
                VALUE ary = rb_ary_hidden_new(count);

                /* Create a hidden array */
                for (; count; count--, node = RNODE_LIST(node)->nd_next)
                    rb_ary_push(ary, static_literal_value(RNODE_LIST(node)->nd_head, iseq));
                OBJ_FREEZE(ary);

                /* Emit optimized code */
                FLUSH_CHUNK;
                if (first_chunk) {
                    ADD_INSN1(ret, line_node, duparray, ary);
                    first_chunk = FALSE;
                }
                else {
                    ADD_INSN1(ret, line_node, putobject, ary);
                    ADD_INSN(ret, line_node, concattoarray);
                }
                RB_OBJ_WRITTEN(iseq, Qundef, ary);
            }
        }

        /* Base case: Compile "count" elements */
        for (; count; count--, node = RNODE_LIST(node)->nd_next) {
            if (CPDEBUG > 0) {
                EXPECT_NODE("compile_array", node, NODE_LIST, -1);
            }

            if (!RNODE_LIST(node)->nd_next && keyword_node_p(RNODE_LIST(node)->nd_head)) {
                /* Create array or push existing non-keyword elements onto array */
                if (stack_len == 0 && first_chunk) {
                    ADD_INSN1(ret, line_node, newarray, INT2FIX(0));
                }
                else {
                    FLUSH_CHUNK;
                }
                NO_CHECK(COMPILE_(ret, "array element", RNODE_LIST(node)->nd_head, 0));
                ADD_INSN(ret, line_node, pushtoarraykwsplat);
                return 1;
            }
            else {
                NO_CHECK(COMPILE_(ret, "array element", RNODE_LIST(node)->nd_head, 0));
                stack_len++;
            }

            /* If there are many pushed elements, flush them to avoid stack overflow */
            if (stack_len >= max_stack_len) FLUSH_CHUNK;
        }
    }

    FLUSH_CHUNK;
#undef FLUSH_CHUNK
    return 1;
}
5023 
5024 static inline int
5025 static_literal_node_pair_p(const NODE *node, const rb_iseq_t *iseq)
5026 {
5027  return RNODE_LIST(node)->nd_head && static_literal_node_p(RNODE_LIST(node)->nd_head, iseq, true) && static_literal_node_p(RNODE_LIST(RNODE_LIST(node)->nd_next)->nd_head, iseq, false);
5028 }
5029 
5030 static int
5031 compile_hash(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *node, int method_call_keywords, int popped)
5032 {
5033  const NODE *line_node = node;
5034 
5035  node = RNODE_HASH(node)->nd_head;
5036 
5037  if (!node || nd_type_p(node, NODE_ZLIST)) {
5038  if (!popped) {
5039  ADD_INSN1(ret, line_node, newhash, INT2FIX(0));
5040  }
5041  return 0;
5042  }
5043 
5044  EXPECT_NODE("compile_hash", node, NODE_LIST, -1);
5045 
5046  if (popped) {
5047  for (; node; node = RNODE_LIST(node)->nd_next) {
5048  NO_CHECK(COMPILE_(ret, "hash element", RNODE_LIST(node)->nd_head, popped));
5049  }
5050  return 1;
5051  }
5052 
5053  /* Compilation of a hash literal (or keyword arguments).
5054  * This is very similar to compile_array, but there are some differences:
5055  *
5056  * - It contains key-value pairs. So we need to take every two elements.
5057  * We can assume that the length is always even.
5058  *
5059  * - Merging is done by a method call (id_core_hash_merge_ptr).
5060  * Sometimes we need to insert the receiver, so "anchor" is needed.
5061  * In addition, a method call is much slower than concatarray.
5062  * So it pays only when the subsequence is really long.
5063  * (min_tmp_hash_len must be much larger than min_tmp_ary_len.)
5064  *
5065  * - We need to handle keyword splat: **kw.
5066  * For **kw, the key part (node->nd_head) is NULL, and the value part
5067  * (node->nd_next->nd_head) is "kw".
5068  * The code is a bit difficult to avoid hash allocation for **{}.
5069  */
5070 
5071  const int max_stack_len = 0x100;
5072  const int min_tmp_hash_len = 0x800;
5073  int stack_len = 0;
5074  int first_chunk = 1;
5075  DECL_ANCHOR(anchor);
5076  INIT_ANCHOR(anchor);
5077 
5078  /* Convert pushed elements to a hash, and merge if needed */
5079 #define FLUSH_CHUNK() \
5080  if (stack_len) { \
5081  if (first_chunk) { \
5082  APPEND_LIST(ret, anchor); \
5083  ADD_INSN1(ret, line_node, newhash, INT2FIX(stack_len)); \
5084  } \
5085  else { \
5086  ADD_INSN1(ret, line_node, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE)); \
5087  ADD_INSN(ret, line_node, swap); \
5088  APPEND_LIST(ret, anchor); \
5089  ADD_SEND(ret, line_node, id_core_hash_merge_ptr, INT2FIX(stack_len + 1)); \
5090  } \
5091  INIT_ANCHOR(anchor); \
5092  first_chunk = stack_len = 0; \
5093  }
5094 
5095  while (node) {
5096  int count = 1;
5097 
5098  /* pre-allocation check (this branch can be omittable) */
5099  if (static_literal_node_pair_p(node, iseq)) {
5100  /* count the elements that are optimizable */
5101  const NODE *node_tmp = RNODE_LIST(RNODE_LIST(node)->nd_next)->nd_next;
5102  for (; node_tmp && static_literal_node_pair_p(node_tmp, iseq); node_tmp = RNODE_LIST(RNODE_LIST(node_tmp)->nd_next)->nd_next)
5103  count++;
5104 
5105  if ((first_chunk && stack_len == 0 && !node_tmp) || count >= min_tmp_hash_len) {
5106  /* The literal contains only optimizable elements, or the subsequence is long enough */
5107  VALUE ary = rb_ary_hidden_new(count);
5108 
5109  /* Create a hidden hash */
5110  for (; count; count--, node = RNODE_LIST(RNODE_LIST(node)->nd_next)->nd_next) {
5111  VALUE elem[2];
5112  elem[0] = static_literal_value(RNODE_LIST(node)->nd_head, iseq);
5113  elem[1] = static_literal_value(RNODE_LIST(RNODE_LIST(node)->nd_next)->nd_head, iseq);
5114  rb_ary_cat(ary, elem, 2);
5115  }
5116  VALUE hash = rb_hash_new_with_size(RARRAY_LEN(ary) / 2);
5118  hash = rb_obj_hide(hash);
5119  OBJ_FREEZE(hash);
5120 
5121  /* Emit optimized code */
5122  FLUSH_CHUNK();
5123  if (first_chunk) {
5124  ADD_INSN1(ret, line_node, duphash, hash);
5125  first_chunk = 0;
5126  }
5127  else {
5128  ADD_INSN1(ret, line_node, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
5129  ADD_INSN(ret, line_node, swap);
5130 
5131  ADD_INSN1(ret, line_node, putobject, hash);
5132 
5133  ADD_SEND(ret, line_node, id_core_hash_merge_kwd, INT2FIX(2));
5134  }
5135  RB_OBJ_WRITTEN(iseq, Qundef, hash);
5136  }
5137  }
5138 
5139  /* Base case: Compile "count" elements */
5140  for (; count; count--, node = RNODE_LIST(RNODE_LIST(node)->nd_next)->nd_next) {
5141 
5142  if (CPDEBUG > 0) {
5143  EXPECT_NODE("compile_hash", node, NODE_LIST, -1);
5144  }
5145 
5146  if (RNODE_LIST(node)->nd_head) {
5147  /* Normal key-value pair */
5148  NO_CHECK(COMPILE_(anchor, "hash key element", RNODE_LIST(node)->nd_head, 0));
5149  NO_CHECK(COMPILE_(anchor, "hash value element", RNODE_LIST(RNODE_LIST(node)->nd_next)->nd_head, 0));
5150  stack_len += 2;
5151 
5152  /* If there are many pushed elements, flush them to avoid stack overflow */
5153  if (stack_len >= max_stack_len) FLUSH_CHUNK();
5154  }
5155  else {
5156  /* kwsplat case: foo(..., **kw, ...) */
5157  FLUSH_CHUNK();
5158 
5159  const NODE *kw = RNODE_LIST(RNODE_LIST(node)->nd_next)->nd_head;
5160  int empty_kw = nd_type_p(kw, NODE_HASH) && (!RNODE_HASH(kw)->nd_head); /* foo( ..., **{}, ...) */
5161  int first_kw = first_chunk && stack_len == 0; /* foo(1,2,3, **kw, ...) */
5162  int last_kw = !RNODE_LIST(RNODE_LIST(node)->nd_next)->nd_next; /* foo( ..., **kw) */
5163  int only_kw = last_kw && first_kw; /* foo(1,2,3, **kw) */
5164 
5165  empty_kw = empty_kw || nd_type_p(kw, NODE_NIL); /* foo( ..., **nil, ...) */
5166  if (empty_kw) {
5167  if (only_kw && method_call_keywords) {
5168  /* **{} appears at the only keyword argument in method call,
5169  * so it won't be modified.
5170  * kw is a special NODE_LIT that contains a special empty hash,
5171  * so this emits: putobject {}.
5172  * This is only done for method calls and not for literal hashes,
5173  * because literal hashes should always result in a new hash.
5174  */
5175  NO_CHECK(COMPILE(ret, "keyword splat", kw));
5176  }
5177  else if (first_kw) {
5178  /* **{} appears as the first keyword argument, so it may be modified.
5179  * We need to create a fresh hash object.
5180  */
5181  ADD_INSN1(ret, line_node, newhash, INT2FIX(0));
5182  }
5183  /* Any empty keyword splats that are not the first can be ignored.
5184  * since merging an empty hash into the existing hash is the same
5185  * as not merging it. */
5186  }
5187  else {
5188  if (only_kw && method_call_keywords) {
5189  /* **kw is only keyword argument in method call.
5190  * Use directly. This will be not be flagged as mutable.
5191  * This is only done for method calls and not for literal hashes,
5192  * because literal hashes should always result in a new hash.
5193  */
5194  NO_CHECK(COMPILE(ret, "keyword splat", kw));
5195  }
5196  else {
5197  /* There is more than one keyword argument, or this is not a method
5198  * call. In that case, we need to add an empty hash (if first keyword),
5199  * or merge the hash to the accumulated hash (if not the first keyword).
5200  */
5201  ADD_INSN1(ret, line_node, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
5202  if (first_kw) ADD_INSN1(ret, line_node, newhash, INT2FIX(0));
5203  else ADD_INSN(ret, line_node, swap);
5204 
5205  NO_CHECK(COMPILE(ret, "keyword splat", kw));
5206 
5207  ADD_SEND(ret, line_node, id_core_hash_merge_kwd, INT2FIX(2));
5208  }
5209  }
5210 
5211  first_chunk = 0;
5212  }
5213  }
5214  }
5215 
5216  FLUSH_CHUNK();
5217 #undef FLUSH_CHUNK
5218  return 1;
5219 }
5220 
VALUE
rb_node_case_when_optimizable_literal(const NODE *const node)
{
    /* Return the literal value of a `when` clause that can serve as a
     * key in the case-dispatch table, or Qundef when the clause cannot
     * take part in that optimization. */
    switch (nd_type(node)) {
      case NODE_INTEGER:
        return rb_node_integer_literal_val(node);
      case NODE_FLOAT: {
        VALUE v = rb_node_float_literal_val(node);
        double ival;

        /* an integral float is folded to its integer counterpart */
        if (modf(RFLOAT_VALUE(v), &ival) == 0.0) {
            return FIXABLE(ival) ? LONG2FIX((long)ival) : rb_dbl2big(ival);
        }
        return v;
      }
      case NODE_RATIONAL:
      case NODE_IMAGINARY:
        /* not optimizable */
        return Qundef;
      case NODE_NIL:
        return Qnil;
      case NODE_TRUE:
        return Qtrue;
      case NODE_FALSE:
        return Qfalse;
      case NODE_SYM:
        return rb_node_sym_string_val(node);
      case NODE_LINE:
        return rb_node_line_lineno_val(node);
      case NODE_STR:
        return rb_node_str_string_val(node);
      case NODE_FILE:
        return rb_node_file_path_val(node);
    }
    return Qundef;
}
5256 
5257 static int
5258 when_vals(rb_iseq_t *iseq, LINK_ANCHOR *const cond_seq, const NODE *vals,
5259  LABEL *l1, int only_special_literals, VALUE literals)
5260 {
5261  while (vals) {
5262  const NODE *val = RNODE_LIST(vals)->nd_head;
5263  VALUE lit = rb_node_case_when_optimizable_literal(val);
5264 
5265  if (UNDEF_P(lit)) {
5266  only_special_literals = 0;
5267  }
5268  else if (NIL_P(rb_hash_lookup(literals, lit))) {
5269  rb_hash_aset(literals, lit, (VALUE)(l1) | 1);
5270  }
5271 
5272  if (nd_type_p(val, NODE_STR) || nd_type_p(val, NODE_FILE)) {
5273  debugp_param("nd_lit", get_string_value(val));
5274  lit = get_string_value(val);
5275  ADD_INSN1(cond_seq, val, putobject, lit);
5276  RB_OBJ_WRITTEN(iseq, Qundef, lit);
5277  }
5278  else {
5279  if (!COMPILE(cond_seq, "when cond", val)) return -1;
5280  }
5281 
5282  // Emit pattern === target
5283  ADD_INSN1(cond_seq, vals, topn, INT2FIX(1));
5284  ADD_CALL(cond_seq, vals, idEqq, INT2FIX(1));
5285  ADD_INSNL(cond_seq, val, branchif, l1);
5286  vals = RNODE_LIST(vals)->nd_next;
5287  }
5288  return only_special_literals;
5289 }
5290 
/*
 * Compile the value list of a `when` clause that contains a splat
 * (e.g. `when *a`, `when 1, *a`, `when *a, 1`).  Plain lists delegate to
 * when_vals; splatted parts are matched with the checkmatch instruction
 * in VM_CHECKMATCH_ARRAY mode so each element is tested with ===.
 * Every match branches to +l1+.  Returns COMPILE_OK/COMPILE_NG.
 */
static int
when_splat_vals(rb_iseq_t *iseq, LINK_ANCHOR *const cond_seq, const NODE *vals,
                LABEL *l1, int only_special_literals, VALUE literals)
{
    const NODE *line_node = vals;

    switch (nd_type(vals)) {
      case NODE_LIST:
        /* plain comma-separated values */
        if (when_vals(iseq, cond_seq, vals, l1, only_special_literals, literals) < 0)
            return COMPILE_NG;
        break;
      case NODE_SPLAT:
        /* `when *a`: splat to an array and === each element */
        ADD_INSN (cond_seq, line_node, dup);
        CHECK(COMPILE(cond_seq, "when splat", RNODE_SPLAT(vals)->nd_head));
        ADD_INSN1(cond_seq, line_node, splatarray, Qfalse);
        ADD_INSN1(cond_seq, line_node, checkmatch, INT2FIX(VM_CHECKMATCH_TYPE_CASE | VM_CHECKMATCH_ARRAY));
        ADD_INSNL(cond_seq, line_node, branchif, l1);
        break;
      case NODE_ARGSCAT:
        /* head values followed by splatted tail: handle each side recursively */
        CHECK(when_splat_vals(iseq, cond_seq, RNODE_ARGSCAT(vals)->nd_head, l1, only_special_literals, literals));
        CHECK(when_splat_vals(iseq, cond_seq, RNODE_ARGSCAT(vals)->nd_body, l1, only_special_literals, literals));
        break;
      case NODE_ARGSPUSH:
        /* splat followed by a trailing single value */
        CHECK(when_splat_vals(iseq, cond_seq, RNODE_ARGSPUSH(vals)->nd_head, l1, only_special_literals, literals));
        ADD_INSN (cond_seq, line_node, dup);
        CHECK(COMPILE(cond_seq, "when argspush body", RNODE_ARGSPUSH(vals)->nd_body));
        ADD_INSN1(cond_seq, line_node, checkmatch, INT2FIX(VM_CHECKMATCH_TYPE_CASE));
        ADD_INSNL(cond_seq, line_node, branchif, l1);
        break;
      default:
        /* arbitrary expression: treat its value as an array of patterns */
        ADD_INSN (cond_seq, line_node, dup);
        CHECK(COMPILE(cond_seq, "when val", vals));
        ADD_INSN1(cond_seq, line_node, splatarray, Qfalse);
        ADD_INSN1(cond_seq, line_node, checkmatch, INT2FIX(VM_CHECKMATCH_TYPE_CASE | VM_CHECKMATCH_ARRAY));
        ADD_INSNL(cond_seq, line_node, branchif, l1);
        break;
    }
    return COMPILE_OK;
}
5330 
5331 /* Multiple Assignment Handling
5332  *
5333  * In order to handle evaluation of multiple assignment such that the left hand side
5334  * is evaluated before the right hand side, we need to process the left hand side
5335  * and see if there are any attributes that need to be assigned, or constants set
5336  * on explicit objects. If so, we add instructions to evaluate the receiver of
5337  * any assigned attributes or constants before we process the right hand side.
5338  *
5339  * For a multiple assignment such as:
5340  *
5341  * l1.m1, l2[0] = r3, r4
5342  *
5343  * We start off evaluating l1 and l2, then we evaluate r3 and r4, then we
 * assign the result of r3 to l1.m1, and then the result of r4 to l2[0].
5345  * On the VM stack, this looks like:
5346  *
5347  * self # putself
5348  * l1 # send
5349  * l1, self # putself
5350  * l1, l2 # send
5351  * l1, l2, 0 # putobject 0
5352  * l1, l2, 0, [r3, r4] # after evaluation of RHS
5353  * l1, l2, 0, [r3, r4], r4, r3 # expandarray
5354  * l1, l2, 0, [r3, r4], r4, r3, l1 # topn 5
5355  * l1, l2, 0, [r3, r4], r4, l1, r3 # swap
5356  * l1, l2, 0, [r3, r4], r4, m1= # send
5357  * l1, l2, 0, [r3, r4], r4 # pop
5358  * l1, l2, 0, [r3, r4], r4, l2 # topn 3
5359  * l1, l2, 0, [r3, r4], r4, l2, 0 # topn 3
5360  * l1, l2, 0, [r3, r4], r4, l2, 0, r4 # topn 2
5361  * l1, l2, 0, [r3, r4], r4, []= # send
5362  * l1, l2, 0, [r3, r4], r4 # pop
5363  * l1, l2, 0, [r3, r4] # pop
5364  * [r3, r4], l2, 0, [r3, r4] # setn 3
5365  * [r3, r4], l2, 0 # pop
5366  * [r3, r4], l2 # pop
5367  * [r3, r4] # pop
5368  *
5369  * This is made more complex when you have to handle splats, post args,
5370  * and arbitrary levels of nesting. You need to keep track of the total
5371  * number of attributes to set, and for each attribute, how many entries
5372  * are on the stack before the final attribute, in order to correctly
5373  * calculate the topn value to use to get the receiver of the attribute
5374  * setter method.
5375  *
5376  * A brief description of the VM stack for simple multiple assignment
5377  * with no splat (rhs_array will not be present if the return value of
5378  * the multiple assignment is not needed):
5379  *
5380  * lhs_attr1, lhs_attr2, ..., rhs_array, ..., rhs_arg2, rhs_arg1
5381  *
5382  * For multiple assignment with splats, while processing the part before
5383  * the splat (splat+post here is an array of the splat and the post arguments):
5384  *
5385  * lhs_attr1, lhs_attr2, ..., rhs_array, splat+post, ..., rhs_arg2, rhs_arg1
5386  *
5387  * When processing the splat and post arguments:
5388  *
5389  * lhs_attr1, lhs_attr2, ..., rhs_array, ..., post_arg2, post_arg1, splat
5390  *
5391  * When processing nested multiple assignment, existing values on the stack
5392  * are kept. So for:
5393  *
5394  * (l1.m1, l2.m2), l3.m3, l4* = [r1, r2], r3, r4
5395  *
5396  * The stack layout would be the following before processing the nested
5397  * multiple assignment:
5398  *
5399  * l1, l2, [[r1, r2], r3, r4], [r4], r3, [r1, r2]
5400  *
5401  * In order to handle this correctly, we need to keep track of the nesting
5402  * level for each attribute assignment, as well as the attribute number
5403  * (left hand side attributes are processed left to right) and number of
5404  * arguments to pass to the setter method. struct masgn_lhs_node tracks
5405  * this information.
5406  *
5407  * We also need to track information for the entire multiple assignment, such
5408  * as the total number of arguments, and the current nesting level, to
5409  * handle both nested multiple assignment as well as cases where the
5410  * rhs is not needed. We also need to keep track of all attribute
 * assignments in this, which we do using a linked list. struct masgn_state
5412  * tracks this information.
5413  */
5414 
5416  INSN *before_insn;
5417  struct masgn_lhs_node *next;
5418  const NODE *line_node;
5419  int argn;
5420  int num_args;
5421  int lhs_pos;
5422 };
5423 
/* Bookkeeping for one (possibly nested) multiple assignment; shared by
 * compile_massign, compile_massign0 and compile_massign_lhs. */
struct masgn_state {
    struct masgn_lhs_node *first_memo; /* head of the LHS assignment list */
    struct masgn_lhs_node *last_memo;  /* tail of that list, for O(1) append */
    int lhs_level;                     /* stack-depth offset of the current nesting level */
    int num_args;                      /* running total of setter arguments so far */
    bool nested;                       /* true while compiling a nested masgn */
};
5431 
5432 static int
5433 add_masgn_lhs_node(struct masgn_state *state, int lhs_pos, const NODE *line_node, int argc, INSN *before_insn)
5434 {
5435  if (!state) {
5436  rb_bug("no masgn_state");
5437  }
5438 
5439  struct masgn_lhs_node *memo;
5440  memo = malloc(sizeof(struct masgn_lhs_node));
5441  if (!memo) {
5442  return COMPILE_NG;
5443  }
5444 
5445  memo->before_insn = before_insn;
5446  memo->line_node = line_node;
5447  memo->argn = state->num_args + 1;
5448  memo->num_args = argc;
5449  state->num_args += argc;
5450  memo->lhs_pos = lhs_pos;
5451  memo->next = NULL;
5452  if (!state->first_memo) {
5453  state->first_memo = memo;
5454  }
5455  else {
5456  state->last_memo->next = memo;
5457  }
5458  state->last_memo = memo;
5459 
5460  return COMPILE_OK;
5461 }
5462 
5463 static int compile_massign0(rb_iseq_t *iseq, LINK_ANCHOR *const pre, LINK_ANCHOR *const rhs, LINK_ANCHOR *const lhs, LINK_ANCHOR *const post, const NODE *const node, struct masgn_state *state, int popped);
5464 
/*
 * Compile one left-hand-side target of a multiple assignment.
 * - NODE_ATTRASGN (a.m=, a[i]=): compile the call popped into +pre+, then
 *   surgically detach the send instruction and move it into +lhs+, bumping
 *   its argc by one so the assigned value becomes the final argument.  The
 *   receiver/arguments stay in +pre+; topn instructions fetching them are
 *   inserted later by compile_massign via add_masgn_lhs_node.
 * - NODE_MASGN: nested (parenthesized) masgn, compiled recursively with an
 *   adjusted lhs_level.
 * - NODE_CDECL with no nd_vid (expr::C = ...): like ATTRASGN, the
 *   setconstant insn is detached and deferred; plain `C = ...` falls
 *   through to the default case.
 * - default: compile the assignment popped and drop its leading value-fetch
 *   element (the value is supplied by expandarray at runtime).
 * Returns COMPILE_OK/COMPILE_NG.
 */
static int
compile_massign_lhs(rb_iseq_t *iseq, LINK_ANCHOR *const pre, LINK_ANCHOR *const rhs, LINK_ANCHOR *const lhs, LINK_ANCHOR *const post, const NODE *const node, struct masgn_state *state, int lhs_pos)
{
    switch (nd_type(node)) {
      case NODE_ATTRASGN: {
        INSN *iobj;
        const NODE *line_node = node;

        CHECK(COMPILE_POPPED(pre, "masgn lhs (NODE_ATTRASGN)", node));

        bool safenav_call = false;
        LINK_ELEMENT *insn_element = LAST_ELEMENT(pre);
        iobj = (INSN *)get_prev_insn((INSN *)insn_element); /* send insn */
        ASSUME(iobj);
        ELEM_REMOVE(insn_element);
        if (!IS_INSN_ID(iobj, send)) {
            /* a&.m= : the element before the pop is the branch target, the
             * send is one insn further back */
            safenav_call = true;
            iobj = (INSN *)get_prev_insn(iobj);
            ELEM_INSERT_NEXT(&iobj->link, insn_element);
        }
        /* truncate pre so that iobj (and what follows) is detached from it */
        (pre->last = iobj->link.prev)->next = 0;

        const struct rb_callinfo *ci = (struct rb_callinfo *)OPERAND_AT(iobj, 0);
        int argc = vm_ci_argc(ci) + 1; /* +1 for the assigned value */
        ci = ci_argc_set(iseq, ci, argc);
        OPERAND_AT(iobj, 0) = (VALUE)ci;
        RB_OBJ_WRITTEN(iseq, Qundef, ci);

        if (argc == 1) {
            ADD_INSN(lhs, line_node, swap);
        }
        else {
            ADD_INSN1(lhs, line_node, topn, INT2FIX(argc));
        }

        if (!add_masgn_lhs_node(state, lhs_pos, line_node, argc, (INSN *)LAST_ELEMENT(lhs))) {
            return COMPILE_NG;
        }

        /* splice the detached send (and any trailing insns) into lhs */
        iobj->link.prev = lhs->last;
        lhs->last->next = &iobj->link;
        for (lhs->last = &iobj->link; lhs->last->next; lhs->last = lhs->last->next);
        if (vm_ci_flag(ci) & VM_CALL_ARGS_SPLAT) {
            int argc = vm_ci_argc(ci);
            bool dupsplat = false;
            ci = ci_argc_set(iseq, ci, argc - 1);
            if (!(vm_ci_flag(ci) & VM_CALL_ARGS_SPLAT_MUT)) {
                /* Given h[*a], _ = ary
                 * setup_args sets VM_CALL_ARGS_SPLAT and not VM_CALL_ARGS_SPLAT_MUT
                 * `a` must be dupped, because it will be appended with ary[0]
                 * Since you are dupping `a`, you can set VM_CALL_ARGS_SPLAT_MUT
                 */
                dupsplat = true;
                ci = ci_flag_set(iseq, ci, VM_CALL_ARGS_SPLAT_MUT);
            }
            OPERAND_AT(iobj, 0) = (VALUE)ci;
            RB_OBJ_WRITTEN(iseq, Qundef, iobj);

            /* Given: h[*a], h[*b, 1] = ary
             * h[*a] uses splatarray false and does not set VM_CALL_ARGS_SPLAT_MUT,
             * so this uses splatarray true on a to dup it before using pushtoarray
             * h[*b, 1] uses splatarray true and sets VM_CALL_ARGS_SPLAT_MUT,
             * so you can use pushtoarray directly
             */
            int line_no = nd_line(line_node);
            int node_id = nd_node_id(line_node);

            if (dupsplat) {
                INSERT_BEFORE_INSN(iobj, line_no, node_id, swap);
                INSERT_BEFORE_INSN1(iobj, line_no, node_id, splatarray, Qtrue);
                INSERT_BEFORE_INSN(iobj, line_no, node_id, swap);
            }
            INSERT_BEFORE_INSN1(iobj, line_no, node_id, pushtoarray, INT2FIX(1));
        }
        if (!safenav_call) {
            ADD_INSN(lhs, line_node, pop);
            if (argc != 1) {
                ADD_INSN(lhs, line_node, pop);
            }
        }
        /* drop the receiver/arguments left below on the stack */
        for (int i=0; i < argc; i++) {
            ADD_INSN(post, line_node, pop);
        }
        break;
      }
      case NODE_MASGN: {
        DECL_ANCHOR(nest_rhs);
        INIT_ANCHOR(nest_rhs);
        DECL_ANCHOR(nest_lhs);
        INIT_ANCHOR(nest_lhs);

        /* recurse with the nesting level shifted; restore state afterwards */
        int prev_level = state->lhs_level;
        bool prev_nested = state->nested;
        state->nested = 1;
        state->lhs_level = lhs_pos - 1;
        CHECK(compile_massign0(iseq, pre, nest_rhs, nest_lhs, post, node, state, 1));
        state->lhs_level = prev_level;
        state->nested = prev_nested;

        ADD_SEQ(lhs, nest_rhs);
        ADD_SEQ(lhs, nest_lhs);
        break;
      }
      case NODE_CDECL:
        if (!RNODE_CDECL(node)->nd_vid) {
            /* Special handling only needed for expr::C, not for C */
            INSN *iobj;

            CHECK(COMPILE_POPPED(pre, "masgn lhs (NODE_CDECL)", node));

            LINK_ELEMENT *insn_element = LAST_ELEMENT(pre);
            iobj = (INSN *)insn_element; /* setconstant insn */
            ELEM_REMOVE((LINK_ELEMENT *)get_prev_insn((INSN *)get_prev_insn(iobj)));
            ELEM_REMOVE((LINK_ELEMENT *)get_prev_insn(iobj));
            ELEM_REMOVE(insn_element);
            pre->last = iobj->link.prev;
            ADD_ELEM(lhs, (LINK_ELEMENT *)iobj);

            if (!add_masgn_lhs_node(state, lhs_pos, node, 1, (INSN *)LAST_ELEMENT(lhs))) {
                return COMPILE_NG;
            }

            ADD_INSN(post, node, pop);
            break;
        }
        /* Fallthrough */
      default: {
        DECL_ANCHOR(anchor);
        INIT_ANCHOR(anchor);
        CHECK(COMPILE_POPPED(anchor, "masgn lhs", node));
        ELEM_REMOVE(FIRST_ELEMENT(anchor));
        ADD_SEQ(lhs, anchor);
      }
    }

    return COMPILE_OK;
}
5602 
5603 static int
5604 compile_massign_opt_lhs(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *lhsn)
5605 {
5606  if (lhsn) {
5607  CHECK(compile_massign_opt_lhs(iseq, ret, RNODE_LIST(lhsn)->nd_next));
5608  CHECK(compile_massign_lhs(iseq, ret, ret, ret, ret, RNODE_LIST(lhsn)->nd_head, NULL, 0));
5609  }
5610  return COMPILE_OK;
5611 }
5612 
/*
 * Fast path for a popped multiple assignment whose LHS targets are all
 * simple variables (local/dynamic/instance/class) with no duplicates and
 * whose RHS is a plain list: compile each RHS value, pad with nil or pop
 * extras to match the LHS count, then assign right-to-left — no array is
 * ever built.  Returns 1 when the optimization applied, 0 to fall back
 * to the general path in compile_massign.
 */
static int
compile_massign_opt(rb_iseq_t *iseq, LINK_ANCHOR *const ret,
                    const NODE *rhsn, const NODE *orig_lhsn)
{
    VALUE mem[64];
    const int memsize = numberof(mem);
    int memindex = 0;
    int llen = 0, rlen = 0;
    int i;
    const NODE *lhsn = orig_lhsn;

/* Remember a variable id; bail out (return 0) on overflow or if the same
 * variable appears twice on the LHS (duplicate targets need the general
 * left-to-right evaluation order). */
#define MEMORY(v) { \
    int i; \
    if (memindex == memsize) return 0; \
    for (i=0; i<memindex; i++) { \
        if (mem[i] == (v)) return 0; \
    } \
    mem[memindex++] = (v); \
}

    if (rhsn == 0 || !nd_type_p(rhsn, NODE_LIST)) {
        return 0;
    }

    while (lhsn) {
        const NODE *ln = RNODE_LIST(lhsn)->nd_head;
        switch (nd_type(ln)) {
          case NODE_LASGN:
          case NODE_DASGN:
          case NODE_IASGN:
          case NODE_CVASGN:
            MEMORY(get_nd_vid(ln));
            break;
          default:
            return 0;
        }
        lhsn = RNODE_LIST(lhsn)->nd_next;
        llen++;
    }

    while (rhsn) {
        if (llen <= rlen) {
            /* surplus RHS values are evaluated for effect only */
            NO_CHECK(COMPILE_POPPED(ret, "masgn val (popped)", RNODE_LIST(rhsn)->nd_head));
        }
        else {
            NO_CHECK(COMPILE(ret, "masgn val", RNODE_LIST(rhsn)->nd_head));
        }
        rhsn = RNODE_LIST(rhsn)->nd_next;
        rlen++;
    }

    if (llen > rlen) {
        /* missing RHS values become nil */
        for (i=0; i<llen-rlen; i++) {
            ADD_INSN(ret, orig_lhsn, putnil);
        }
    }

    compile_massign_opt_lhs(iseq, ret, orig_lhsn);
    return 1;
}
5673 
/*
 * Compile one level of a multiple assignment (see the Multiple Assignment
 * Handling comment above).  Compiles each LHS target via
 * compile_massign_lhs, then the RHS (unless nested, in which case the
 * value is already on the stack), finishing with expandarray to spread
 * the RHS over the targets.  +popped+ controls whether the RHS array is
 * kept as the expression's value.
 */
static int
compile_massign0(rb_iseq_t *iseq, LINK_ANCHOR *const pre, LINK_ANCHOR *const rhs, LINK_ANCHOR *const lhs, LINK_ANCHOR *const post, const NODE *const node, struct masgn_state *state, int popped)
{
    const NODE *rhsn = RNODE_MASGN(node)->nd_value;
    const NODE *splatn = RNODE_MASGN(node)->nd_args;
    const NODE *lhsn = RNODE_MASGN(node)->nd_head;
    const NODE *lhsn_count = lhsn;
    int lhs_splat = (splatn && NODE_NAMED_REST_P(splatn)) ? 1 : 0;

    int llen = 0;
    int lpos = 0;

    /* first pass: count pre-splat LHS targets */
    while (lhsn_count) {
        llen++;
        lhsn_count = RNODE_LIST(lhsn_count)->nd_next;
    }
    while (lhsn) {
        /* (llen - lpos) is how many values remain above this target's slot */
        CHECK(compile_massign_lhs(iseq, pre, rhs, lhs, post, RNODE_LIST(lhsn)->nd_head, state, (llen - lpos) + lhs_splat + state->lhs_level));
        lpos++;
        lhsn = RNODE_LIST(lhsn)->nd_next;
    }

    if (lhs_splat) {
        if (nd_type_p(splatn, NODE_POSTARG)) {
            /*a, b, *r, p1, p2 */
            const NODE *postn = RNODE_POSTARG(splatn)->nd_2nd;
            const NODE *restn = RNODE_POSTARG(splatn)->nd_1st;
            int plen = (int)RNODE_LIST(postn)->as.nd_alen;
            int ppos = 0;
            int flag = 0x02 | (NODE_NAMED_REST_P(restn) ? 0x01 : 0x00);

            ADD_INSN2(lhs, splatn, expandarray, INT2FIX(plen), INT2FIX(flag));

            if (NODE_NAMED_REST_P(restn)) {
                CHECK(compile_massign_lhs(iseq, pre, rhs, lhs, post, restn, state, 1 + plen + state->lhs_level));
            }
            while (postn) {
                CHECK(compile_massign_lhs(iseq, pre, rhs, lhs, post, RNODE_LIST(postn)->nd_head, state, (plen - ppos) + state->lhs_level));
                ppos++;
                postn = RNODE_LIST(postn)->nd_next;
            }
        }
        else {
            /* a, b, *r */
            CHECK(compile_massign_lhs(iseq, pre, rhs, lhs, post, splatn, state, 1 + state->lhs_level));
        }
    }

    if (!state->nested) {
        /* nested masgn values are already on the stack from the outer level */
        NO_CHECK(COMPILE(rhs, "normal masgn rhs", rhsn));
    }

    if (!popped) {
        ADD_INSN(rhs, node, dup);
    }
    ADD_INSN2(rhs, node, expandarray, INT2FIX(llen), INT2FIX(lhs_splat));
    return COMPILE_OK;
}
5732 
/*
 * Compile a multiple assignment.  Tries the no-array fast path
 * (compile_massign_opt) first when the result is discarded and there is
 * no splat; otherwise runs the general algorithm: compile into pre/rhs/
 * lhs/post anchors, then insert the deferred topn instructions that
 * fetch each attribute-assignment receiver (recorded by
 * add_masgn_lhs_node) before concatenating the anchors onto +ret+.
 */
static int
compile_massign(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, int popped)
{
    if (!popped || RNODE_MASGN(node)->nd_args || !compile_massign_opt(iseq, ret, RNODE_MASGN(node)->nd_value, RNODE_MASGN(node)->nd_head)) {
        struct masgn_state state;
        state.lhs_level = popped ? 0 : 1;
        state.nested = 0;
        state.num_args = 0;
        state.first_memo = NULL;
        state.last_memo = NULL;

        DECL_ANCHOR(pre);
        INIT_ANCHOR(pre);
        DECL_ANCHOR(rhs);
        INIT_ANCHOR(rhs);
        DECL_ANCHOR(lhs);
        INIT_ANCHOR(lhs);
        DECL_ANCHOR(post);
        INIT_ANCHOR(post);
        int ok = compile_massign0(iseq, pre, rhs, lhs, post, node, &state, popped);

        /* insert receiver-fetching topn insns and free the memo list even
         * if compilation failed, then report the failure */
        struct masgn_lhs_node *memo = state.first_memo, *tmp_memo;
        while (memo) {
            VALUE topn_arg = INT2FIX((state.num_args - memo->argn) + memo->lhs_pos);
            for (int i = 0; i < memo->num_args; i++) {
                INSERT_BEFORE_INSN1(memo->before_insn, nd_line(memo->line_node), nd_node_id(memo->line_node), topn, topn_arg);
            }
            tmp_memo = memo->next;
            free(memo);
            memo = tmp_memo;
        }
        CHECK(ok);

        ADD_SEQ(ret, pre);
        ADD_SEQ(ret, rhs);
        ADD_SEQ(ret, lhs);
        if (!popped && state.num_args >= 1) {
            /* make sure rhs array is returned before popping */
            ADD_INSN1(ret, node, setn, INT2FIX(state.num_args));
        }
        ADD_SEQ(ret, post);
    }
    return COMPILE_OK;
}
5777 
5778 static VALUE
5779 collect_const_segments(rb_iseq_t *iseq, const NODE *node)
5780 {
5781  VALUE arr = rb_ary_new();
5782  for (;;) {
5783  switch (nd_type(node)) {
5784  case NODE_CONST:
5785  rb_ary_unshift(arr, ID2SYM(RNODE_CONST(node)->nd_vid));
5786  return arr;
5787  case NODE_COLON3:
5788  rb_ary_unshift(arr, ID2SYM(RNODE_COLON3(node)->nd_mid));
5789  rb_ary_unshift(arr, ID2SYM(idNULL));
5790  return arr;
5791  case NODE_COLON2:
5792  rb_ary_unshift(arr, ID2SYM(RNODE_COLON2(node)->nd_mid));
5793  node = RNODE_COLON2(node)->nd_head;
5794  break;
5795  default:
5796  return Qfalse;
5797  }
5798  }
5799 }
5800 
/*
 * Compile the receiver part of a scoped constant reference (the X in
 * X::Y).  Constant lookups themselves are appended to +body+; a
 * non-constant receiver expression is compiled into +pref+ instead.
 * The putobject Qtrue/Qfalse before each getconstant is the
 * "allow nil cbase" flag operand consumed by the instruction.
 */
static int
compile_const_prefix(rb_iseq_t *iseq, const NODE *const node,
                     LINK_ANCHOR *const pref, LINK_ANCHOR *const body)
{
    switch (nd_type(node)) {
      case NODE_CONST:
        debugi("compile_const_prefix - colon", RNODE_CONST(node)->nd_vid);
        ADD_INSN1(body, node, putobject, Qtrue);
        ADD_INSN1(body, node, getconstant, ID2SYM(RNODE_CONST(node)->nd_vid));
        break;
      case NODE_COLON3:
        /* ::X — look up from Object, discarding the current base */
        debugi("compile_const_prefix - colon3", RNODE_COLON3(node)->nd_mid);
        ADD_INSN(body, node, pop);
        ADD_INSN1(body, node, putobject, rb_cObject);
        ADD_INSN1(body, node, putobject, Qtrue);
        ADD_INSN1(body, node, getconstant, ID2SYM(RNODE_COLON3(node)->nd_mid));
        break;
      case NODE_COLON2:
        /* recurse left-to-right so outer scopes are resolved first */
        CHECK(compile_const_prefix(iseq, RNODE_COLON2(node)->nd_head, pref, body));
        debugi("compile_const_prefix - colon2", RNODE_COLON2(node)->nd_mid);
        ADD_INSN1(body, node, putobject, Qfalse);
        ADD_INSN1(body, node, getconstant, ID2SYM(RNODE_COLON2(node)->nd_mid));
        break;
      default:
        CHECK(COMPILE(pref, "const colon2 prefix", node));
        break;
    }
    return COMPILE_OK;
}
5830 
5831 static int
5832 compile_cpath(LINK_ANCHOR *const ret, rb_iseq_t *iseq, const NODE *cpath)
5833 {
5834  if (nd_type_p(cpath, NODE_COLON3)) {
5835  /* toplevel class ::Foo */
5836  ADD_INSN1(ret, cpath, putobject, rb_cObject);
5837  return VM_DEFINECLASS_FLAG_SCOPED;
5838  }
5839  else if (nd_type_p(cpath, NODE_COLON2) && RNODE_COLON2(cpath)->nd_head) {
5840  /* Bar::Foo */
5841  NO_CHECK(COMPILE(ret, "nd_else->nd_head", RNODE_COLON2(cpath)->nd_head));
5842  return VM_DEFINECLASS_FLAG_SCOPED;
5843  }
5844  else {
5845  /* class at cbase Foo */
5846  ADD_INSN1(ret, cpath, putspecialobject,
5847  INT2FIX(VM_SPECIAL_OBJECT_CONST_BASE));
5848  return 0;
5849  }
5850 }
5851 
5852 static inline int
5853 private_recv_p(const NODE *node)
5854 {
5855  NODE *recv = get_nd_recv(node);
5856  if (recv && nd_type_p(recv, NODE_SELF)) {
5857  return RNODE_SELF(recv)->nd_state != 0;
5858  }
5859  return 0;
5860 }
5861 
5862 static void
5863 defined_expr(rb_iseq_t *iseq, LINK_ANCHOR *const ret,
5864  const NODE *const node, LABEL **lfinish, VALUE needstr, bool ignore);
5865 
5866 static int
5867 compile_call(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, const enum node_type type, const NODE *const line_node, int popped, bool assume_receiver);
5868 
/*
 * Core of `defined?(expr)` compilation: emit code that leaves either the
 * appropriate "expression"/"method"/etc. string (needstr != Qfalse) or
 * true (needstr == Qfalse) on the stack, without fully evaluating +expr+.
 * Sub-expressions that can fail are checked recursively; each check
 * branches to lfinish[1] (allocated lazily) when undefined.  lfinish[2]
 * is used for chained method receivers.  +keep_result+ duplicates the
 * receiver value for the caller (used by compile_call via defined_expr).
 */
static void
defined_expr0(rb_iseq_t *iseq, LINK_ANCHOR *const ret,
              const NODE *const node, LABEL **lfinish, VALUE needstr,
              bool keep_result)
{
    enum defined_type expr_type = DEFINED_NOT_DEFINED;
    enum node_type type;
    const int line = nd_line(node);
    const NODE *line_node = node;

    switch (type = nd_type(node)) {

        /* easy literals */
      case NODE_NIL:
        expr_type = DEFINED_NIL;
        break;
      case NODE_SELF:
        expr_type = DEFINED_SELF;
        break;
      case NODE_TRUE:
        expr_type = DEFINED_TRUE;
        break;
      case NODE_FALSE:
        expr_type = DEFINED_FALSE;
        break;

      case NODE_HASH:
      case NODE_LIST:{
        /* every element must itself be defined */
        const NODE *vals = (nd_type(node) == NODE_HASH) ? RNODE_HASH(node)->nd_head : node;

        if (vals) {
            do {
                if (RNODE_LIST(vals)->nd_head) {
                    defined_expr0(iseq, ret, RNODE_LIST(vals)->nd_head, lfinish, Qfalse, false);

                    if (!lfinish[1]) {
                        lfinish[1] = NEW_LABEL(line);
                    }
                    ADD_INSNL(ret, line_node, branchunless, lfinish[1]);
                }
            } while ((vals = RNODE_LIST(vals)->nd_next) != NULL);
        }
      }
        /* fall through */
      case NODE_STR:
      case NODE_SYM:
      case NODE_REGX:
      case NODE_LINE:
      case NODE_FILE:
      case NODE_ENCODING:
      case NODE_INTEGER:
      case NODE_FLOAT:
      case NODE_RATIONAL:
      case NODE_IMAGINARY:
      case NODE_ZLIST:
      case NODE_AND:
      case NODE_OR:
      default:
        expr_type = DEFINED_EXPR;
        break;

      case NODE_SPLAT:
        defined_expr0(iseq, ret, RNODE_LIST(node)->nd_head, lfinish, Qfalse, false);
        if (!lfinish[1]) {
            lfinish[1] = NEW_LABEL(line);
        }
        ADD_INSNL(ret, line_node, branchunless, lfinish[1]);
        expr_type = DEFINED_EXPR;
        break;

        /* variables */
      case NODE_LVAR:
      case NODE_DVAR:
        expr_type = DEFINED_LVAR;
        break;

#define PUSH_VAL(type) (needstr == Qfalse ? Qtrue : rb_iseq_defined_string(type))
      case NODE_IVAR:
        ADD_INSN3(ret, line_node, definedivar,
                  ID2SYM(RNODE_IVAR(node)->nd_vid), get_ivar_ic_value(iseq,RNODE_IVAR(node)->nd_vid), PUSH_VAL(DEFINED_IVAR));
        return;

      case NODE_GVAR:
        ADD_INSN(ret, line_node, putnil);
        ADD_INSN3(ret, line_node, defined, INT2FIX(DEFINED_GVAR),
                  ID2SYM(RNODE_GVAR(node)->nd_vid), PUSH_VAL(DEFINED_GVAR));
        return;

      case NODE_CVAR:
        ADD_INSN(ret, line_node, putnil);
        ADD_INSN3(ret, line_node, defined, INT2FIX(DEFINED_CVAR),
                  ID2SYM(RNODE_CVAR(node)->nd_vid), PUSH_VAL(DEFINED_CVAR));
        return;

      case NODE_CONST:
        ADD_INSN(ret, line_node, putnil);
        ADD_INSN3(ret, line_node, defined, INT2FIX(DEFINED_CONST),
                  ID2SYM(RNODE_CONST(node)->nd_vid), PUSH_VAL(DEFINED_CONST));
        return;
      case NODE_COLON2:
        /* X::Y — the receiver must itself be defined first */
        if (!lfinish[1]) {
            lfinish[1] = NEW_LABEL(line);
        }
        defined_expr0(iseq, ret, RNODE_COLON2(node)->nd_head, lfinish, Qfalse, false);
        ADD_INSNL(ret, line_node, branchunless, lfinish[1]);
        NO_CHECK(COMPILE(ret, "defined/colon2#nd_head", RNODE_COLON2(node)->nd_head));

        if (rb_is_const_id(RNODE_COLON2(node)->nd_mid)) {
            /* X::CONST */
            ADD_INSN3(ret, line_node, defined, INT2FIX(DEFINED_CONST_FROM),
                      ID2SYM(RNODE_COLON2(node)->nd_mid), PUSH_VAL(DEFINED_CONST));
        }
        else {
            /* X::method — treated as a method call */
            ADD_INSN3(ret, line_node, defined, INT2FIX(DEFINED_METHOD),
                      ID2SYM(RNODE_COLON2(node)->nd_mid), PUSH_VAL(DEFINED_METHOD));
        }
        return;
      case NODE_COLON3:
        ADD_INSN1(ret, line_node, putobject, rb_cObject);
        ADD_INSN3(ret, line_node, defined,
                  INT2FIX(DEFINED_CONST_FROM), ID2SYM(RNODE_COLON3(node)->nd_mid), PUSH_VAL(DEFINED_CONST));
        return;

        /* method dispatch */
      case NODE_CALL:
      case NODE_OPCALL:
      case NODE_VCALL:
      case NODE_FCALL:
      case NODE_ATTRASGN:{
        const int explicit_receiver =
            (type == NODE_CALL || type == NODE_OPCALL ||
             (type == NODE_ATTRASGN && !private_recv_p(node)));

        if (get_nd_args(node) || explicit_receiver) {
            if (!lfinish[1]) {
                lfinish[1] = NEW_LABEL(line);
            }
            if (!lfinish[2]) {
                lfinish[2] = NEW_LABEL(line);
            }
        }
        if (get_nd_args(node)) {
            /* arguments must be defined before the call itself is checked */
            defined_expr0(iseq, ret, get_nd_args(node), lfinish, Qfalse, false);
            ADD_INSNL(ret, line_node, branchunless, lfinish[1]);
        }
        if (explicit_receiver) {
            defined_expr0(iseq, ret, get_nd_recv(node), lfinish, Qfalse, true);
            switch (nd_type(get_nd_recv(node))) {
              case NODE_CALL:
              case NODE_OPCALL:
              case NODE_VCALL:
              case NODE_FCALL:
              case NODE_ATTRASGN:
                /* chained call receiver: actually invoke it to get the value */
                ADD_INSNL(ret, line_node, branchunless, lfinish[2]);
                compile_call(iseq, ret, get_nd_recv(node), nd_type(get_nd_recv(node)), line_node, 0, true);
                break;
              default:
                ADD_INSNL(ret, line_node, branchunless, lfinish[1]);
                NO_CHECK(COMPILE(ret, "defined/recv", get_nd_recv(node)));
                break;
            }
            if (keep_result) {
                ADD_INSN(ret, line_node, dup);
            }
            ADD_INSN3(ret, line_node, defined, INT2FIX(DEFINED_METHOD),
                      ID2SYM(get_node_call_nd_mid(node)), PUSH_VAL(DEFINED_METHOD));
        }
        else {
            ADD_INSN(ret, line_node, putself);
            if (keep_result) {
                ADD_INSN(ret, line_node, dup);
            }
            ADD_INSN3(ret, line_node, defined, INT2FIX(DEFINED_FUNC),
                      ID2SYM(get_node_call_nd_mid(node)), PUSH_VAL(DEFINED_METHOD));
        }
        return;
      }

      case NODE_YIELD:
        ADD_INSN(ret, line_node, putnil);
        ADD_INSN3(ret, line_node, defined, INT2FIX(DEFINED_YIELD), 0,
                  PUSH_VAL(DEFINED_YIELD));
        iseq_set_use_block(ISEQ_BODY(iseq)->local_iseq);
        return;

      case NODE_BACK_REF:
      case NODE_NTH_REF:
        ADD_INSN(ret, line_node, putnil);
        ADD_INSN3(ret, line_node, defined, INT2FIX(DEFINED_REF),
                  INT2FIX((RNODE_BACK_REF(node)->nd_nth << 1) | (type == NODE_BACK_REF)),
                  PUSH_VAL(DEFINED_GVAR));
        return;

      case NODE_SUPER:
      case NODE_ZSUPER:
        ADD_INSN(ret, line_node, putnil);
        ADD_INSN3(ret, line_node, defined, INT2FIX(DEFINED_ZSUPER), 0,
                  PUSH_VAL(DEFINED_ZSUPER));
        return;

#undef PUSH_VAL
      case NODE_OP_ASGN1:
      case NODE_OP_ASGN2:
      case NODE_OP_ASGN_OR:
      case NODE_OP_ASGN_AND:
      case NODE_MASGN:
      case NODE_LASGN:
      case NODE_DASGN:
      case NODE_GASGN:
      case NODE_IASGN:
      case NODE_CDECL:
      case NODE_CVASGN:
      case NODE_OP_CDECL:
        expr_type = DEFINED_ASGN;
        break;
    }

    RUBY_ASSERT(expr_type != DEFINED_NOT_DEFINED);

    /* statically-known result: push the string or true directly */
    if (needstr != Qfalse) {
        VALUE str = rb_iseq_defined_string(expr_type);
        ADD_INSN1(ret, line_node, putobject, str);
    }
    else {
        ADD_INSN1(ret, line_node, putobject, Qtrue);
    }
}
6095 
/* Body builder for the throw-away rescue iseq used by defined_expr():
 * the rescue handler simply pushes nil, so an exception raised while
 * evaluating the guarded expression makes defined? yield nil. */
static void
build_defined_rescue_iseq(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const void *unused)
{
    ADD_SYNTHETIC_INSN(ret, 0, -1, putnil);
    iseq_set_exception_local_table(iseq);
}
6102 
6103 static void
6104 defined_expr(rb_iseq_t *iseq, LINK_ANCHOR *const ret,
6105  const NODE *const node, LABEL **lfinish, VALUE needstr, bool ignore)
6106 {
6107  LINK_ELEMENT *lcur = ret->last;
6108  defined_expr0(iseq, ret, node, lfinish, needstr, false);
6109  if (lfinish[1]) {
6110  int line = nd_line(node);
6111  LABEL *lstart = NEW_LABEL(line);
6112  LABEL *lend = NEW_LABEL(line);
6113  const rb_iseq_t *rescue;
6115  rb_iseq_new_with_callback_new_callback(build_defined_rescue_iseq, NULL);
6116  rescue = new_child_iseq_with_callback(iseq, ifunc,
6117  rb_str_concat(rb_str_new2("defined guard in "),
6118  ISEQ_BODY(iseq)->location.label),
6119  iseq, ISEQ_TYPE_RESCUE, 0);
6120  lstart->rescued = LABEL_RESCUE_BEG;
6121  lend->rescued = LABEL_RESCUE_END;
6122  APPEND_LABEL(ret, lcur, lstart);
6123  ADD_LABEL(ret, lend);
6124  if (!ignore) {
6125  ADD_CATCH_ENTRY(CATCH_TYPE_RESCUE, lstart, lend, rescue, lfinish[1]);
6126  }
6127  }
6128 }
6129 
/*
 * Compile a NODE_DEFINED (`defined?(expr)`).  `defined?()` with no
 * argument is the constant string for DEFINED_NIL.  Otherwise delegate to
 * defined_expr; if guards were emitted (lfinish[1]), prepend a putnil
 * placeholder under the result and emit the label/pop epilogue that the
 * branchunless jumps in defined_expr0 target.
 */
static int
compile_defined_expr(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, VALUE needstr, bool ignore)
{
    const int line = nd_line(node);
    const NODE *line_node = node;
    if (!RNODE_DEFINED(node)->nd_head) {
        VALUE str = rb_iseq_defined_string(DEFINED_NIL);
        ADD_INSN1(ret, line_node, putobject, str);
    }
    else {
        LABEL *lfinish[3];
        LINK_ELEMENT *last = ret->last;
        lfinish[0] = NEW_LABEL(line);
        lfinish[1] = 0; /* "undefined" target, allocated lazily */
        lfinish[2] = 0; /* chained-receiver target, allocated lazily */
        defined_expr(iseq, ret, RNODE_DEFINED(node)->nd_head, lfinish, needstr, ignore);
        if (lfinish[1]) {
            /* insert putnil before the emitted checks: it becomes the nil
             * result when a check branches to lfinish[1] */
            ELEM_INSERT_NEXT(last, &new_insn_body(iseq, nd_line(line_node), nd_node_id(line_node), BIN(putnil), 0)->link);
            ADD_INSN(ret, line_node, swap);
            if (lfinish[2]) {
                ADD_LABEL(ret, lfinish[2]);
            }
            ADD_INSN(ret, line_node, pop);
            ADD_LABEL(ret, lfinish[1]);
        }
        ADD_LABEL(ret, lfinish[0]);
    }
    return COMPILE_OK;
}
6159 
6160 static VALUE
6161 make_name_for_block(const rb_iseq_t *orig_iseq)
6162 {
6163  int level = 1;
6164  const rb_iseq_t *iseq = orig_iseq;
6165 
6166  if (ISEQ_BODY(orig_iseq)->parent_iseq != 0) {
6167  while (ISEQ_BODY(orig_iseq)->local_iseq != iseq) {
6168  if (ISEQ_BODY(iseq)->type == ISEQ_TYPE_BLOCK) {
6169  level++;
6170  }
6171  iseq = ISEQ_BODY(iseq)->parent_iseq;
6172  }
6173  }
6174 
6175  if (level == 1) {
6176  return rb_sprintf("block in %"PRIsVALUE, ISEQ_BODY(iseq)->location.label);
6177  }
6178  else {
6179  return rb_sprintf("block (%d levels) in %"PRIsVALUE, level, ISEQ_BODY(iseq)->location.label);
6180  }
6181 }
6182 
6183 static void
6184 push_ensure_entry(rb_iseq_t *iseq,
6186  struct ensure_range *er, const void *const node)
6187 {
6188  enl->ensure_node = node;
6189  enl->prev = ISEQ_COMPILE_DATA(iseq)->ensure_node_stack; /* prev */
6190  enl->erange = er;
6191  ISEQ_COMPILE_DATA(iseq)->ensure_node_stack = enl;
6192 }
6193 
/*
 * Split the last range in +erange+'s list around [lstart, lend): the
 * existing tail range is truncated to end at lstart, and a new range
 * starting at lend (inheriting the old end label) is appended.  Used when
 * an ensure body is emitted inline in the middle of a protected region.
 */
static void
add_ensure_range(rb_iseq_t *iseq, struct ensure_range *erange,
                 LABEL *lstart, LABEL *lend)
{
    struct ensure_range *ne =
        compile_data_alloc(iseq, sizeof(struct ensure_range));

    /* advance to the last range in the list */
    while (erange->next != 0) {
        erange = erange->next;
    }
    ne->next = 0;
    ne->begin = lend;
    ne->end = erange->end;
    erange->end = lstart;

    erange->next = ne;
}
6211 
6212 static bool
6213 can_add_ensure_iseq(const rb_iseq_t *iseq)
6214 {
6216  if (ISEQ_COMPILE_DATA(iseq)->in_rescue && (e = ISEQ_COMPILE_DATA(iseq)->ensure_node_stack) != NULL) {
6217  while (e) {
6218  if (e->ensure_node) return false;
6219  e = e->prev;
6220  }
6221  }
6222  return true;
6223 }
6224 
6225 static void
6226 add_ensure_iseq(LINK_ANCHOR *const ret, rb_iseq_t *iseq, int is_return)
6227 {
6228  RUBY_ASSERT(can_add_ensure_iseq(iseq));
6229 
6231  ISEQ_COMPILE_DATA(iseq)->ensure_node_stack;
6232  struct iseq_compile_data_ensure_node_stack *prev_enlp = enlp;
6233  DECL_ANCHOR(ensure);
6234 
6235  INIT_ANCHOR(ensure);
6236  while (enlp) {
6237  if (enlp->erange != NULL) {
6238  DECL_ANCHOR(ensure_part);
6239  LABEL *lstart = NEW_LABEL(0);
6240  LABEL *lend = NEW_LABEL(0);
6241  INIT_ANCHOR(ensure_part);
6242 
6243  add_ensure_range(iseq, enlp->erange, lstart, lend);
6244 
6245  ISEQ_COMPILE_DATA(iseq)->ensure_node_stack = enlp->prev;
6246  ADD_LABEL(ensure_part, lstart);
6247  NO_CHECK(COMPILE_POPPED(ensure_part, "ensure part", enlp->ensure_node));
6248  ADD_LABEL(ensure_part, lend);
6249  ADD_SEQ(ensure, ensure_part);
6250  }
6251  else {
6252  if (!is_return) {
6253  break;
6254  }
6255  }
6256  enlp = enlp->prev;
6257  }
6258  ISEQ_COMPILE_DATA(iseq)->ensure_node_stack = prev_enlp;
6259  ADD_SEQ(ret, ensure);
6260 }
6261 
#if RUBY_DEBUG
/* Debug-only helper: does the last element of an argument list (or the
 * node itself) look like a keyword-argument node? */
static int
check_keyword(const NODE *node)
{
    /* This check is essentially a code clone of compile_keyword_arg. */

    if (nd_type_p(node, NODE_LIST)) {
        /* inspect the head of the final list element */
        const NODE *elt = node;
        for (; RNODE_LIST(elt)->nd_next; elt = RNODE_LIST(elt)->nd_next)
            ;
        node = RNODE_LIST(elt)->nd_head;
    }

    return keyword_node_p(node);
}
#endif
6278 
6279 static bool
6280 keyword_node_single_splat_p(NODE *kwnode)
6281 {
6282  RUBY_ASSERT(keyword_node_p(kwnode));
6283 
6284  NODE *node = RNODE_HASH(kwnode)->nd_head;
6285  return RNODE_LIST(node)->nd_head == NULL &&
6286  RNODE_LIST(RNODE_LIST(node)->nd_next)->nd_next == NULL;
6287 }
6288 
/* Compile a single `**kw` splat so the callee receives a mutable copy:
 * push the frozen core object and an empty hash, compile the splatted
 * hash on top, then merge via core#hash_merge_kwd, and mark the call
 * with VM_CALL_KW_SPLAT_MUT. */
static void
compile_single_keyword_splat_mutable(rb_iseq_t *iseq, LINK_ANCHOR *const args, const NODE *argn,
                                     NODE *kwnode, unsigned int *flag_ptr)
{
    *flag_ptr |= VM_CALL_KW_SPLAT_MUT;
    ADD_INSN1(args, argn, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
    ADD_INSN1(args, argn, newhash, INT2FIX(0));
    compile_hash(iseq, args, kwnode, TRUE, FALSE);
    ADD_SEND(args, argn, id_core_hash_merge_kwd, INT2FIX(2));
}
6299 
/* Bit flags threaded through setup_args/setup_args_core via *dup_rest. */
#define SPLATARRAY_FALSE 0      /* splatted array need not be duplicated */
#define SPLATARRAY_TRUE 1       /* splatted array must be duplicated (splatarray true) */
#define DUP_SINGLE_KW_SPLAT 2   /* a lone **kw splat must be duplicated */
6303 
/* Compile the argument nodes `argn` of a call into `args` and return the
 * resulting argument count (excluding any keyword hash folded into
 * VM_CALL_KWARG/VM_CALL_KW_SPLAT handling).
 *
 * dup_rest  - in/out bitset of SPLATARRAY_TRUE / DUP_SINGLE_KW_SPLAT; the
 *             SPLATARRAY_TRUE bit is consumed by the first splat emitted.
 * flag_ptr  - out: VM_CALL_* flags for the call; NULL in recursive calls
 *             for the leading (head) part of ARGSCAT/ARGSPUSH.
 * kwarg_ptr - out: keyword-argument metadata when keywords can be passed
 *             as VM_CALL_KWARG.
 */
static int
setup_args_core(rb_iseq_t *iseq, LINK_ANCHOR *const args, const NODE *argn,
                unsigned int *dup_rest, unsigned int *flag_ptr, struct rb_callinfo_kwarg **kwarg_ptr)
{
    if (!argn) return 0;

    NODE *kwnode = NULL;

    switch (nd_type(argn)) {
      case NODE_LIST: {
        // f(x, y, z)
        int len = compile_args(iseq, args, argn, &kwnode);
        RUBY_ASSERT(flag_ptr == NULL || (*flag_ptr & VM_CALL_ARGS_SPLAT) == 0);

        if (kwnode) {
            if (compile_keyword_arg(iseq, args, kwnode, kwarg_ptr, flag_ptr)) {
                /* keywords passed via callinfo; hash no longer an argument */
                len -= 1;
            }
            else {
                if (keyword_node_single_splat_p(kwnode) && (*dup_rest & DUP_SINGLE_KW_SPLAT)) {
                    compile_single_keyword_splat_mutable(iseq, args, argn, kwnode, flag_ptr);
                }
                else {
                    compile_hash(iseq, args, kwnode, TRUE, FALSE);
                }
            }
        }

        return len;
      }
      case NODE_SPLAT: {
        // f(*a)
        NO_CHECK(COMPILE(args, "args (splat)", RNODE_SPLAT(argn)->nd_head));
        ADD_INSN1(args, argn, splatarray, RBOOL(*dup_rest & SPLATARRAY_TRUE));
        /* only the first splat needs the duplicating splatarray */
        if (*dup_rest & SPLATARRAY_TRUE) *dup_rest &= ~SPLATARRAY_TRUE;
        if (flag_ptr) *flag_ptr |= VM_CALL_ARGS_SPLAT;
        RUBY_ASSERT(flag_ptr == NULL || (*flag_ptr & VM_CALL_KW_SPLAT) == 0);
        return 1;
      }
      case NODE_ARGSCAT: {
        /* f(*a, b, ...) — head splat followed by more arguments */
        if (flag_ptr) *flag_ptr |= VM_CALL_ARGS_SPLAT;
        int argc = setup_args_core(iseq, args, RNODE_ARGSCAT(argn)->nd_head, dup_rest, NULL, NULL);
        bool args_pushed = false;

        if (nd_type_p(RNODE_ARGSCAT(argn)->nd_body, NODE_LIST)) {
            /* trailing literal args are pushed onto the splat array */
            int rest_len = compile_args(iseq, args, RNODE_ARGSCAT(argn)->nd_body, &kwnode);
            if (kwnode) rest_len--;
            ADD_INSN1(args, argn, pushtoarray, INT2FIX(rest_len));
            args_pushed = true;
        }
        else {
            RUBY_ASSERT(!check_keyword(RNODE_ARGSCAT(argn)->nd_body));
            NO_CHECK(COMPILE(args, "args (cat: splat)", RNODE_ARGSCAT(argn)->nd_body));
        }

        if (nd_type_p(RNODE_ARGSCAT(argn)->nd_head, NODE_LIST)) {
            ADD_INSN1(args, argn, splatarray, RBOOL(*dup_rest & SPLATARRAY_TRUE));
            if (*dup_rest & SPLATARRAY_TRUE) *dup_rest &= ~SPLATARRAY_TRUE;
            argc += 1;
        }
        else if (!args_pushed) {
            ADD_INSN(args, argn, concattoarray);
        }

        // f(..., *a, ..., k1:1, ...) #=> f(..., *[*a, ...], **{k1:1, ...})
        if (kwnode) {
            // kwsplat
            *flag_ptr |= VM_CALL_KW_SPLAT;
            compile_hash(iseq, args, kwnode, TRUE, FALSE);
            argc += 1;
        }

        return argc;
      }
      case NODE_ARGSPUSH: {
        /* f(*a, b) — splat followed by a single pushed argument */
        if (flag_ptr) *flag_ptr |= VM_CALL_ARGS_SPLAT;
        int argc = setup_args_core(iseq, args, RNODE_ARGSPUSH(argn)->nd_head, dup_rest, NULL, NULL);

        if (nd_type_p(RNODE_ARGSPUSH(argn)->nd_body, NODE_LIST)) {
            int rest_len = compile_args(iseq, args, RNODE_ARGSPUSH(argn)->nd_body, &kwnode);
            if (kwnode) rest_len--;
            ADD_INSN1(args, argn, newarray, INT2FIX(rest_len));
            ADD_INSN1(args, argn, pushtoarray, INT2FIX(1));
        }
        else {
            if (keyword_node_p(RNODE_ARGSPUSH(argn)->nd_body)) {
                kwnode = RNODE_ARGSPUSH(argn)->nd_body;
            }
            else {
                NO_CHECK(COMPILE(args, "args (cat: splat)", RNODE_ARGSPUSH(argn)->nd_body));
                ADD_INSN1(args, argn, pushtoarray, INT2FIX(1));
            }
        }

        if (kwnode) {
            // f(*a, k:1)
            *flag_ptr |= VM_CALL_KW_SPLAT;
            if (!keyword_node_single_splat_p(kwnode)) {
                /* a literal hash we just built is always safe to mutate */
                *flag_ptr |= VM_CALL_KW_SPLAT_MUT;
                compile_hash(iseq, args, kwnode, TRUE, FALSE);
            }
            else if (*dup_rest & DUP_SINGLE_KW_SPLAT) {
                compile_single_keyword_splat_mutable(iseq, args, argn, kwnode, flag_ptr);
            }
            else {
                compile_hash(iseq, args, kwnode, TRUE, FALSE);
            }
            argc += 1;
        }

        return argc;
      }
      default: {
        UNKNOWN_NODE("setup_arg", argn, Qnil);
      }
    }
}
6421 
6422 static void
6423 setup_args_splat_mut(unsigned int *flag, int dup_rest, int initial_dup_rest)
6424 {
6425  if ((*flag & VM_CALL_ARGS_SPLAT) && dup_rest != initial_dup_rest) {
6426  *flag |= VM_CALL_ARGS_SPLAT_MUT;
6427  }
6428 }
6429 
6430 static bool
6431 setup_args_dup_rest_p(const NODE *argn)
6432 {
6433  switch(nd_type(argn)) {
6434  case NODE_LVAR:
6435  case NODE_DVAR:
6436  case NODE_GVAR:
6437  case NODE_IVAR:
6438  case NODE_CVAR:
6439  case NODE_CONST:
6440  case NODE_COLON3:
6441  case NODE_INTEGER:
6442  case NODE_FLOAT:
6443  case NODE_RATIONAL:
6444  case NODE_IMAGINARY:
6445  case NODE_STR:
6446  case NODE_SYM:
6447  case NODE_REGX:
6448  case NODE_SELF:
6449  case NODE_NIL:
6450  case NODE_TRUE:
6451  case NODE_FALSE:
6452  case NODE_LAMBDA:
6453  case NODE_NTH_REF:
6454  case NODE_BACK_REF:
6455  return false;
6456  case NODE_COLON2:
6457  return setup_args_dup_rest_p(RNODE_COLON2(argn)->nd_head);
6458  default:
6459  return true;
6460  }
6461 }
6462 
/* Top-level argument compiler for a call: decides how much defensive
 * duplication splatted arrays / keyword splats need (dup_rest bits),
 * handles block-pass (`&blk`) and `...` forwarding, then delegates to
 * setup_args_core. Returns the argument count as a Fixnum and fills in
 * *flag and *keywords for the call instruction. */
static VALUE
setup_args(rb_iseq_t *iseq, LINK_ANCHOR *const args, const NODE *argn,
           unsigned int *flag, struct rb_callinfo_kwarg **keywords)
{
    VALUE ret;
    unsigned int dup_rest = SPLATARRAY_TRUE, initial_dup_rest;

    if (argn) {
        const NODE *check_arg = nd_type_p(argn, NODE_BLOCK_PASS) ?
            RNODE_BLOCK_PASS(argn)->nd_head : argn;

        if (check_arg) {
            switch(nd_type(check_arg)) {
              case(NODE_SPLAT):
                // avoid caller side array allocation for f(*arg)
                dup_rest = SPLATARRAY_FALSE;
                break;
              case(NODE_ARGSCAT):
                // avoid caller side array allocation for f(1, *arg)
                dup_rest = !nd_type_p(RNODE_ARGSCAT(check_arg)->nd_head, NODE_LIST);
                break;
              case(NODE_ARGSPUSH):
                // avoid caller side array allocation for f(*arg, **hash) and f(1, *arg, **hash)
                dup_rest = !((nd_type_p(RNODE_ARGSPUSH(check_arg)->nd_head, NODE_SPLAT) ||
                              (nd_type_p(RNODE_ARGSPUSH(check_arg)->nd_head, NODE_ARGSCAT) &&
                               nd_type_p(RNODE_ARGSCAT(RNODE_ARGSPUSH(check_arg)->nd_head)->nd_head, NODE_LIST))) &&
                             nd_type_p(RNODE_ARGSPUSH(check_arg)->nd_body, NODE_HASH) &&
                             !RNODE_HASH(RNODE_ARGSPUSH(check_arg)->nd_body)->nd_brace);

                if (dup_rest == SPLATARRAY_FALSE) {
                    // require allocation for keyword key/value/splat that may modify splatted argument
                    NODE *node = RNODE_HASH(RNODE_ARGSPUSH(check_arg)->nd_body)->nd_head;
                    while (node) {
                        NODE *key_node = RNODE_LIST(node)->nd_head;
                        if (key_node && setup_args_dup_rest_p(key_node)) {
                            dup_rest = SPLATARRAY_TRUE;
                            break;
                        }

                        node = RNODE_LIST(node)->nd_next;
                        NODE *value_node = RNODE_LIST(node)->nd_head;
                        if (setup_args_dup_rest_p(value_node)) {
                            dup_rest = SPLATARRAY_TRUE;
                            break;
                        }

                        node = RNODE_LIST(node)->nd_next;
                    }
                }
                break;
              default:
                break;
            }
        }

        if (check_arg != argn && setup_args_dup_rest_p(RNODE_BLOCK_PASS(argn)->nd_body)) {
            // for block pass that may modify splatted argument, dup rest and kwrest if given
            dup_rest = SPLATARRAY_TRUE | DUP_SINGLE_KW_SPLAT;
        }
    }
    initial_dup_rest = dup_rest;

    if (argn && nd_type_p(argn, NODE_BLOCK_PASS)) {
        DECL_ANCHOR(arg_block);
        INIT_ANCHOR(arg_block);

        if (RNODE_BLOCK_PASS(argn)->forwarding && ISEQ_BODY(ISEQ_BODY(iseq)->local_iseq)->param.flags.forwardable) {
            /* `...` forwarding: read the hidden forwarding local instead of
             * compiling an explicit block argument */
            int idx = ISEQ_BODY(ISEQ_BODY(iseq)->local_iseq)->local_table_size;// - get_local_var_idx(iseq, idDot3);

            RUBY_ASSERT(nd_type_p(RNODE_BLOCK_PASS(argn)->nd_head, NODE_ARGSPUSH));
            const NODE * arg_node =
                RNODE_ARGSPUSH(RNODE_BLOCK_PASS(argn)->nd_head)->nd_head;

            int argc = 0;

            // Only compile leading args:
            // foo(x, y, ...)
            //     ^^^^
            if (nd_type_p(arg_node, NODE_ARGSCAT)) {
                argc += setup_args_core(iseq, args, RNODE_ARGSCAT(arg_node)->nd_head, &dup_rest, flag, keywords);
            }

            *flag |= VM_CALL_FORWARDING;

            ADD_GETLOCAL(args, argn, idx, get_lvar_level(iseq));
            setup_args_splat_mut(flag, dup_rest, initial_dup_rest);
            return INT2FIX(argc);
        }
        else {
            *flag |= VM_CALL_ARGS_BLOCKARG;

            NO_CHECK(COMPILE(arg_block, "block", RNODE_BLOCK_PASS(argn)->nd_body));
        }

        if (LIST_INSN_SIZE_ONE(arg_block)) {
            LINK_ELEMENT *elem = FIRST_ELEMENT(arg_block);
            if (IS_INSN(elem)) {
                INSN *iobj = (INSN *)elem;
                if (iobj->insn_id == BIN(getblockparam)) {
                    /* pass the proxy to avoid materializing a Proc */
                    iobj->insn_id = BIN(getblockparamproxy);
                }
            }
        }
        ret = INT2FIX(setup_args_core(iseq, args, RNODE_BLOCK_PASS(argn)->nd_head, &dup_rest, flag, keywords));
        ADD_SEQ(args, arg_block);
    }
    else {
        ret = INT2FIX(setup_args_core(iseq, args, argn, &dup_rest, flag, keywords));
    }
    setup_args_splat_mut(flag, dup_rest, initial_dup_rest);
    return ret;
}
6575 
6576 static void
6577 build_postexe_iseq(rb_iseq_t *iseq, LINK_ANCHOR *ret, const void *ptr)
6578 {
6579  const NODE *body = ptr;
6580  int line = nd_line(body);
6581  VALUE argc = INT2FIX(0);
6582  const rb_iseq_t *block = NEW_CHILD_ISEQ(body, make_name_for_block(ISEQ_BODY(iseq)->parent_iseq), ISEQ_TYPE_BLOCK, line);
6583 
6584  ADD_INSN1(ret, body, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
6585  ADD_CALL_WITH_BLOCK(ret, body, id_core_set_postexe, argc, block);
6586  RB_OBJ_WRITTEN(iseq, Qundef, (VALUE)block);
6587  iseq_set_local_table(iseq, 0, 0);
6588 }
6589 
/* Compile assignment of regexp named captures ($~[:name] → local vars)
 * after a match. `node` is a NODE_BLOCK chain with one assignment per
 * capture name. Emits: fetch $~; if nil, assign nil to every variable;
 * otherwise assign $~[:name] for each. The single-name case gets a
 * compact specialization by splicing the jump/label sequence directly
 * around the generated AREF send. */
static void
compile_named_capture_assign(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node)
{
    const NODE *vars;
    LINK_ELEMENT *last;
    int line = nd_line(node);
    const NODE *line_node = node;
    LABEL *fail_label = NEW_LABEL(line), *end_label = NEW_LABEL(line);

#if !(defined(NAMED_CAPTURE_BY_SVAR) && NAMED_CAPTURE_BY_SVAR-0)
    ADD_INSN1(ret, line_node, getglobal, ID2SYM(idBACKREF));
#else
    ADD_INSN2(ret, line_node, getspecial, INT2FIX(1) /* '~' */, INT2FIX(0));
#endif
    ADD_INSN(ret, line_node, dup);
    ADD_INSNL(ret, line_node, branchunless, fail_label);

    for (vars = node; vars; vars = RNODE_BLOCK(vars)->nd_next) {
        INSN *cap;
        if (RNODE_BLOCK(vars)->nd_next) {
            ADD_INSN(ret, line_node, dup);
        }
        last = ret->last;
        NO_CHECK(COMPILE_POPPED(ret, "capture", RNODE_BLOCK(vars)->nd_head));
        last = last->next; /* putobject :var */
        /* insert `$~[:var]` (AREF) between the pushed symbol and the
         * assignment instructions that COMPILE_POPPED produced */
        cap = new_insn_send(iseq, nd_line(line_node), nd_node_id(line_node), idAREF, INT2FIX(1),
                            NULL, INT2FIX(0), NULL);
        ELEM_INSERT_PREV(last->next, (LINK_ELEMENT *)cap);
#if !defined(NAMED_CAPTURE_SINGLE_OPT) || NAMED_CAPTURE_SINGLE_OPT-0
        if (!RNODE_BLOCK(vars)->nd_next && vars == node) {
            /* only one name */
            DECL_ANCHOR(nom);

            INIT_ANCHOR(nom);
            ADD_INSNL(nom, line_node, jump, end_label);
            ADD_LABEL(nom, fail_label);
# if 0				/* $~ must be MatchData or nil */
            ADD_INSN(nom, line_node, pop);
            ADD_INSN(nom, line_node, putnil);
# endif
            ADD_LABEL(nom, end_label);
            /* splice the nom sequence in right after the AREF send */
            (nom->last->next = cap->link.next)->prev = nom->last;
            (cap->link.next = nom->anchor.next)->prev = &cap->link;
            return;
        }
#endif
    }
    ADD_INSNL(ret, line_node, jump, end_label);
    ADD_LABEL(ret, fail_label);
    ADD_INSN(ret, line_node, pop);
    /* match failed: rewrite each capture fetch into a nil assignment */
    for (vars = node; vars; vars = RNODE_BLOCK(vars)->nd_next) {
        last = ret->last;
        NO_CHECK(COMPILE_POPPED(ret, "capture", RNODE_BLOCK(vars)->nd_head));
        last = last->next; /* putobject :var */
        ((INSN*)last)->insn_id = BIN(putnil);
        ((INSN*)last)->operand_size = 0;
    }
    ADD_LABEL(ret, end_label);
}
6649 
6650 static int
6651 optimizable_range_item_p(const NODE *n)
6652 {
6653  if (!n) return FALSE;
6654  switch (nd_type(n)) {
6655  case NODE_LINE:
6656  return TRUE;
6657  case NODE_INTEGER:
6658  return TRUE;
6659  case NODE_NIL:
6660  return TRUE;
6661  default:
6662  return FALSE;
6663  }
6664 }
6665 
/* Convert a literal node to its runtime VALUE for Range folding.
 * Note: handles more node types (float/rational/imaginary) than
 * optimizable_range_item_p admits, so it can serve other callers too. */
static VALUE
optimized_range_item(const NODE *n)
{
    switch (nd_type(n)) {
      case NODE_LINE:
        return rb_node_line_lineno_val(n);
      case NODE_INTEGER:
        return rb_node_integer_literal_val(n);
      case NODE_FLOAT:
        return rb_node_float_literal_val(n);
      case NODE_RATIONAL:
        return rb_node_rational_literal_val(n);
      case NODE_IMAGINARY:
        return rb_node_imaginary_literal_val(n);
      case NODE_NIL:
        return Qnil;
      default:
        rb_bug("unexpected node: %s", ruby_node_name(nd_type(n)));
    }
}
6686 
/* Compile `if`/`unless` (type selects which). For NODE_UNLESS the body
 * and else arms are swapped so the same code path handles both. Branch
 * coverage events are recorded only when both arms are reachable
 * (both labels referenced by the condition). */
static int
compile_if(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, int popped, const enum node_type type)
{
    const NODE *const node_body = type == NODE_IF ? RNODE_IF(node)->nd_body : RNODE_UNLESS(node)->nd_else;
    const NODE *const node_else = type == NODE_IF ? RNODE_IF(node)->nd_else : RNODE_UNLESS(node)->nd_body;

    const int line = nd_line(node);
    const NODE *line_node = node;
    DECL_ANCHOR(cond_seq);
    LABEL *then_label, *else_label, *end_label;
    VALUE branches = Qfalse;

    INIT_ANCHOR(cond_seq);
    then_label = NEW_LABEL(line);
    else_label = NEW_LABEL(line);
    end_label = 0;

    NODE *cond = RNODE_IF(node)->nd_cond;
    if (nd_type(cond) == NODE_BLOCK) {
        cond = RNODE_BLOCK(cond)->nd_head;
    }

    CHECK(compile_branch_condition(iseq, cond_seq, cond, then_label, else_label));
    ADD_SEQ(ret, cond_seq);

    if (then_label->refcnt && else_label->refcnt) {
        branches = decl_branch_base(iseq, PTR2NUM(node), nd_code_loc(node), type == NODE_IF ? "if" : "unless");
    }

    if (then_label->refcnt) {
        ADD_LABEL(ret, then_label);

        DECL_ANCHOR(then_seq);
        INIT_ANCHOR(then_seq);
        CHECK(COMPILE_(then_seq, "then", node_body, popped));

        if (else_label->refcnt) {
            /* both arms reachable: record coverage and jump over else */
            const NODE *const coverage_node = node_body ? node_body : node;
            add_trace_branch_coverage(
                iseq,
                ret,
                nd_code_loc(coverage_node),
                nd_node_id(coverage_node),
                0,
                type == NODE_IF ? "then" : "else",
                branches);
            end_label = NEW_LABEL(line);
            ADD_INSNL(then_seq, line_node, jump, end_label);
            if (!popped) {
                ADD_INSN(then_seq, line_node, pop);
            }
        }
        ADD_SEQ(ret, then_seq);
    }

    if (else_label->refcnt) {
        ADD_LABEL(ret, else_label);

        DECL_ANCHOR(else_seq);
        INIT_ANCHOR(else_seq);
        CHECK(COMPILE_(else_seq, "else", node_else, popped));

        if (then_label->refcnt) {
            const NODE *const coverage_node = node_else ? node_else : node;
            add_trace_branch_coverage(
                iseq,
                ret,
                nd_code_loc(coverage_node),
                nd_node_id(coverage_node),
                1,
                type == NODE_IF ? "else" : "then",
                branches);
        }
        ADD_SEQ(ret, else_seq);
    }

    if (end_label) {
        ADD_LABEL(ret, end_label);
    }

    return COMPILE_OK;
}
6769 
/* Compile `case <expr> when ... end`. Condition checks accumulate in
 * cond_seq and bodies in body_seq; both are appended after the head so
 * all dispatching happens before any body. When every `when` value is a
 * "special literal", an opt_case_dispatch instruction with a cdhash is
 * emitted for O(1) dispatch, falling back to the sequential checks. */
static int
compile_case(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const orig_node, int popped)
{
    const NODE *vals;
    const NODE *node = orig_node;
    LABEL *endlabel, *elselabel;
    DECL_ANCHOR(head);
    DECL_ANCHOR(body_seq);
    DECL_ANCHOR(cond_seq);
    int only_special_literals = 1;   /* stays 1 while all values fit the cdhash */
    VALUE literals = rb_hash_new();
    int line;
    enum node_type type;
    const NODE *line_node;
    VALUE branches = Qfalse;
    int branch_id = 0;

    INIT_ANCHOR(head);
    INIT_ANCHOR(body_seq);
    INIT_ANCHOR(cond_seq);

    RHASH_TBL_RAW(literals)->type = &cdhash_type;

    CHECK(COMPILE(head, "case base", RNODE_CASE(node)->nd_head));

    branches = decl_branch_base(iseq, PTR2NUM(node), nd_code_loc(node), "case");

    node = RNODE_CASE(node)->nd_body;
    EXPECT_NODE("NODE_CASE", node, NODE_WHEN, COMPILE_NG);
    type = nd_type(node);
    line = nd_line(node);
    line_node = node;

    endlabel = NEW_LABEL(line);
    elselabel = NEW_LABEL(line);

    ADD_SEQ(ret, head);	/* case VAL */

    while (type == NODE_WHEN) {
        LABEL *l1;

        l1 = NEW_LABEL(line);
        ADD_LABEL(body_seq, l1);
        ADD_INSN(body_seq, line_node, pop);  /* discard the case value */

        const NODE *const coverage_node = RNODE_WHEN(node)->nd_body ? RNODE_WHEN(node)->nd_body : node;
        add_trace_branch_coverage(
            iseq,
            body_seq,
            nd_code_loc(coverage_node),
            nd_node_id(coverage_node),
            branch_id++,
            "when",
            branches);

        CHECK(COMPILE_(body_seq, "when body", RNODE_WHEN(node)->nd_body, popped));
        ADD_INSNL(body_seq, line_node, jump, endlabel);

        vals = RNODE_WHEN(node)->nd_head;
        if (vals) {
            switch (nd_type(vals)) {
              case NODE_LIST:
                only_special_literals = when_vals(iseq, cond_seq, vals, l1, only_special_literals, literals);
                if (only_special_literals < 0) return COMPILE_NG;
                break;
              case NODE_SPLAT:
              case NODE_ARGSCAT:
              case NODE_ARGSPUSH:
                /* splatted values disable the cdhash optimization */
                only_special_literals = 0;
                CHECK(when_splat_vals(iseq, cond_seq, vals, l1, only_special_literals, literals));
                break;
              default:
                UNKNOWN_NODE("NODE_CASE", vals, COMPILE_NG);
            }
        }
        else {
            EXPECT_NODE_NONULL("NODE_CASE", node, NODE_LIST, COMPILE_NG);
        }

        node = RNODE_WHEN(node)->nd_next;
        if (!node) {
            break;
        }
        type = nd_type(node);
        line = nd_line(node);
        line_node = node;
    }
    /* else */
    if (node) {
        ADD_LABEL(cond_seq, elselabel);
        ADD_INSN(cond_seq, line_node, pop);
        add_trace_branch_coverage(iseq, cond_seq, nd_code_loc(node), nd_node_id(node), branch_id, "else", branches);
        CHECK(COMPILE_(cond_seq, "else", node, popped));
        ADD_INSNL(cond_seq, line_node, jump, endlabel);
    }
    else {
        /* no else clause: implicit nil result */
        debugs("== else (implicit)\n");
        ADD_LABEL(cond_seq, elselabel);
        ADD_INSN(cond_seq, orig_node, pop);
        add_trace_branch_coverage(iseq, cond_seq, nd_code_loc(orig_node), nd_node_id(orig_node), branch_id, "else", branches);
        if (!popped) {
            ADD_INSN(cond_seq, orig_node, putnil);
        }
        ADD_INSNL(cond_seq, orig_node, jump, endlabel);
    }

    if (only_special_literals && ISEQ_COMPILE_DATA(iseq)->option->specialized_instruction) {
        ADD_INSN(ret, orig_node, dup);
        ADD_INSN2(ret, orig_node, opt_case_dispatch, literals, elselabel);
        RB_OBJ_WRITTEN(iseq, Qundef, literals);
        LABEL_REF(elselabel);
    }

    ADD_SEQ(ret, cond_seq);
    ADD_SEQ(ret, body_seq);
    ADD_LABEL(ret, endlabel);
    return COMPILE_OK;
}
6888 
/* Compile headless `case when ... end` (no case expression): each `when`
 * value is evaluated as a plain condition; splatted values are matched
 * with checkmatch against nil. Bodies accumulate in body_seq and are
 * appended after all conditions. */
static int
compile_case2(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const orig_node, int popped)
{
    const NODE *vals;
    const NODE *val;
    const NODE *node = RNODE_CASE2(orig_node)->nd_body;
    LABEL *endlabel;
    DECL_ANCHOR(body_seq);
    VALUE branches = Qfalse;
    int branch_id = 0;

    branches = decl_branch_base(iseq, PTR2NUM(orig_node), nd_code_loc(orig_node), "case");

    INIT_ANCHOR(body_seq);
    endlabel = NEW_LABEL(nd_line(node));

    while (node && nd_type_p(node, NODE_WHEN)) {
        const int line = nd_line(node);
        LABEL *l1 = NEW_LABEL(line);
        ADD_LABEL(body_seq, l1);

        const NODE *const coverage_node = RNODE_WHEN(node)->nd_body ? RNODE_WHEN(node)->nd_body : node;
        add_trace_branch_coverage(
            iseq,
            body_seq,
            nd_code_loc(coverage_node),
            nd_node_id(coverage_node),
            branch_id++,
            "when",
            branches);

        CHECK(COMPILE_(body_seq, "when", RNODE_WHEN(node)->nd_body, popped));
        ADD_INSNL(body_seq, node, jump, endlabel);

        vals = RNODE_WHEN(node)->nd_head;
        if (!vals) {
            EXPECT_NODE_NONULL("NODE_WHEN", node, NODE_LIST, COMPILE_NG);
        }
        switch (nd_type(vals)) {
          case NODE_LIST:
            /* each value becomes its own truthiness branch */
            while (vals) {
                LABEL *lnext;
                val = RNODE_LIST(vals)->nd_head;
                lnext = NEW_LABEL(nd_line(val));
                debug_compile("== when2\n", (void)0);
                CHECK(compile_branch_condition(iseq, ret, val, l1, lnext));
                ADD_LABEL(ret, lnext);
                vals = RNODE_LIST(vals)->nd_next;
            }
            break;
          case NODE_SPLAT:
          case NODE_ARGSCAT:
          case NODE_ARGSPUSH:
            /* splatted values: checkmatch the array against nil */
            ADD_INSN(ret, vals, putnil);
            CHECK(COMPILE(ret, "when2/cond splat", vals));
            ADD_INSN1(ret, vals, checkmatch, INT2FIX(VM_CHECKMATCH_TYPE_WHEN | VM_CHECKMATCH_ARRAY));
            ADD_INSNL(ret, vals, branchif, l1);
            break;
          default:
            UNKNOWN_NODE("NODE_WHEN", vals, COMPILE_NG);
        }
        node = RNODE_WHEN(node)->nd_next;
    }
    /* else */
    const NODE *const coverage_node = node ? node : orig_node;
    add_trace_branch_coverage(
        iseq,
        ret,
        nd_code_loc(coverage_node),
        nd_node_id(coverage_node),
        branch_id,
        "else",
        branches);
    CHECK(COMPILE_(ret, "else", node, popped));
    ADD_INSNL(ret, orig_node, jump, endlabel);

    ADD_SEQ(ret, body_seq);
    ADD_LABEL(ret, endlabel);
    return COMPILE_OK;
}
6969 
6970 static int iseq_compile_pattern_match(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, LABEL *unmatched, bool in_single_pattern, bool in_alt_pattern, int base_index, bool use_deconstructed_cache);
6971 
6972 static int iseq_compile_pattern_constant(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, LABEL *match_failed, bool in_single_pattern, int base_index);
6973 static int iseq_compile_array_deconstruct(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, LABEL *deconstruct, LABEL *deconstructed, LABEL *match_failed, LABEL *type_error, bool in_single_pattern, int base_index, bool use_deconstructed_cache);
6974 static int iseq_compile_pattern_set_general_errmsg(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, VALUE errmsg, int base_index);
6975 static int iseq_compile_pattern_set_length_errmsg(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, VALUE errmsg, VALUE pattern_length, int base_index);
6976 static int iseq_compile_pattern_set_eqq_errmsg(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, int base_index);
6977 
6978 #define CASE3_BI_OFFSET_DECONSTRUCTED_CACHE 0
6979 #define CASE3_BI_OFFSET_ERROR_STRING 1
6980 #define CASE3_BI_OFFSET_KEY_ERROR_P 2
6981 #define CASE3_BI_OFFSET_KEY_ERROR_MATCHEE 3
6982 #define CASE3_BI_OFFSET_KEY_ERROR_KEY 4
6983 
6984 static int
6985 iseq_compile_pattern_each(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, LABEL *matched, LABEL *unmatched, bool in_single_pattern, bool in_alt_pattern, int base_index, bool use_deconstructed_cache)
6986 {
6987  const int line = nd_line(node);
6988  const NODE *line_node = node;
6989 
6990  switch (nd_type(node)) {
6991  case NODE_ARYPTN: {
6992  /*
6993  * if pattern.use_rest_num?
6994  * rest_num = 0
6995  * end
6996  * if pattern.has_constant_node?
6997  * unless pattern.constant === obj
6998  * goto match_failed
6999  * end
7000  * end
7001  * unless obj.respond_to?(:deconstruct)
7002  * goto match_failed
7003  * end
7004  * d = obj.deconstruct
7005  * unless Array === d
7006  * goto type_error
7007  * end
7008  * min_argc = pattern.pre_args_num + pattern.post_args_num
7009  * if pattern.has_rest_arg?
7010  * unless d.length >= min_argc
7011  * goto match_failed
7012  * end
7013  * else
7014  * unless d.length == min_argc
7015  * goto match_failed
7016  * end
7017  * end
7018  * pattern.pre_args_num.each do |i|
7019  * unless pattern.pre_args[i].match?(d[i])
7020  * goto match_failed
7021  * end
7022  * end
7023  * if pattern.use_rest_num?
7024  * rest_num = d.length - min_argc
7025  * if pattern.has_rest_arg? && pattern.has_rest_arg_id # not `*`, but `*rest`
7026  * unless pattern.rest_arg.match?(d[pattern.pre_args_num, rest_num])
7027  * goto match_failed
7028  * end
7029  * end
7030  * end
7031  * pattern.post_args_num.each do |i|
7032  * j = pattern.pre_args_num + i
7033  * j += rest_num
7034  * unless pattern.post_args[i].match?(d[j])
7035  * goto match_failed
7036  * end
7037  * end
7038  * goto matched
7039  * type_error:
7040  * FrozenCore.raise TypeError
7041  * match_failed:
7042  * goto unmatched
7043  */
7044  const NODE *args = RNODE_ARYPTN(node)->pre_args;
7045  const int pre_args_num = RNODE_ARYPTN(node)->pre_args ? rb_long2int(RNODE_LIST(RNODE_ARYPTN(node)->pre_args)->as.nd_alen) : 0;
7046  const int post_args_num = RNODE_ARYPTN(node)->post_args ? rb_long2int(RNODE_LIST(RNODE_ARYPTN(node)->post_args)->as.nd_alen) : 0;
7047 
7048  const int min_argc = pre_args_num + post_args_num;
7049  const int use_rest_num = RNODE_ARYPTN(node)->rest_arg && (NODE_NAMED_REST_P(RNODE_ARYPTN(node)->rest_arg) ||
7050  (!NODE_NAMED_REST_P(RNODE_ARYPTN(node)->rest_arg) && post_args_num > 0));
7051 
7052  LABEL *match_failed, *type_error, *deconstruct, *deconstructed;
7053  int i;
7054  match_failed = NEW_LABEL(line);
7055  type_error = NEW_LABEL(line);
7056  deconstruct = NEW_LABEL(line);
7057  deconstructed = NEW_LABEL(line);
7058 
7059  if (use_rest_num) {
7060  ADD_INSN1(ret, line_node, putobject, INT2FIX(0)); /* allocate stack for rest_num */
7061  ADD_INSN(ret, line_node, swap);
7062  if (base_index) {
7063  base_index++;
7064  }
7065  }
7066 
7067  CHECK(iseq_compile_pattern_constant(iseq, ret, node, match_failed, in_single_pattern, base_index));
7068 
7069  CHECK(iseq_compile_array_deconstruct(iseq, ret, node, deconstruct, deconstructed, match_failed, type_error, in_single_pattern, base_index, use_deconstructed_cache));
7070 
7071  ADD_INSN(ret, line_node, dup);
7072  ADD_SEND(ret, line_node, idLength, INT2FIX(0));
7073  ADD_INSN1(ret, line_node, putobject, INT2FIX(min_argc));
7074  ADD_SEND(ret, line_node, RNODE_ARYPTN(node)->rest_arg ? idGE : idEq, INT2FIX(1)); // (1)
7075  if (in_single_pattern) {
7076  CHECK(iseq_compile_pattern_set_length_errmsg(iseq, ret, node,
7077  RNODE_ARYPTN(node)->rest_arg ? rb_fstring_lit("%p length mismatch (given %p, expected %p+)") :
7078  rb_fstring_lit("%p length mismatch (given %p, expected %p)"),
7079  INT2FIX(min_argc), base_index + 1 /* (1) */));
7080  }
7081  ADD_INSNL(ret, line_node, branchunless, match_failed);
7082 
7083  for (i = 0; i < pre_args_num; i++) {
7084  ADD_INSN(ret, line_node, dup);
7085  ADD_INSN1(ret, line_node, putobject, INT2FIX(i));
7086  ADD_SEND(ret, line_node, idAREF, INT2FIX(1)); // (2)
7087  CHECK(iseq_compile_pattern_match(iseq, ret, RNODE_LIST(args)->nd_head, match_failed, in_single_pattern, in_alt_pattern, base_index + 1 /* (2) */, false));
7088  args = RNODE_LIST(args)->nd_next;
7089  }
7090 
7091  if (RNODE_ARYPTN(node)->rest_arg) {
7092  if (NODE_NAMED_REST_P(RNODE_ARYPTN(node)->rest_arg)) {
7093  ADD_INSN(ret, line_node, dup);
7094  ADD_INSN1(ret, line_node, putobject, INT2FIX(pre_args_num));
7095  ADD_INSN1(ret, line_node, topn, INT2FIX(1));
7096  ADD_SEND(ret, line_node, idLength, INT2FIX(0));
7097  ADD_INSN1(ret, line_node, putobject, INT2FIX(min_argc));
7098  ADD_SEND(ret, line_node, idMINUS, INT2FIX(1));
7099  ADD_INSN1(ret, line_node, setn, INT2FIX(4));
7100  ADD_SEND(ret, line_node, idAREF, INT2FIX(2)); // (3)
7101 
7102  CHECK(iseq_compile_pattern_match(iseq, ret, RNODE_ARYPTN(node)->rest_arg, match_failed, in_single_pattern, in_alt_pattern, base_index + 1 /* (3) */, false));
7103  }
7104  else {
7105  if (post_args_num > 0) {
7106  ADD_INSN(ret, line_node, dup);
7107  ADD_SEND(ret, line_node, idLength, INT2FIX(0));
7108  ADD_INSN1(ret, line_node, putobject, INT2FIX(min_argc));
7109  ADD_SEND(ret, line_node, idMINUS, INT2FIX(1));
7110  ADD_INSN1(ret, line_node, setn, INT2FIX(2));
7111  ADD_INSN(ret, line_node, pop);
7112  }
7113  }
7114  }
7115 
7116  args = RNODE_ARYPTN(node)->post_args;
7117  for (i = 0; i < post_args_num; i++) {
7118  ADD_INSN(ret, line_node, dup);
7119 
7120  ADD_INSN1(ret, line_node, putobject, INT2FIX(pre_args_num + i));
7121  ADD_INSN1(ret, line_node, topn, INT2FIX(3));
7122  ADD_SEND(ret, line_node, idPLUS, INT2FIX(1));
7123 
7124  ADD_SEND(ret, line_node, idAREF, INT2FIX(1)); // (4)
7125  CHECK(iseq_compile_pattern_match(iseq, ret, RNODE_LIST(args)->nd_head, match_failed, in_single_pattern, in_alt_pattern, base_index + 1 /* (4) */, false));
7126  args = RNODE_LIST(args)->nd_next;
7127  }
7128 
7129  ADD_INSN(ret, line_node, pop);
7130  if (use_rest_num) {
7131  ADD_INSN(ret, line_node, pop);
7132  }
7133  ADD_INSNL(ret, line_node, jump, matched);
7134  ADD_INSN(ret, line_node, putnil);
7135  if (use_rest_num) {
7136  ADD_INSN(ret, line_node, putnil);
7137  }
7138 
7139  ADD_LABEL(ret, type_error);
7140  ADD_INSN1(ret, line_node, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
7141  ADD_INSN1(ret, line_node, putobject, rb_eTypeError);
7142  ADD_INSN1(ret, line_node, putobject, rb_fstring_lit("deconstruct must return Array"));
7143  ADD_SEND(ret, line_node, id_core_raise, INT2FIX(2));
7144  ADD_INSN(ret, line_node, pop);
7145 
7146  ADD_LABEL(ret, match_failed);
7147  ADD_INSN(ret, line_node, pop);
7148  if (use_rest_num) {
7149  ADD_INSN(ret, line_node, pop);
7150  }
7151  ADD_INSNL(ret, line_node, jump, unmatched);
7152 
7153  break;
7154  }
7155  case NODE_FNDPTN: {
7156  /*
7157  * if pattern.has_constant_node?
7158  * unless pattern.constant === obj
7159  * goto match_failed
7160  * end
7161  * end
7162  * unless obj.respond_to?(:deconstruct)
7163  * goto match_failed
7164  * end
7165  * d = obj.deconstruct
7166  * unless Array === d
7167  * goto type_error
7168  * end
7169  * unless d.length >= pattern.args_num
7170  * goto match_failed
7171  * end
7172  *
7173  * begin
7174  * len = d.length
7175  * limit = d.length - pattern.args_num
7176  * i = 0
7177  * while i <= limit
7178  * if pattern.args_num.times.all? {|j| pattern.args[j].match?(d[i+j]) }
7179  * if pattern.has_pre_rest_arg_id
7180  * unless pattern.pre_rest_arg.match?(d[0, i])
7181  * goto find_failed
7182  * end
7183  * end
7184  * if pattern.has_post_rest_arg_id
7185  * unless pattern.post_rest_arg.match?(d[i+pattern.args_num, len])
7186  * goto find_failed
7187  * end
7188  * end
7189  * goto find_succeeded
7190  * end
7191  * i+=1
7192  * end
7193  * find_failed:
7194  * goto match_failed
7195  * find_succeeded:
7196  * end
7197  *
7198  * goto matched
7199  * type_error:
7200  * FrozenCore.raise TypeError
7201  * match_failed:
7202  * goto unmatched
7203  */
7204  const NODE *args = RNODE_FNDPTN(node)->args;
7205  const int args_num = RNODE_FNDPTN(node)->args ? rb_long2int(RNODE_LIST(RNODE_FNDPTN(node)->args)->as.nd_alen) : 0;
7206 
7207  LABEL *match_failed, *type_error, *deconstruct, *deconstructed;
7208  match_failed = NEW_LABEL(line);
7209  type_error = NEW_LABEL(line);
7210  deconstruct = NEW_LABEL(line);
7211  deconstructed = NEW_LABEL(line);
7212 
7213  CHECK(iseq_compile_pattern_constant(iseq, ret, node, match_failed, in_single_pattern, base_index));
7214 
7215  CHECK(iseq_compile_array_deconstruct(iseq, ret, node, deconstruct, deconstructed, match_failed, type_error, in_single_pattern, base_index, use_deconstructed_cache));
7216 
7217  ADD_INSN(ret, line_node, dup);
7218  ADD_SEND(ret, line_node, idLength, INT2FIX(0));
7219  ADD_INSN1(ret, line_node, putobject, INT2FIX(args_num));
7220  ADD_SEND(ret, line_node, idGE, INT2FIX(1)); // (1)
7221  if (in_single_pattern) {
7222  CHECK(iseq_compile_pattern_set_length_errmsg(iseq, ret, node, rb_fstring_lit("%p length mismatch (given %p, expected %p+)"), INT2FIX(args_num), base_index + 1 /* (1) */));
7223  }
7224  ADD_INSNL(ret, line_node, branchunless, match_failed);
7225 
7226  {
7227  LABEL *while_begin = NEW_LABEL(nd_line(node));
7228  LABEL *next_loop = NEW_LABEL(nd_line(node));
7229  LABEL *find_succeeded = NEW_LABEL(line);
7230  LABEL *find_failed = NEW_LABEL(nd_line(node));
7231  int j;
7232 
7233  ADD_INSN(ret, line_node, dup); /* allocate stack for len */
7234  ADD_SEND(ret, line_node, idLength, INT2FIX(0)); // (2)
7235 
7236  ADD_INSN(ret, line_node, dup); /* allocate stack for limit */
7237  ADD_INSN1(ret, line_node, putobject, INT2FIX(args_num));
7238  ADD_SEND(ret, line_node, idMINUS, INT2FIX(1)); // (3)
7239 
7240  ADD_INSN1(ret, line_node, putobject, INT2FIX(0)); /* allocate stack for i */ // (4)
7241 
7242  ADD_LABEL(ret, while_begin);
7243 
7244  ADD_INSN(ret, line_node, dup);
7245  ADD_INSN1(ret, line_node, topn, INT2FIX(2));
7246  ADD_SEND(ret, line_node, idLE, INT2FIX(1));
7247  ADD_INSNL(ret, line_node, branchunless, find_failed);
7248 
7249  for (j = 0; j < args_num; j++) {
7250  ADD_INSN1(ret, line_node, topn, INT2FIX(3));
7251  ADD_INSN1(ret, line_node, topn, INT2FIX(1));
7252  if (j != 0) {
7253  ADD_INSN1(ret, line_node, putobject, INT2FIX(j));
7254  ADD_SEND(ret, line_node, idPLUS, INT2FIX(1));
7255  }
7256  ADD_SEND(ret, line_node, idAREF, INT2FIX(1)); // (5)
7257 
7258  CHECK(iseq_compile_pattern_match(iseq, ret, RNODE_LIST(args)->nd_head, next_loop, in_single_pattern, in_alt_pattern, base_index + 4 /* (2), (3), (4), (5) */, false));
7259  args = RNODE_LIST(args)->nd_next;
7260  }
7261 
7262  if (NODE_NAMED_REST_P(RNODE_FNDPTN(node)->pre_rest_arg)) {
7263  ADD_INSN1(ret, line_node, topn, INT2FIX(3));
7264  ADD_INSN1(ret, line_node, putobject, INT2FIX(0));
7265  ADD_INSN1(ret, line_node, topn, INT2FIX(2));
7266  ADD_SEND(ret, line_node, idAREF, INT2FIX(2)); // (6)
7267  CHECK(iseq_compile_pattern_match(iseq, ret, RNODE_FNDPTN(node)->pre_rest_arg, find_failed, in_single_pattern, in_alt_pattern, base_index + 4 /* (2), (3), (4), (6) */, false));
7268  }
7269  if (NODE_NAMED_REST_P(RNODE_FNDPTN(node)->post_rest_arg)) {
7270  ADD_INSN1(ret, line_node, topn, INT2FIX(3));
7271  ADD_INSN1(ret, line_node, topn, INT2FIX(1));
7272  ADD_INSN1(ret, line_node, putobject, INT2FIX(args_num));
7273  ADD_SEND(ret, line_node, idPLUS, INT2FIX(1));
7274  ADD_INSN1(ret, line_node, topn, INT2FIX(3));
7275  ADD_SEND(ret, line_node, idAREF, INT2FIX(2)); // (7)
7276  CHECK(iseq_compile_pattern_match(iseq, ret, RNODE_FNDPTN(node)->post_rest_arg, find_failed, in_single_pattern, in_alt_pattern, base_index + 4 /* (2), (3),(4), (7) */, false));
7277  }
7278  ADD_INSNL(ret, line_node, jump, find_succeeded);
7279 
7280  ADD_LABEL(ret, next_loop);
7281  ADD_INSN1(ret, line_node, putobject, INT2FIX(1));
7282  ADD_SEND(ret, line_node, idPLUS, INT2FIX(1));
7283  ADD_INSNL(ret, line_node, jump, while_begin);
7284 
7285  ADD_LABEL(ret, find_failed);
7286  ADD_INSN1(ret, line_node, adjuststack, INT2FIX(3));
7287  if (in_single_pattern) {
7288  ADD_INSN1(ret, line_node, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
7289  ADD_INSN1(ret, line_node, putobject, rb_fstring_lit("%p does not match to find pattern"));
7290  ADD_INSN1(ret, line_node, topn, INT2FIX(2));
7291  ADD_SEND(ret, line_node, id_core_sprintf, INT2FIX(2)); // (8)
7292  ADD_INSN1(ret, line_node, setn, INT2FIX(base_index + CASE3_BI_OFFSET_ERROR_STRING + 1 /* (8) */)); // (9)
7293 
7294  ADD_INSN1(ret, line_node, putobject, Qfalse);
7295  ADD_INSN1(ret, line_node, setn, INT2FIX(base_index + CASE3_BI_OFFSET_KEY_ERROR_P + 2 /* (8), (9) */));
7296 
7297  ADD_INSN(ret, line_node, pop);
7298  ADD_INSN(ret, line_node, pop);
7299  }
7300  ADD_INSNL(ret, line_node, jump, match_failed);
7301  ADD_INSN1(ret, line_node, dupn, INT2FIX(3));
7302 
7303  ADD_LABEL(ret, find_succeeded);
7304  ADD_INSN1(ret, line_node, adjuststack, INT2FIX(3));
7305  }
7306 
7307  ADD_INSN(ret, line_node, pop);
7308  ADD_INSNL(ret, line_node, jump, matched);
7309  ADD_INSN(ret, line_node, putnil);
7310 
7311  ADD_LABEL(ret, type_error);
7312  ADD_INSN1(ret, line_node, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
7313  ADD_INSN1(ret, line_node, putobject, rb_eTypeError);
7314  ADD_INSN1(ret, line_node, putobject, rb_fstring_lit("deconstruct must return Array"));
7315  ADD_SEND(ret, line_node, id_core_raise, INT2FIX(2));
7316  ADD_INSN(ret, line_node, pop);
7317 
7318  ADD_LABEL(ret, match_failed);
7319  ADD_INSN(ret, line_node, pop);
7320  ADD_INSNL(ret, line_node, jump, unmatched);
7321 
7322  break;
7323  }
7324  case NODE_HSHPTN: {
7325  /*
7326  * keys = nil
7327  * if pattern.has_kw_args_node? && !pattern.has_kw_rest_arg_node?
7328  * keys = pattern.kw_args_node.keys
7329  * end
7330  * if pattern.has_constant_node?
7331  * unless pattern.constant === obj
7332  * goto match_failed
7333  * end
7334  * end
7335  * unless obj.respond_to?(:deconstruct_keys)
7336  * goto match_failed
7337  * end
7338  * d = obj.deconstruct_keys(keys)
7339  * unless Hash === d
7340  * goto type_error
7341  * end
7342  * if pattern.has_kw_rest_arg_node?
7343  * d = d.dup
7344  * end
7345  * if pattern.has_kw_args_node?
7346  * pattern.kw_args_node.each |k,|
7347  * unless d.key?(k)
7348  * goto match_failed
7349  * end
7350  * end
7351  * pattern.kw_args_node.each |k, pat|
7352  * if pattern.has_kw_rest_arg_node?
7353  * unless pat.match?(d.delete(k))
7354  * goto match_failed
7355  * end
7356  * else
7357  * unless pat.match?(d[k])
7358  * goto match_failed
7359  * end
7360  * end
7361  * end
7362  * else
7363  * unless d.empty?
7364  * goto match_failed
7365  * end
7366  * end
7367  * if pattern.has_kw_rest_arg_node?
7368  * if pattern.no_rest_keyword?
7369  * unless d.empty?
7370  * goto match_failed
7371  * end
7372  * else
7373  * unless pattern.kw_rest_arg_node.match?(d)
7374  * goto match_failed
7375  * end
7376  * end
7377  * end
7378  * goto matched
7379  * type_error:
7380  * FrozenCore.raise TypeError
7381  * match_failed:
7382  * goto unmatched
7383  */
7384  LABEL *match_failed, *type_error;
7385  VALUE keys = Qnil;
7386 
7387  match_failed = NEW_LABEL(line);
7388  type_error = NEW_LABEL(line);
7389 
7390  if (RNODE_HSHPTN(node)->nd_pkwargs && !RNODE_HSHPTN(node)->nd_pkwrestarg) {
7391  const NODE *kw_args = RNODE_HASH(RNODE_HSHPTN(node)->nd_pkwargs)->nd_head;
7392  keys = rb_ary_new_capa(kw_args ? RNODE_LIST(kw_args)->as.nd_alen/2 : 0);
7393  while (kw_args) {
7394  rb_ary_push(keys, get_symbol_value(iseq, RNODE_LIST(kw_args)->nd_head));
7395  kw_args = RNODE_LIST(RNODE_LIST(kw_args)->nd_next)->nd_next;
7396  }
7397  }
7398 
7399  CHECK(iseq_compile_pattern_constant(iseq, ret, node, match_failed, in_single_pattern, base_index));
7400 
7401  ADD_INSN(ret, line_node, dup);
7402  ADD_INSN1(ret, line_node, putobject, ID2SYM(rb_intern("deconstruct_keys")));
7403  ADD_SEND(ret, line_node, idRespond_to, INT2FIX(1)); // (1)
7404  if (in_single_pattern) {
7405  CHECK(iseq_compile_pattern_set_general_errmsg(iseq, ret, node, rb_fstring_lit("%p does not respond to #deconstruct_keys"), base_index + 1 /* (1) */));
7406  }
7407  ADD_INSNL(ret, line_node, branchunless, match_failed);
7408 
7409  if (NIL_P(keys)) {
7410  ADD_INSN(ret, line_node, putnil);
7411  }
7412  else {
7413  ADD_INSN1(ret, line_node, duparray, keys);
7414  RB_OBJ_WRITTEN(iseq, Qundef, rb_obj_hide(keys));
7415  }
7416  ADD_SEND(ret, line_node, rb_intern("deconstruct_keys"), INT2FIX(1)); // (2)
7417 
7418  ADD_INSN(ret, line_node, dup);
7419  ADD_INSN1(ret, line_node, checktype, INT2FIX(T_HASH));
7420  ADD_INSNL(ret, line_node, branchunless, type_error);
7421 
7422  if (RNODE_HSHPTN(node)->nd_pkwrestarg) {
7423  ADD_SEND(ret, line_node, rb_intern("dup"), INT2FIX(0));
7424  }
7425 
7426  if (RNODE_HSHPTN(node)->nd_pkwargs) {
7427  int i;
7428  int keys_num;
7429  const NODE *args;
7430  args = RNODE_HASH(RNODE_HSHPTN(node)->nd_pkwargs)->nd_head;
7431  if (args) {
7432  DECL_ANCHOR(match_values);
7433  INIT_ANCHOR(match_values);
7434  keys_num = rb_long2int(RNODE_LIST(args)->as.nd_alen) / 2;
7435  for (i = 0; i < keys_num; i++) {
7436  NODE *key_node = RNODE_LIST(args)->nd_head;
7437  NODE *value_node = RNODE_LIST(RNODE_LIST(args)->nd_next)->nd_head;
7438  VALUE key = get_symbol_value(iseq, key_node);
7439 
7440  ADD_INSN(ret, line_node, dup);
7441  ADD_INSN1(ret, line_node, putobject, key);
7442  ADD_SEND(ret, line_node, rb_intern("key?"), INT2FIX(1)); // (3)
7443  if (in_single_pattern) {
7444  LABEL *match_succeeded;
7445  match_succeeded = NEW_LABEL(line);
7446 
7447  ADD_INSN(ret, line_node, dup);
7448  ADD_INSNL(ret, line_node, branchif, match_succeeded);
7449 
7450  ADD_INSN1(ret, line_node, putobject, rb_str_freeze(rb_sprintf("key not found: %+"PRIsVALUE, key))); // (4)
7451  ADD_INSN1(ret, line_node, setn, INT2FIX(base_index + CASE3_BI_OFFSET_ERROR_STRING + 2 /* (3), (4) */));
7452  ADD_INSN1(ret, line_node, putobject, Qtrue); // (5)
7453  ADD_INSN1(ret, line_node, setn, INT2FIX(base_index + CASE3_BI_OFFSET_KEY_ERROR_P + 3 /* (3), (4), (5) */));
7454  ADD_INSN1(ret, line_node, topn, INT2FIX(3)); // (6)
7455  ADD_INSN1(ret, line_node, setn, INT2FIX(base_index + CASE3_BI_OFFSET_KEY_ERROR_MATCHEE + 4 /* (3), (4), (5), (6) */));
7456  ADD_INSN1(ret, line_node, putobject, key); // (7)
7457  ADD_INSN1(ret, line_node, setn, INT2FIX(base_index + CASE3_BI_OFFSET_KEY_ERROR_KEY + 5 /* (3), (4), (5), (6), (7) */));
7458 
7459  ADD_INSN1(ret, line_node, adjuststack, INT2FIX(4));
7460 
7461  ADD_LABEL(ret, match_succeeded);
7462  }
7463  ADD_INSNL(ret, line_node, branchunless, match_failed);
7464 
7465  ADD_INSN(match_values, line_node, dup);
7466  ADD_INSN1(match_values, line_node, putobject, key);
7467  ADD_SEND(match_values, line_node, RNODE_HSHPTN(node)->nd_pkwrestarg ? rb_intern("delete") : idAREF, INT2FIX(1)); // (8)
7468  CHECK(iseq_compile_pattern_match(iseq, match_values, value_node, match_failed, in_single_pattern, in_alt_pattern, base_index + 1 /* (8) */, false));
7469  args = RNODE_LIST(RNODE_LIST(args)->nd_next)->nd_next;
7470  }
7471  ADD_SEQ(ret, match_values);
7472  }
7473  }
7474  else {
7475  ADD_INSN(ret, line_node, dup);
7476  ADD_SEND(ret, line_node, idEmptyP, INT2FIX(0)); // (9)
7477  if (in_single_pattern) {
7478  CHECK(iseq_compile_pattern_set_general_errmsg(iseq, ret, node, rb_fstring_lit("%p is not empty"), base_index + 1 /* (9) */));
7479  }
7480  ADD_INSNL(ret, line_node, branchunless, match_failed);
7481  }
7482 
7483  if (RNODE_HSHPTN(node)->nd_pkwrestarg) {
7484  if (RNODE_HSHPTN(node)->nd_pkwrestarg == NODE_SPECIAL_NO_REST_KEYWORD) {
7485  ADD_INSN(ret, line_node, dup);
7486  ADD_SEND(ret, line_node, idEmptyP, INT2FIX(0)); // (10)
7487  if (in_single_pattern) {
7488  CHECK(iseq_compile_pattern_set_general_errmsg(iseq, ret, node, rb_fstring_lit("rest of %p is not empty"), base_index + 1 /* (10) */));
7489  }
7490  ADD_INSNL(ret, line_node, branchunless, match_failed);
7491  }
7492  else {
7493  ADD_INSN(ret, line_node, dup); // (11)
7494  CHECK(iseq_compile_pattern_match(iseq, ret, RNODE_HSHPTN(node)->nd_pkwrestarg, match_failed, in_single_pattern, in_alt_pattern, base_index + 1 /* (11) */, false));
7495  }
7496  }
7497 
7498  ADD_INSN(ret, line_node, pop);
7499  ADD_INSNL(ret, line_node, jump, matched);
7500  ADD_INSN(ret, line_node, putnil);
7501 
7502  ADD_LABEL(ret, type_error);
7503  ADD_INSN1(ret, line_node, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
7504  ADD_INSN1(ret, line_node, putobject, rb_eTypeError);
7505  ADD_INSN1(ret, line_node, putobject, rb_fstring_lit("deconstruct_keys must return Hash"));
7506  ADD_SEND(ret, line_node, id_core_raise, INT2FIX(2));
7507  ADD_INSN(ret, line_node, pop);
7508 
7509  ADD_LABEL(ret, match_failed);
7510  ADD_INSN(ret, line_node, pop);
7511  ADD_INSNL(ret, line_node, jump, unmatched);
7512  break;
7513  }
7514  case NODE_SYM:
7515  case NODE_REGX:
7516  case NODE_LINE:
7517  case NODE_INTEGER:
7518  case NODE_FLOAT:
7519  case NODE_RATIONAL:
7520  case NODE_IMAGINARY:
7521  case NODE_FILE:
7522  case NODE_ENCODING:
7523  case NODE_STR:
7524  case NODE_XSTR:
7525  case NODE_DSTR:
7526  case NODE_DSYM:
7527  case NODE_DREGX:
7528  case NODE_LIST:
7529  case NODE_ZLIST:
7530  case NODE_LAMBDA:
7531  case NODE_DOT2:
7532  case NODE_DOT3:
7533  case NODE_CONST:
7534  case NODE_LVAR:
7535  case NODE_DVAR:
7536  case NODE_IVAR:
7537  case NODE_CVAR:
7538  case NODE_GVAR:
7539  case NODE_TRUE:
7540  case NODE_FALSE:
7541  case NODE_SELF:
7542  case NODE_NIL:
7543  case NODE_COLON2:
7544  case NODE_COLON3:
7545  case NODE_BEGIN:
7546  case NODE_BLOCK:
7547  case NODE_ONCE:
7548  CHECK(COMPILE(ret, "case in literal", node)); // (1)
7549  if (in_single_pattern) {
7550  ADD_INSN1(ret, line_node, dupn, INT2FIX(2));
7551  }
7552  ADD_INSN1(ret, line_node, checkmatch, INT2FIX(VM_CHECKMATCH_TYPE_CASE)); // (2)
7553  if (in_single_pattern) {
7554  CHECK(iseq_compile_pattern_set_eqq_errmsg(iseq, ret, node, base_index + 2 /* (1), (2) */));
7555  }
7556  ADD_INSNL(ret, line_node, branchif, matched);
7557  ADD_INSNL(ret, line_node, jump, unmatched);
7558  break;
7559  case NODE_LASGN: {
7560  struct rb_iseq_constant_body *const body = ISEQ_BODY(iseq);
7561  ID id = RNODE_LASGN(node)->nd_vid;
7562  int idx = ISEQ_BODY(body->local_iseq)->local_table_size - get_local_var_idx(iseq, id);
7563 
7564  if (in_alt_pattern) {
7565  const char *name = rb_id2name(id);
7566  if (name && strlen(name) > 0 && name[0] != '_') {
7567  COMPILE_ERROR(ERROR_ARGS "illegal variable in alternative pattern (%"PRIsVALUE")",
7568  rb_id2str(id));
7569  return COMPILE_NG;
7570  }
7571  }
7572 
7573  ADD_SETLOCAL(ret, line_node, idx, get_lvar_level(iseq));
7574  ADD_INSNL(ret, line_node, jump, matched);
7575  break;
7576  }
7577  case NODE_DASGN: {
7578  int idx, lv, ls;
7579  ID id = RNODE_DASGN(node)->nd_vid;
7580 
7581  idx = get_dyna_var_idx(iseq, id, &lv, &ls);
7582 
7583  if (in_alt_pattern) {
7584  const char *name = rb_id2name(id);
7585  if (name && strlen(name) > 0 && name[0] != '_') {
7586  COMPILE_ERROR(ERROR_ARGS "illegal variable in alternative pattern (%"PRIsVALUE")",
7587  rb_id2str(id));
7588  return COMPILE_NG;
7589  }
7590  }
7591 
7592  if (idx < 0) {
7593  COMPILE_ERROR(ERROR_ARGS "NODE_DASGN: unknown id (%"PRIsVALUE")",
7594  rb_id2str(id));
7595  return COMPILE_NG;
7596  }
7597  ADD_SETLOCAL(ret, line_node, ls - idx, lv);
7598  ADD_INSNL(ret, line_node, jump, matched);
7599  break;
7600  }
7601  case NODE_IF:
7602  case NODE_UNLESS: {
7603  LABEL *match_failed;
7604  match_failed = unmatched;
7605  CHECK(iseq_compile_pattern_match(iseq, ret, RNODE_IF(node)->nd_body, unmatched, in_single_pattern, in_alt_pattern, base_index, use_deconstructed_cache));
7606  CHECK(COMPILE(ret, "case in if", RNODE_IF(node)->nd_cond));
7607  if (in_single_pattern) {
7608  LABEL *match_succeeded;
7609  match_succeeded = NEW_LABEL(line);
7610 
7611  ADD_INSN(ret, line_node, dup);
7612  if (nd_type_p(node, NODE_IF)) {
7613  ADD_INSNL(ret, line_node, branchif, match_succeeded);
7614  }
7615  else {
7616  ADD_INSNL(ret, line_node, branchunless, match_succeeded);
7617  }
7618 
7619  ADD_INSN1(ret, line_node, putobject, rb_fstring_lit("guard clause does not return true")); // (1)
7620  ADD_INSN1(ret, line_node, setn, INT2FIX(base_index + CASE3_BI_OFFSET_ERROR_STRING + 1 /* (1) */)); // (2)
7621  ADD_INSN1(ret, line_node, putobject, Qfalse);
7622  ADD_INSN1(ret, line_node, setn, INT2FIX(base_index + CASE3_BI_OFFSET_KEY_ERROR_P + 2 /* (1), (2) */));
7623 
7624  ADD_INSN(ret, line_node, pop);
7625  ADD_INSN(ret, line_node, pop);
7626 
7627  ADD_LABEL(ret, match_succeeded);
7628  }
7629  if (nd_type_p(node, NODE_IF)) {
7630  ADD_INSNL(ret, line_node, branchunless, match_failed);
7631  }
7632  else {
7633  ADD_INSNL(ret, line_node, branchif, match_failed);
7634  }
7635  ADD_INSNL(ret, line_node, jump, matched);
7636  break;
7637  }
7638  case NODE_HASH: {
7639  NODE *n;
7640  LABEL *match_failed;
7641  match_failed = NEW_LABEL(line);
7642 
7643  n = RNODE_HASH(node)->nd_head;
7644  if (! (nd_type_p(n, NODE_LIST) && RNODE_LIST(n)->as.nd_alen == 2)) {
7645  COMPILE_ERROR(ERROR_ARGS "unexpected node");
7646  return COMPILE_NG;
7647  }
7648 
7649  ADD_INSN(ret, line_node, dup); // (1)
7650  CHECK(iseq_compile_pattern_match(iseq, ret, RNODE_LIST(n)->nd_head, match_failed, in_single_pattern, in_alt_pattern, base_index + 1 /* (1) */, use_deconstructed_cache));
7651  CHECK(iseq_compile_pattern_each(iseq, ret, RNODE_LIST(RNODE_LIST(n)->nd_next)->nd_head, matched, match_failed, in_single_pattern, in_alt_pattern, base_index, false));
7652  ADD_INSN(ret, line_node, putnil);
7653 
7654  ADD_LABEL(ret, match_failed);
7655  ADD_INSN(ret, line_node, pop);
7656  ADD_INSNL(ret, line_node, jump, unmatched);
7657  break;
7658  }
7659  case NODE_OR: {
7660  LABEL *match_succeeded, *fin;
7661  match_succeeded = NEW_LABEL(line);
7662  fin = NEW_LABEL(line);
7663 
7664  ADD_INSN(ret, line_node, dup); // (1)
7665  CHECK(iseq_compile_pattern_each(iseq, ret, RNODE_OR(node)->nd_1st, match_succeeded, fin, in_single_pattern, true, base_index + 1 /* (1) */, use_deconstructed_cache));
7666  ADD_LABEL(ret, match_succeeded);
7667  ADD_INSN(ret, line_node, pop);
7668  ADD_INSNL(ret, line_node, jump, matched);
7669  ADD_INSN(ret, line_node, putnil);
7670  ADD_LABEL(ret, fin);
7671  CHECK(iseq_compile_pattern_each(iseq, ret, RNODE_OR(node)->nd_2nd, matched, unmatched, in_single_pattern, true, base_index, use_deconstructed_cache));
7672  break;
7673  }
7674  default:
7675  UNKNOWN_NODE("NODE_IN", node, COMPILE_NG);
7676  }
7677  return COMPILE_OK;
7678 }
7679 
7680 static int
7681 iseq_compile_pattern_match(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, LABEL *unmatched, bool in_single_pattern, bool in_alt_pattern, int base_index, bool use_deconstructed_cache)
7682 {
7683  LABEL *fin = NEW_LABEL(nd_line(node));
7684  CHECK(iseq_compile_pattern_each(iseq, ret, node, fin, unmatched, in_single_pattern, in_alt_pattern, base_index, use_deconstructed_cache));
7685  ADD_LABEL(ret, fin);
7686  return COMPILE_OK;
7687 }
7688 
/*
 * Emit the `pattern.constant === obj` guard shared by array, find and hash
 * patterns. If the pattern has no constant part (nd_pconst is NULL) nothing
 * is emitted. On a failed check, control jumps to `match_failed`.
 *
 * The numbered comments track how many extra stack slots have been pushed
 * above `base_index` at each point, so the error-message helper receives a
 * correct offset.
 */
static int
iseq_compile_pattern_constant(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, LABEL *match_failed, bool in_single_pattern, int base_index)
{
    const NODE *line_node = node;

    if (RNODE_ARYPTN(node)->nd_pconst) {
        ADD_INSN(ret, line_node, dup); // (1)
        CHECK(COMPILE(ret, "constant", RNODE_ARYPTN(node)->nd_pconst)); // (2)
        if (in_single_pattern) {
            // Keep copies of the constant and matchee so the "%p === %p"
            // error message can be built if the check fails.
            ADD_INSN1(ret, line_node, dupn, INT2FIX(2));
        }
        ADD_INSN1(ret, line_node, checkmatch, INT2FIX(VM_CHECKMATCH_TYPE_CASE)); // (3)
        if (in_single_pattern) {
            CHECK(iseq_compile_pattern_set_eqq_errmsg(iseq, ret, node, base_index + 3 /* (1), (2), (3) */));
        }
        ADD_INSNL(ret, line_node, branchunless, match_failed);
    }
    return COMPILE_OK;
}
7708 
7709 
/*
 * Emit code that turns the matchee into the Array produced by its
 * #deconstruct method, used by array and find patterns.
 *
 * Emitted logic:
 *   - optionally consult the per-case cache slot (see below);
 *   - check `matchee.respond_to?(:deconstruct)`, jumping to `match_failed`
 *     when it does not (with a nice error message in single-pattern mode);
 *   - call #deconstruct and verify the result is a T_ARRAY, jumping to
 *     `type_error` otherwise;
 *   - leave the deconstructed Array on the stack at label `deconstructed`.
 */
static int
iseq_compile_array_deconstruct(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, LABEL *deconstruct, LABEL *deconstructed, LABEL *match_failed, LABEL *type_error, bool in_single_pattern, int base_index, bool use_deconstructed_cache)
{
    const NODE *line_node = node;

    // NOTE: this optimization allows us to re-use the #deconstruct value
    // (or its absence) across patterns within one `case` dispatch.
    if (use_deconstructed_cache) {
        // If value is nil then we haven't tried to deconstruct
        ADD_INSN1(ret, line_node, topn, INT2FIX(base_index + CASE3_BI_OFFSET_DECONSTRUCTED_CACHE));
        ADD_INSNL(ret, line_node, branchnil, deconstruct);

        // If false then the value is not deconstructable
        ADD_INSN1(ret, line_node, topn, INT2FIX(base_index + CASE3_BI_OFFSET_DECONSTRUCTED_CACHE));
        ADD_INSNL(ret, line_node, branchunless, match_failed);

        // Drop value, add deconstructed to the stack and jump
        ADD_INSN(ret, line_node, pop); // (1)
        ADD_INSN1(ret, line_node, topn, INT2FIX(base_index + CASE3_BI_OFFSET_DECONSTRUCTED_CACHE - 1 /* (1) */));
        ADD_INSNL(ret, line_node, jump, deconstructed);
    }
    else {
        ADD_INSNL(ret, line_node, jump, deconstruct);
    }

    ADD_LABEL(ret, deconstruct);
    ADD_INSN(ret, line_node, dup);
    ADD_INSN1(ret, line_node, putobject, ID2SYM(rb_intern("deconstruct")));
    ADD_SEND(ret, line_node, idRespond_to, INT2FIX(1)); // (2)

    // Cache the result of respond_to? (if it's false it stays there; if true, it's overwritten after #deconstruct)
    if (use_deconstructed_cache) {
        ADD_INSN1(ret, line_node, setn, INT2FIX(base_index + CASE3_BI_OFFSET_DECONSTRUCTED_CACHE + 1 /* (2) */));
    }

    if (in_single_pattern) {
        CHECK(iseq_compile_pattern_set_general_errmsg(iseq, ret, node, rb_fstring_lit("%p does not respond to #deconstruct"), base_index + 1 /* (2) */));
    }

    ADD_INSNL(ret, line_node, branchunless, match_failed);

    ADD_SEND(ret, line_node, rb_intern("deconstruct"), INT2FIX(0));

    // Cache the result (if it's cacheable - currently, only top-level array patterns)
    if (use_deconstructed_cache) {
        ADD_INSN1(ret, line_node, setn, INT2FIX(base_index + CASE3_BI_OFFSET_DECONSTRUCTED_CACHE));
    }

    ADD_INSN(ret, line_node, dup);
    ADD_INSN1(ret, line_node, checktype, INT2FIX(T_ARRAY));
    ADD_INSNL(ret, line_node, branchunless, type_error);

    ADD_LABEL(ret, deconstructed);

    return COMPILE_OK;
}
7766 
/*
 * Emit code that, when the match check whose boolean result is on top of
 * the stack has failed, formats `errmsg` with the matchee (fetched via
 * topn 3) and stores it into the error_string slot reserved by
 * compile_case3; key_error_p is reset to false. Used only in
 * single-pattern mode, where a failing `case/in` raises with a message.
 *
 * Emitted pseudo-code:
 *   if match_succeeded?
 *     goto match_succeeded
 *   end
 *   error_string = FrozenCore.sprintf(errmsg, matchee)
 *   key_error_p = false
 *   match_succeeded:
 */
static int
iseq_compile_pattern_set_general_errmsg(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, VALUE errmsg, int base_index)
{
    const int line = nd_line(node);
    const NODE *line_node = node;
    LABEL *match_succeeded = NEW_LABEL(line);

    // Skip message construction entirely when the check succeeded.
    ADD_INSN(ret, line_node, dup);
    ADD_INSNL(ret, line_node, branchif, match_succeeded);

    ADD_INSN1(ret, line_node, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
    ADD_INSN1(ret, line_node, putobject, errmsg);
    ADD_INSN1(ret, line_node, topn, INT2FIX(3));
    ADD_SEND(ret, line_node, id_core_sprintf, INT2FIX(2)); // (1)
    ADD_INSN1(ret, line_node, setn, INT2FIX(base_index + CASE3_BI_OFFSET_ERROR_STRING + 1 /* (1) */)); // (2)

    ADD_INSN1(ret, line_node, putobject, Qfalse);
    ADD_INSN1(ret, line_node, setn, INT2FIX(base_index + CASE3_BI_OFFSET_KEY_ERROR_P + 2 /* (1), (2) */));

    // Discard the formatted string and the Qfalse left by the setn's.
    ADD_INSN(ret, line_node, pop);
    ADD_INSN(ret, line_node, pop);
    ADD_LABEL(ret, match_succeeded);

    return COMPILE_OK;
}
7800 
/*
 * Like iseq_compile_pattern_set_general_errmsg, but for length mismatches:
 * the message is formatted with the matchee, its runtime #length, and the
 * pattern's expected length (`pattern_length`, a compile-time Fixnum).
 * Used only in single-pattern mode.
 *
 * Emitted pseudo-code:
 *   if match_succeeded?
 *     goto match_succeeded
 *   end
 *   error_string = FrozenCore.sprintf(errmsg, matchee, matchee.length, pat.length)
 *   key_error_p = false
 *   match_succeeded:
 */
static int
iseq_compile_pattern_set_length_errmsg(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, VALUE errmsg, VALUE pattern_length, int base_index)
{
    const int line = nd_line(node);
    const NODE *line_node = node;
    LABEL *match_succeeded = NEW_LABEL(line);

    // Skip message construction entirely when the check succeeded.
    ADD_INSN(ret, line_node, dup);
    ADD_INSNL(ret, line_node, branchif, match_succeeded);

    ADD_INSN1(ret, line_node, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
    ADD_INSN1(ret, line_node, putobject, errmsg);
    ADD_INSN1(ret, line_node, topn, INT2FIX(3));
    // Duplicate the matchee so both `%p` and its #length can be formatted.
    ADD_INSN(ret, line_node, dup);
    ADD_SEND(ret, line_node, idLength, INT2FIX(0));
    ADD_INSN1(ret, line_node, putobject, pattern_length);
    ADD_SEND(ret, line_node, id_core_sprintf, INT2FIX(4)); // (1)
    ADD_INSN1(ret, line_node, setn, INT2FIX(base_index + CASE3_BI_OFFSET_ERROR_STRING + 1 /* (1) */)); // (2)

    ADD_INSN1(ret, line_node, putobject, Qfalse);
    ADD_INSN1(ret, line_node, setn, INT2FIX(base_index + CASE3_BI_OFFSET_KEY_ERROR_P + 2/* (1), (2) */));

    // Discard the formatted string and the Qfalse left by the setn's.
    ADD_INSN(ret, line_node, pop);
    ADD_INSN(ret, line_node, pop);
    ADD_LABEL(ret, match_succeeded);

    return COMPILE_OK;
}
7837 
/*
 * Emit the error-message code for a failed `pattern === matchee` check
 * (see iseq_compile_pattern_constant, which dupn's the pattern and matchee
 * before checkmatch specifically so they can be formatted here).
 * Used only in single-pattern mode.
 *
 * Emitted pseudo-code:
 *   if match_succeeded?
 *     goto match_succeeded
 *   end
 *   error_string = FrozenCore.sprintf("%p === %p does not return true", pat, matchee)
 *   key_error_p = false
 *   match_succeeded:
 *
 * Unlike the other errmsg helpers, this one also cleans up the two extra
 * copies left by the caller's dupn on the success path (setn/pop/pop after
 * the label), so the net stack effect matches the failure path.
 */
static int
iseq_compile_pattern_set_eqq_errmsg(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, int base_index)
{
    const int line = nd_line(node);
    const NODE *line_node = node;
    LABEL *match_succeeded = NEW_LABEL(line);

    ADD_INSN(ret, line_node, dup);
    ADD_INSNL(ret, line_node, branchif, match_succeeded);

    ADD_INSN1(ret, line_node, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
    ADD_INSN1(ret, line_node, putobject, rb_fstring_lit("%p === %p does not return true"));
    ADD_INSN1(ret, line_node, topn, INT2FIX(3));
    ADD_INSN1(ret, line_node, topn, INT2FIX(5));
    ADD_SEND(ret, line_node, id_core_sprintf, INT2FIX(3)); // (1)
    ADD_INSN1(ret, line_node, setn, INT2FIX(base_index + CASE3_BI_OFFSET_ERROR_STRING + 1 /* (1) */)); // (2)

    ADD_INSN1(ret, line_node, putobject, Qfalse);
    ADD_INSN1(ret, line_node, setn, INT2FIX(base_index + CASE3_BI_OFFSET_KEY_ERROR_P + 2 /* (1), (2) */));

    ADD_INSN(ret, line_node, pop);
    ADD_INSN(ret, line_node, pop);

    ADD_LABEL(ret, match_succeeded);
    // Move the match result below the dupn'd copies, then drop them.
    ADD_INSN1(ret, line_node, setn, INT2FIX(2));
    ADD_INSN(ret, line_node, pop);
    ADD_INSN(ret, line_node, pop);

    return COMPILE_OK;
}
7876 
7877 static int
7878 compile_case3(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const orig_node, int popped)
7879 {
7880  const NODE *pattern;
7881  const NODE *node = orig_node;
7882  LABEL *endlabel, *elselabel;
7883  DECL_ANCHOR(head);
7884  DECL_ANCHOR(body_seq);
7885  DECL_ANCHOR(cond_seq);
7886  int line;
7887  enum node_type type;
7888  const NODE *line_node;
7889  VALUE branches = 0;
7890  int branch_id = 0;
7891  bool single_pattern;
7892 
7893  INIT_ANCHOR(head);
7894  INIT_ANCHOR(body_seq);
7895  INIT_ANCHOR(cond_seq);
7896 
7897  branches = decl_branch_base(iseq, PTR2NUM(node), nd_code_loc(node), "case");
7898 
7899  node = RNODE_CASE3(node)->nd_body;
7900  EXPECT_NODE("NODE_CASE3", node, NODE_IN, COMPILE_NG);
7901  type = nd_type(node);
7902  line = nd_line(node);
7903  line_node = node;
7904  single_pattern = !RNODE_IN(node)->nd_next;
7905 
7906  endlabel = NEW_LABEL(line);
7907  elselabel = NEW_LABEL(line);
7908 
7909  if (single_pattern) {
7910  /* allocate stack for ... */
7911  ADD_INSN(head, line_node, putnil); /* key_error_key */
7912  ADD_INSN(head, line_node, putnil); /* key_error_matchee */
7913  ADD_INSN1(head, line_node, putobject, Qfalse); /* key_error_p */
7914  ADD_INSN(head, line_node, putnil); /* error_string */
7915  }
7916  ADD_INSN(head, line_node, putnil); /* allocate stack for cached #deconstruct value */
7917 
7918  CHECK(COMPILE(head, "case base", RNODE_CASE3(orig_node)->nd_head));
7919 
7920  ADD_SEQ(ret, head); /* case VAL */
7921 
7922  while (type == NODE_IN) {
7923  LABEL *l1;
7924 
7925  if (branch_id) {
7926  ADD_INSN(body_seq, line_node, putnil);
7927  }
7928  l1 = NEW_LABEL(line);
7929  ADD_LABEL(body_seq, l1);
7930  ADD_INSN1(body_seq, line_node, adjuststack, INT2FIX(single_pattern ? 6 : 2));
7931 
7932  const NODE *const coverage_node = RNODE_IN(node)->nd_body ? RNODE_IN(node)->nd_body : node;
7933  add_trace_branch_coverage(
7934  iseq,
7935  body_seq,
7936  nd_code_loc(coverage_node),
7937  nd_node_id(coverage_node),
7938  branch_id++,
7939  "in",
7940  branches);
7941 
7942  CHECK(COMPILE_(body_seq, "in body", RNODE_IN(node)->nd_body, popped));
7943  ADD_INSNL(body_seq, line_node, jump, endlabel);
7944 
7945  pattern = RNODE_IN(node)->nd_head;
7946  if (pattern) {
7947  int pat_line = nd_line(pattern);
7948  LABEL *next_pat = NEW_LABEL(pat_line);
7949  ADD_INSN (cond_seq, pattern, dup); /* dup case VAL */
7950  // NOTE: set base_index (it's "under" the matchee value, so it's position is 2)
7951  CHECK(iseq_compile_pattern_each(iseq, cond_seq, pattern, l1, next_pat, single_pattern, false, 2, true));
7952  ADD_LABEL(cond_seq, next_pat);
7953  LABEL_UNREMOVABLE(next_pat);
7954  }
7955  else {
7956  COMPILE_ERROR(ERROR_ARGS "unexpected node");
7957  return COMPILE_NG;
7958  }
7959 
7960  node = RNODE_IN(node)->nd_next;
7961  if (!node) {
7962  break;
7963  }
7964  type = nd_type(node);
7965  line = nd_line(node);
7966  line_node = node;
7967  }
7968  /* else */
7969  if (node) {
7970  ADD_LABEL(cond_seq, elselabel);
7971  ADD_INSN(cond_seq, line_node, pop);
7972  ADD_INSN(cond_seq, line_node, pop); /* discard cached #deconstruct value */
7973  add_trace_branch_coverage(iseq, cond_seq, nd_code_loc(node), nd_node_id(node), branch_id, "else", branches);
7974  CHECK(COMPILE_(cond_seq, "else", node, popped));
7975  ADD_INSNL(cond_seq, line_node, jump, endlabel);
7976  ADD_INSN(cond_seq, line_node, putnil);
7977  if (popped) {
7978  ADD_INSN(cond_seq, line_node, putnil);
7979  }
7980  }
7981  else {
7982  debugs("== else (implicit)\n");
7983  ADD_LABEL(cond_seq, elselabel);
7984  add_trace_branch_coverage(iseq, cond_seq, nd_code_loc(orig_node), nd_node_id(orig_node), branch_id, "else", branches);
7985  ADD_INSN1(cond_seq, orig_node, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
7986 
7987  if (single_pattern) {
7988  /*
7989  * if key_error_p
7990  * FrozenCore.raise NoMatchingPatternKeyError.new(FrozenCore.sprintf("%p: %s", case_val, error_string), matchee: key_error_matchee, key: key_error_key)
7991  * else
7992  * FrozenCore.raise NoMatchingPatternError, FrozenCore.sprintf("%p: %s", case_val, error_string)
7993  * end
7994  */
7995  LABEL *key_error, *fin;
7996  struct rb_callinfo_kwarg *kw_arg;
7997 
7998  key_error = NEW_LABEL(line);
7999  fin = NEW_LABEL(line);
8000 
8001  kw_arg = rb_xmalloc_mul_add(2, sizeof(VALUE), sizeof(struct rb_callinfo_kwarg));
8002  kw_arg->references = 0;
8003  kw_arg->keyword_len = 2;
8004  kw_arg->keywords[0] = ID2SYM(rb_intern("matchee"));
8005  kw_arg->keywords[1] = ID2SYM(rb_intern("key"));
8006 
8007  ADD_INSN1(cond_seq, orig_node, topn, INT2FIX(CASE3_BI_OFFSET_KEY_ERROR_P + 2));
8008  ADD_INSNL(cond_seq, orig_node, branchif, key_error);
8009  ADD_INSN1(cond_seq, orig_node, putobject, rb_eNoMatchingPatternError);
8010  ADD_INSN1(cond_seq, orig_node, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
8011  ADD_INSN1(cond_seq, orig_node, putobject, rb_fstring_lit("%p: %s"));
8012  ADD_INSN1(cond_seq, orig_node, topn, INT2FIX(4)); /* case VAL */
8013  ADD_INSN1(cond_seq, orig_node, topn, INT2FIX(CASE3_BI_OFFSET_ERROR_STRING + 6));
8014  ADD_SEND(cond_seq, orig_node, id_core_sprintf, INT2FIX(3));
8015  ADD_SEND(cond_seq, orig_node, id_core_raise, INT2FIX(2));
8016  ADD_INSNL(cond_seq, orig_node, jump, fin);
8017 
8018  ADD_LABEL(cond_seq, key_error);
8019  ADD_INSN1(cond_seq, orig_node, putobject, rb_eNoMatchingPatternKeyError);
8020  ADD_INSN1(cond_seq, orig_node, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
8021  ADD_INSN1(cond_seq, orig_node, putobject, rb_fstring_lit("%p: %s"));
8022  ADD_INSN1(cond_seq, orig_node, topn, INT2FIX(4)); /* case VAL */
8023  ADD_INSN1(cond_seq, orig_node, topn, INT2FIX(CASE3_BI_OFFSET_ERROR_STRING + 6));
8024  ADD_SEND(cond_seq, orig_node, id_core_sprintf, INT2FIX(3));
8025  ADD_INSN1(cond_seq, orig_node, topn, INT2FIX(CASE3_BI_OFFSET_KEY_ERROR_MATCHEE + 4));
8026  ADD_INSN1(cond_seq, orig_node, topn, INT2FIX(CASE3_BI_OFFSET_KEY_ERROR_KEY + 5));
8027  ADD_SEND_R(cond_seq, orig_node, rb_intern("new"), INT2FIX(1), NULL, INT2FIX(VM_CALL_KWARG), kw_arg);
8028  ADD_SEND(cond_seq, orig_node, id_core_raise, INT2FIX(1));
8029 
8030  ADD_LABEL(cond_seq, fin);
8031  }
8032  else {
8033  ADD_INSN1(cond_seq, orig_node, putobject, rb_eNoMatchingPatternError);
8034  ADD_INSN1(cond_seq, orig_node, topn, INT2FIX(2));
8035  ADD_SEND(cond_seq, orig_node, id_core_raise, INT2FIX(2));
8036  }
8037  ADD_INSN1(cond_seq, orig_node, adjuststack, INT2FIX(single_pattern ? 7 : 3));
8038  if (!popped) {
8039  ADD_INSN(cond_seq, orig_node, putnil);
8040  }
8041  ADD_INSNL(cond_seq, orig_node, jump, endlabel);
8042  ADD_INSN1(cond_seq, orig_node, dupn, INT2FIX(single_pattern ? 5 : 1));
8043  if (popped) {
8044  ADD_INSN(cond_seq, line_node, putnil);
8045  }
8046  }
8047 
8048  ADD_SEQ(ret, cond_seq);
8049  ADD_SEQ(ret, body_seq);
8050  ADD_LABEL(ret, endlabel);
8051  return COMPILE_OK;
8052 }
8053 
8054 #undef CASE3_BI_OFFSET_DECONSTRUCTED_CACHE
8055 #undef CASE3_BI_OFFSET_ERROR_STRING
8056 #undef CASE3_BI_OFFSET_KEY_ERROR_P
8057 #undef CASE3_BI_OFFSET_KEY_ERROR_MATCHEE
8058 #undef CASE3_BI_OFFSET_KEY_ERROR_KEY
8059 
8060 static int
8061 compile_loop(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, int popped, const enum node_type type)
8062 {
8063  const int line = (int)nd_line(node);
8064  const NODE *line_node = node;
8065 
8066  LABEL *prev_start_label = ISEQ_COMPILE_DATA(iseq)->start_label;
8067  LABEL *prev_end_label = ISEQ_COMPILE_DATA(iseq)->end_label;
8068  LABEL *prev_redo_label = ISEQ_COMPILE_DATA(iseq)->redo_label;
8069  int prev_loopval_popped = ISEQ_COMPILE_DATA(iseq)->loopval_popped;
8070  VALUE branches = Qfalse;
8071 
8073 
8074  LABEL *next_label = ISEQ_COMPILE_DATA(iseq)->start_label = NEW_LABEL(line); /* next */
8075  LABEL *redo_label = ISEQ_COMPILE_DATA(iseq)->redo_label = NEW_LABEL(line); /* redo */
8076  LABEL *break_label = ISEQ_COMPILE_DATA(iseq)->end_label = NEW_LABEL(line); /* break */
8077  LABEL *end_label = NEW_LABEL(line);
8078  LABEL *adjust_label = NEW_LABEL(line);
8079 
8080  LABEL *next_catch_label = NEW_LABEL(line);
8081  LABEL *tmp_label = NULL;
8082 
8083  ISEQ_COMPILE_DATA(iseq)->loopval_popped = 0;
8084  push_ensure_entry(iseq, &enl, NULL, NULL);
8085 
8086  if (RNODE_WHILE(node)->nd_state == 1) {
8087  ADD_INSNL(ret, line_node, jump, next_label);
8088  }
8089  else {
8090  tmp_label = NEW_LABEL(line);
8091  ADD_INSNL(ret, line_node, jump, tmp_label);
8092  }
8093  ADD_LABEL(ret, adjust_label);
8094  ADD_INSN(ret, line_node, putnil);
8095  ADD_LABEL(ret, next_catch_label);
8096  ADD_INSN(ret, line_node, pop);
8097  ADD_INSNL(ret, line_node, jump, next_label);
8098  if (tmp_label) ADD_LABEL(ret, tmp_label);
8099 
8100  ADD_LABEL(ret, redo_label);
8101  branches = decl_branch_base(iseq, PTR2NUM(node), nd_code_loc(node), type == NODE_WHILE ? "while" : "until");
8102 
8103  const NODE *const coverage_node = RNODE_WHILE(node)->nd_body ? RNODE_WHILE(node)->nd_body : node;
8104  add_trace_branch_coverage(
8105  iseq,
8106  ret,
8107  nd_code_loc(coverage_node),
8108  nd_node_id(coverage_node),
8109  0,
8110  "body",
8111  branches);
8112 
8113  CHECK(COMPILE_POPPED(ret, "while body", RNODE_WHILE(node)->nd_body));
8114  ADD_LABEL(ret, next_label); /* next */
8115 
8116  if (type == NODE_WHILE) {
8117  CHECK(compile_branch_condition(iseq, ret, RNODE_WHILE(node)->nd_cond,
8118  redo_label, end_label));
8119  }
8120  else {
8121  /* until */
8122  CHECK(compile_branch_condition(iseq, ret, RNODE_WHILE(node)->nd_cond,
8123  end_label, redo_label));
8124  }
8125 
8126  ADD_LABEL(ret, end_label);
8127  ADD_ADJUST_RESTORE(ret, adjust_label);
8128 
8129  if (UNDEF_P(RNODE_WHILE(node)->nd_state)) {
8130  /* ADD_INSN(ret, line_node, putundef); */
8131  COMPILE_ERROR(ERROR_ARGS "unsupported: putundef");
8132  return COMPILE_NG;
8133  }
8134  else {
8135  ADD_INSN(ret, line_node, putnil);
8136  }
8137 
8138  ADD_LABEL(ret, break_label); /* break */
8139 
8140  if (popped) {
8141  ADD_INSN(ret, line_node, pop);
8142  }
8143 
8144  ADD_CATCH_ENTRY(CATCH_TYPE_BREAK, redo_label, break_label, NULL,
8145  break_label);
8146  ADD_CATCH_ENTRY(CATCH_TYPE_NEXT, redo_label, break_label, NULL,
8147  next_catch_label);
8148  ADD_CATCH_ENTRY(CATCH_TYPE_REDO, redo_label, break_label, NULL,
8149  ISEQ_COMPILE_DATA(iseq)->redo_label);
8150 
8151  ISEQ_COMPILE_DATA(iseq)->start_label = prev_start_label;
8152  ISEQ_COMPILE_DATA(iseq)->end_label = prev_end_label;
8153  ISEQ_COMPILE_DATA(iseq)->redo_label = prev_redo_label;
8154  ISEQ_COMPILE_DATA(iseq)->loopval_popped = prev_loopval_popped;
8155  ISEQ_COMPILE_DATA(iseq)->ensure_node_stack = ISEQ_COMPILE_DATA(iseq)->ensure_node_stack->prev;
8156  return COMPILE_OK;
8157 }
8158 
static int
compile_iter(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, int popped)
{
    /*
     * Compile a block-taking construct: `for ... in expr` (NODE_FOR) or a
     * method call with a literal block (NODE_ITER).  The block body becomes
     * a child BLOCK-type iseq, and a BREAK catch entry spanning the call is
     * registered so `break` inside the block returns to this point.
     */
    const int line = nd_line(node);
    const NODE *line_node = node;
    const rb_iseq_t *prevblock = ISEQ_COMPILE_DATA(iseq)->current_block;
    LABEL *retry_label = NEW_LABEL(line);
    LABEL *retry_end_l = NEW_LABEL(line);
    const rb_iseq_t *child_iseq;

    ADD_LABEL(ret, retry_label);
    if (nd_type_p(node, NODE_FOR)) {
        /* for-loop: compile the iterated expression first, then send #each
         * with the block. */
        CHECK(COMPILE(ret, "iter caller (for)", RNODE_FOR(node)->nd_iter));

        ISEQ_COMPILE_DATA(iseq)->current_block = child_iseq =
            NEW_CHILD_ISEQ(RNODE_FOR(node)->nd_body, make_name_for_block(iseq),
                           ISEQ_TYPE_BLOCK, line);
        ADD_SEND_WITH_BLOCK(ret, line_node, idEach, INT2FIX(0), child_iseq);
    }
    else {
        /* ordinary iterator call: the block iseq must be current_block while
         * the call itself is compiled. */
        ISEQ_COMPILE_DATA(iseq)->current_block = child_iseq =
            NEW_CHILD_ISEQ(RNODE_ITER(node)->nd_body, make_name_for_block(iseq),
                           ISEQ_TYPE_BLOCK, line);
        CHECK(COMPILE(ret, "iter caller", RNODE_ITER(node)->nd_iter));
    }

    {
        // We need to put the label "retry_end_l" immediately after the last "send" instruction.
        // This because vm_throw checks if the break cont is equal to the index of next insn of the "send".
        // (Otherwise, it is considered "break from proc-closure". See "TAG_BREAK" handling in "vm_throw_start".)
        //
        // Normally, "send" instruction is at the last.
        // However, qcall under branch coverage measurement adds some instructions after the "send".
        //
        // Note that "invokesuper", "invokesuperforward" appears instead of "send".
        INSN *iobj;
        LINK_ELEMENT *last_elem = LAST_ELEMENT(ret);
        iobj = IS_INSN(last_elem) ? (INSN*) last_elem : (INSN*) get_prev_insn((INSN*) last_elem);
        while (!IS_INSN_ID(iobj, send) && !IS_INSN_ID(iobj, invokesuper) && !IS_INSN_ID(iobj, sendforward) && !IS_INSN_ID(iobj, invokesuperforward)) {
            iobj = (INSN*) get_prev_insn(iobj);
        }
        ELEM_INSERT_NEXT(&iobj->link, (LINK_ELEMENT*) retry_end_l);

        // LINK_ANCHOR has a pointer to the last element, but ELEM_INSERT_NEXT does not update it
        // even if we add an insn to the last of LINK_ANCHOR. So this updates it manually.
        if (&iobj->link == LAST_ELEMENT(ret)) {
            ret->last = (LINK_ELEMENT*) retry_end_l;
        }
    }

    if (popped) {
        ADD_INSN(ret, line_node, pop);
    }

    /* restore the enclosing block context */
    ISEQ_COMPILE_DATA(iseq)->current_block = prevblock;

    ADD_CATCH_ENTRY(CATCH_TYPE_BREAK, retry_label, retry_end_l, child_iseq, retry_end_l);
    return COMPILE_OK;
}
8218 
static int
compile_for_masgn(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, int popped)
{
    /* massign to var in "for"
     * (args.length == 1 && Array.try_convert(args[0])) || args
     *
     * Stack states are noted to the right of each instruction.
     */
    const NODE *line_node = node;
    const NODE *var = RNODE_FOR_MASGN(node)->nd_var;
    LABEL *not_single = NEW_LABEL(nd_line(var));
    LABEL *not_ary = NEW_LABEL(nd_line(var));
    CHECK(COMPILE(ret, "for var", var));                            /* args */
    ADD_INSN(ret, line_node, dup);                                  /* args, args */
    ADD_CALL(ret, line_node, idLength, INT2FIX(0));                 /* args, args.length */
    ADD_INSN1(ret, line_node, putobject, INT2FIX(1));               /* args, args.length, 1 */
    ADD_CALL(ret, line_node, idEq, INT2FIX(1));                     /* args, (args.length == 1) */
    ADD_INSNL(ret, line_node, branchunless, not_single);            /* args */
    ADD_INSN(ret, line_node, dup);                                  /* args, args */
    ADD_INSN1(ret, line_node, putobject, INT2FIX(0));               /* args, args, 0 */
    ADD_CALL(ret, line_node, idAREF, INT2FIX(1));                   /* args, args[0] */
    ADD_INSN1(ret, line_node, putobject, rb_cArray);                /* args, args[0], Array */
    ADD_INSN(ret, line_node, swap);                                 /* args, Array, args[0] */
    ADD_CALL(ret, line_node, rb_intern("try_convert"), INT2FIX(1)); /* args, converted-or-nil */
    ADD_INSN(ret, line_node, dup);
    ADD_INSNL(ret, line_node, branchunless, not_ary);               /* conversion failed: keep args */
    ADD_INSN(ret, line_node, swap);                                 /* converted, args */
    ADD_LABEL(ret, not_ary);
    ADD_INSN(ret, line_node, pop);                                  /* drop whichever value lost */
    ADD_LABEL(ret, not_single);
    return COMPILE_OK;
}
8249 
static int
compile_break(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, int popped)
{
    /*
     * Compile `break [val]`.  Two strategies:
     *  - when directly inside a while/until of this iseq (redo_label set)
     *    and the ensure stack allows inlining: run ensure bodies and jump
     *    to the loop's end label;
     *  - otherwise walk up the parent iseq chain for a frame that can
     *    receive a break and emit `throw TAG_BREAK` (with
     *    VM_THROW_NO_ESCAPE_FLAG when the target is a loop in an outer
     *    frame rather than a block return).
     */
    const NODE *line_node = node;
    unsigned long throw_flag = 0;

    if (ISEQ_COMPILE_DATA(iseq)->redo_label != 0 && can_add_ensure_iseq(iseq)) {
        /* while/until */
        LABEL *splabel = NEW_LABEL(0);
        ADD_LABEL(ret, splabel);
        ADD_ADJUST(ret, line_node, ISEQ_COMPILE_DATA(iseq)->redo_label);
        CHECK(COMPILE_(ret, "break val (while/until)", RNODE_BREAK(node)->nd_stts,
                       ISEQ_COMPILE_DATA(iseq)->loopval_popped));
        add_ensure_iseq(ret, iseq, 0);
        ADD_INSNL(ret, line_node, jump, ISEQ_COMPILE_DATA(iseq)->end_label);
        ADD_ADJUST_RESTORE(ret, splabel);

        if (!popped) {
            ADD_INSN(ret, line_node, putnil);
        }
    }
    else {
        const rb_iseq_t *ip = iseq;

        while (ip) {
            if (!ISEQ_COMPILE_DATA(ip)) {
                /* no compile data left: cannot resolve a break target */
                ip = 0;
                break;
            }

            if (ISEQ_COMPILE_DATA(ip)->redo_label != 0) {
                /* break targets a loop in an enclosing frame */
                throw_flag = VM_THROW_NO_ESCAPE_FLAG;
            }
            else if (ISEQ_BODY(ip)->type == ISEQ_TYPE_BLOCK) {
                throw_flag = 0;
            }
            else if (ISEQ_BODY(ip)->type == ISEQ_TYPE_EVAL) {
                COMPILE_ERROR(ERROR_ARGS "Can't escape from eval with break");
                return COMPILE_NG;
            }
            else {
                ip = ISEQ_BODY(ip)->parent_iseq;
                continue;
            }

            /* escape from block */
            CHECK(COMPILE(ret, "break val (block)", RNODE_BREAK(node)->nd_stts));
            ADD_INSN1(ret, line_node, throw, INT2FIX(throw_flag | TAG_BREAK));
            if (popped) {
                ADD_INSN(ret, line_node, pop);
            }
            return COMPILE_OK;
        }
        COMPILE_ERROR(ERROR_ARGS "Invalid break");
        return COMPILE_NG;
    }
    return COMPILE_OK;
}
8308 
static int
compile_next(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, int popped)
{
    /*
     * Compile `next [val]`.  Three strategies:
     *  - inside a while/until of this iseq: jump to the loop's start label;
     *  - inside a block with a known end label: jump there (block "return");
     *  - otherwise search the parent iseq chain and emit `throw TAG_NEXT`.
     */
    const NODE *line_node = node;
    unsigned long throw_flag = 0;

    if (ISEQ_COMPILE_DATA(iseq)->redo_label != 0 && can_add_ensure_iseq(iseq)) {
        LABEL *splabel = NEW_LABEL(0);
        debugs("next in while loop\n");
        ADD_LABEL(ret, splabel);
        CHECK(COMPILE(ret, "next val/valid syntax?", RNODE_NEXT(node)->nd_stts));
        add_ensure_iseq(ret, iseq, 0);
        ADD_ADJUST(ret, line_node, ISEQ_COMPILE_DATA(iseq)->redo_label);
        ADD_INSNL(ret, line_node, jump, ISEQ_COMPILE_DATA(iseq)->start_label);
        ADD_ADJUST_RESTORE(ret, splabel);
        if (!popped) {
            ADD_INSN(ret, line_node, putnil);
        }
    }
    else if (ISEQ_COMPILE_DATA(iseq)->end_label && can_add_ensure_iseq(iseq)) {
        LABEL *splabel = NEW_LABEL(0);
        debugs("next in block\n");
        ADD_LABEL(ret, splabel);
        ADD_ADJUST(ret, line_node, ISEQ_COMPILE_DATA(iseq)->start_label);
        CHECK(COMPILE(ret, "next val", RNODE_NEXT(node)->nd_stts));
        add_ensure_iseq(ret, iseq, 0);
        ADD_INSNL(ret, line_node, jump, ISEQ_COMPILE_DATA(iseq)->end_label);
        ADD_ADJUST_RESTORE(ret, splabel);

        if (!popped) {
            ADD_INSN(ret, line_node, putnil);
        }
    }
    else {
        const rb_iseq_t *ip = iseq;

        while (ip) {
            if (!ISEQ_COMPILE_DATA(ip)) {
                ip = 0;
                break;
            }

            throw_flag = VM_THROW_NO_ESCAPE_FLAG;
            if (ISEQ_COMPILE_DATA(ip)->redo_label != 0) {
                /* while loop */
                break;
            }
            else if (ISEQ_BODY(ip)->type == ISEQ_TYPE_BLOCK) {
                break;
            }
            else if (ISEQ_BODY(ip)->type == ISEQ_TYPE_EVAL) {
                COMPILE_ERROR(ERROR_ARGS "Can't escape from eval with next");
                return COMPILE_NG;
            }

            ip = ISEQ_BODY(ip)->parent_iseq;
        }
        if (ip != 0) {
            CHECK(COMPILE(ret, "next val", RNODE_NEXT(node)->nd_stts));
            ADD_INSN1(ret, line_node, throw, INT2FIX(throw_flag | TAG_NEXT));

            if (popped) {
                ADD_INSN(ret, line_node, pop);
            }
        }
        else {
            COMPILE_ERROR(ERROR_ARGS "Invalid next");
            return COMPILE_NG;
        }
    }
    return COMPILE_OK;
}
8381 
static int
compile_redo(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, int popped)
{
    /*
     * Compile `redo`.  Mirrors compile_next: jump directly to the loop's
     * redo label or the block's start label when possible, else search the
     * parent iseq chain and emit `throw TAG_REDO`.
     */
    const NODE *line_node = node;

    if (ISEQ_COMPILE_DATA(iseq)->redo_label && can_add_ensure_iseq(iseq)) {
        LABEL *splabel = NEW_LABEL(0);
        debugs("redo in while");
        ADD_LABEL(ret, splabel);
        ADD_ADJUST(ret, line_node, ISEQ_COMPILE_DATA(iseq)->redo_label);
        add_ensure_iseq(ret, iseq, 0);
        ADD_INSNL(ret, line_node, jump, ISEQ_COMPILE_DATA(iseq)->redo_label);
        ADD_ADJUST_RESTORE(ret, splabel);
        if (!popped) {
            ADD_INSN(ret, line_node, putnil);
        }
    }
    else if (ISEQ_BODY(iseq)->type != ISEQ_TYPE_EVAL && ISEQ_COMPILE_DATA(iseq)->start_label && can_add_ensure_iseq(iseq)) {
        LABEL *splabel = NEW_LABEL(0);

        debugs("redo in block");
        ADD_LABEL(ret, splabel);
        add_ensure_iseq(ret, iseq, 0);
        ADD_ADJUST(ret, line_node, ISEQ_COMPILE_DATA(iseq)->start_label);
        ADD_INSNL(ret, line_node, jump, ISEQ_COMPILE_DATA(iseq)->start_label);
        ADD_ADJUST_RESTORE(ret, splabel);

        if (!popped) {
            ADD_INSN(ret, line_node, putnil);
        }
    }
    else {
        const rb_iseq_t *ip = iseq;

        while (ip) {
            if (!ISEQ_COMPILE_DATA(ip)) {
                ip = 0;
                break;
            }

            if (ISEQ_COMPILE_DATA(ip)->redo_label != 0) {
                break;
            }
            else if (ISEQ_BODY(ip)->type == ISEQ_TYPE_BLOCK) {
                break;
            }
            else if (ISEQ_BODY(ip)->type == ISEQ_TYPE_EVAL) {
                COMPILE_ERROR(ERROR_ARGS "Can't escape from eval with redo");
                return COMPILE_NG;
            }

            ip = ISEQ_BODY(ip)->parent_iseq;
        }
        if (ip != 0) {
            /* redo carries no value: push nil as the throw operand */
            ADD_INSN(ret, line_node, putnil);
            ADD_INSN1(ret, line_node, throw, INT2FIX(VM_THROW_NO_ESCAPE_FLAG | TAG_REDO));

            if (popped) {
                ADD_INSN(ret, line_node, pop);
            }
        }
        else {
            COMPILE_ERROR(ERROR_ARGS "Invalid redo");
            return COMPILE_NG;
        }
    }
    return COMPILE_OK;
}
8450 
8451 static int
8452 compile_retry(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, int popped)
8453 {
8454  const NODE *line_node = node;
8455 
8456  if (ISEQ_BODY(iseq)->type == ISEQ_TYPE_RESCUE) {
8457  ADD_INSN(ret, line_node, putnil);
8458  ADD_INSN1(ret, line_node, throw, INT2FIX(TAG_RETRY));
8459 
8460  if (popped) {
8461  ADD_INSN(ret, line_node, pop);
8462  }
8463  }
8464  else {
8465  COMPILE_ERROR(ERROR_ARGS "Invalid retry");
8466  return COMPILE_NG;
8467  }
8468  return COMPILE_OK;
8469 }
8470 
static int
compile_rescue(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, int popped)
{
    /*
     * Compile `begin ... rescue ... [else ...] end`.  The rescue clauses go
     * into a separate RESCUE-type child iseq; catch table entries route
     * exceptions raised in [lstart, lend) to it, and a RETRY entry routes a
     * retry from [lend, lcont) back to lstart.
     */
    const int line = nd_line(node);
    const NODE *line_node = node;
    LABEL *lstart = NEW_LABEL(line);
    LABEL *lend = NEW_LABEL(line);
    LABEL *lcont = NEW_LABEL(line);
    const rb_iseq_t *rescue = NEW_CHILD_ISEQ(RNODE_RESCUE(node)->nd_resq,
                                             rb_str_concat(rb_str_new2("rescue in "),
                                                           ISEQ_BODY(iseq)->location.label),
                                             ISEQ_TYPE_RESCUE, line);

    lstart->rescued = LABEL_RESCUE_BEG;
    lend->rescued = LABEL_RESCUE_END;
    ADD_LABEL(ret, lstart);

    /* flag that we are inside a rescued region while compiling the head */
    bool prev_in_rescue = ISEQ_COMPILE_DATA(iseq)->in_rescue;
    ISEQ_COMPILE_DATA(iseq)->in_rescue = true;
    {
        CHECK(COMPILE(ret, "rescue head", RNODE_RESCUE(node)->nd_head));
    }
    ISEQ_COMPILE_DATA(iseq)->in_rescue = prev_in_rescue;

    ADD_LABEL(ret, lend);
    if (RNODE_RESCUE(node)->nd_else) {
        /* else clause: discard the head's value and use the else value */
        ADD_INSN(ret, line_node, pop);
        CHECK(COMPILE(ret, "rescue else", RNODE_RESCUE(node)->nd_else));
    }
    ADD_INSN(ret, line_node, nop);
    ADD_LABEL(ret, lcont);

    if (popped) {
        ADD_INSN(ret, line_node, pop);
    }

    /* register catch entry */
    ADD_CATCH_ENTRY(CATCH_TYPE_RESCUE, lstart, lend, rescue, lcont);
    ADD_CATCH_ENTRY(CATCH_TYPE_RETRY, lend, lcont, NULL, lstart);
    return COMPILE_OK;
}
8512 
static int
compile_resbody(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, int popped)
{
    /*
     * Compile a chain of rescue clauses (NODE_RESBODY list) inside a
     * RESCUE-type iseq.  For each clause: match the raised exception
     * (read from the errinfo local) against the clause's class list via
     * `checkmatch`; on a hit run the optional exc-var assignment and the
     * clause body, then `leave`; on a miss fall through to the next clause.
     */
    const int line = nd_line(node);
    const NODE *line_node = node;
    const NODE *resq = node;
    const NODE *narg;
    LABEL *label_miss, *label_hit;

    while (resq) {
        label_miss = NEW_LABEL(line);
        label_hit = NEW_LABEL(line);

        narg = RNODE_RESBODY(resq)->nd_args;
        if (narg) {
            switch (nd_type(narg)) {
              case NODE_LIST:
                /* explicit class list: test each entry in turn */
                while (narg) {
                    ADD_GETLOCAL(ret, line_node, LVAR_ERRINFO, 0);
                    CHECK(COMPILE(ret, "rescue arg", RNODE_LIST(narg)->nd_head));
                    ADD_INSN1(ret, line_node, checkmatch, INT2FIX(VM_CHECKMATCH_TYPE_RESCUE));
                    ADD_INSNL(ret, line_node, branchif, label_hit);
                    narg = RNODE_LIST(narg)->nd_next;
                }
                break;
              case NODE_SPLAT:
              case NODE_ARGSCAT:
              case NODE_ARGSPUSH:
                /* splatted class list: match against the whole array */
                ADD_GETLOCAL(ret, line_node, LVAR_ERRINFO, 0);
                CHECK(COMPILE(ret, "rescue/cond splat", narg));
                ADD_INSN1(ret, line_node, checkmatch, INT2FIX(VM_CHECKMATCH_TYPE_RESCUE | VM_CHECKMATCH_ARRAY));
                ADD_INSNL(ret, line_node, branchif, label_hit);
                break;
              default:
                UNKNOWN_NODE("NODE_RESBODY", narg, COMPILE_NG);
            }
        }
        else {
            /* bare `rescue` defaults to StandardError */
            ADD_GETLOCAL(ret, line_node, LVAR_ERRINFO, 0);
            ADD_INSN1(ret, line_node, putobject, rb_eStandardError);
            ADD_INSN1(ret, line_node, checkmatch, INT2FIX(VM_CHECKMATCH_TYPE_RESCUE));
            ADD_INSNL(ret, line_node, branchif, label_hit);
        }
        ADD_INSNL(ret, line_node, jump, label_miss);
        ADD_LABEL(ret, label_hit);
        ADD_TRACE(ret, RUBY_EVENT_RESCUE);

        if (RNODE_RESBODY(resq)->nd_exc_var) {
            /* `rescue E => e` assignment */
            CHECK(COMPILE_POPPED(ret, "resbody exc_var", RNODE_RESBODY(resq)->nd_exc_var));
        }

        if (nd_type(RNODE_RESBODY(resq)->nd_body) == NODE_BEGIN && RNODE_BEGIN(RNODE_RESBODY(resq)->nd_body)->nd_body == NULL && !RNODE_RESBODY(resq)->nd_exc_var) {
            // empty body
            ADD_SYNTHETIC_INSN(ret, nd_line(RNODE_RESBODY(resq)->nd_body), -1, putnil);
        }
        else {
            CHECK(COMPILE(ret, "resbody body", RNODE_RESBODY(resq)->nd_body));
        }

        if (ISEQ_COMPILE_DATA(iseq)->option->tailcall_optimization) {
            /* keep a nop so the tailcall optimizer has an insertion point */
            ADD_INSN(ret, line_node, nop);
        }
        ADD_INSN(ret, line_node, leave);
        ADD_LABEL(ret, label_miss);
        resq = RNODE_RESBODY(resq)->nd_next;
    }
    return COMPILE_OK;
}
8581 
8582 static int
8583 compile_ensure(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, int popped)
8584 {
8585  const int line = nd_line(RNODE_ENSURE(node)->nd_ensr);
8586  const NODE *line_node = node;
8587  DECL_ANCHOR(ensr);
8588  const rb_iseq_t *ensure = NEW_CHILD_ISEQ(RNODE_ENSURE(node)->nd_ensr,
8589  rb_str_concat(rb_str_new2 ("ensure in "), ISEQ_BODY(iseq)->location.label),
8590  ISEQ_TYPE_ENSURE, line);
8591  LABEL *lstart = NEW_LABEL(line);
8592  LABEL *lend = NEW_LABEL(line);
8593  LABEL *lcont = NEW_LABEL(line);
8594  LINK_ELEMENT *last;
8595  int last_leave = 0;
8596  struct ensure_range er;
8598  struct ensure_range *erange;
8599 
8600  INIT_ANCHOR(ensr);
8601  CHECK(COMPILE_POPPED(ensr, "ensure ensr", RNODE_ENSURE(node)->nd_ensr));
8602  last = ensr->last;
8603  last_leave = last && IS_INSN(last) && IS_INSN_ID(last, leave);
8604 
8605  er.begin = lstart;
8606  er.end = lend;
8607  er.next = 0;
8608  push_ensure_entry(iseq, &enl, &er, RNODE_ENSURE(node)->nd_ensr);
8609 
8610  ADD_LABEL(ret, lstart);
8611  CHECK(COMPILE_(ret, "ensure head", RNODE_ENSURE(node)->nd_head, (popped | last_leave)));
8612  ADD_LABEL(ret, lend);
8613  ADD_SEQ(ret, ensr);
8614  if (!popped && last_leave) ADD_INSN(ret, line_node, putnil);
8615  ADD_LABEL(ret, lcont);
8616  if (last_leave) ADD_INSN(ret, line_node, pop);
8617 
8618  erange = ISEQ_COMPILE_DATA(iseq)->ensure_node_stack->erange;
8619  if (lstart->link.next != &lend->link) {
8620  while (erange) {
8621  ADD_CATCH_ENTRY(CATCH_TYPE_ENSURE, erange->begin, erange->end,
8622  ensure, lcont);
8623  erange = erange->next;
8624  }
8625  }
8626 
8627  ISEQ_COMPILE_DATA(iseq)->ensure_node_stack = enl.prev;
8628  return COMPILE_OK;
8629 }
8630 
static int
compile_return(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, int popped)
{
    /*
     * Compile `return [val]`.  In a method body (after running ensures) a
     * plain `leave` suffices; from any other context (block, rescue, etc.)
     * the value is raised with `throw TAG_RETURN`.  Top-level `return`
     * warns if given an argument and leaves directly.
     */
    const NODE *line_node = node;

    if (iseq) {
        enum rb_iseq_type type = ISEQ_BODY(iseq)->type;
        const rb_iseq_t *is = iseq;
        enum rb_iseq_type t = type;
        const NODE *retval = RNODE_RETURN(node)->nd_stts;
        LABEL *splabel = 0;

        /* skip over enclosing rescue/ensure iseqs to find the frame the
         * return semantically belongs to */
        while (t == ISEQ_TYPE_RESCUE || t == ISEQ_TYPE_ENSURE) {
            if (!(is = ISEQ_BODY(is)->parent_iseq)) break;
            t = ISEQ_BODY(is)->type;
        }
        switch (t) {
          case ISEQ_TYPE_TOP:
          case ISEQ_TYPE_MAIN:
            if (retval) {
                rb_warn("argument of top-level return is ignored");
            }
            if (is == iseq) {
                /* plain top-level, leave directly */
                type = ISEQ_TYPE_METHOD;
            }
            break;
          default:
            break;
        }

        if (type == ISEQ_TYPE_METHOD) {
            splabel = NEW_LABEL(0);
            ADD_LABEL(ret, splabel);
            ADD_ADJUST(ret, line_node, 0);
        }

        CHECK(COMPILE(ret, "return nd_stts (return val)", retval));

        if (type == ISEQ_TYPE_METHOD && can_add_ensure_iseq(iseq)) {
            /* direct return: run ensures inline, trace, and leave */
            add_ensure_iseq(ret, iseq, 1);
            ADD_TRACE(ret, RUBY_EVENT_RETURN);
            ADD_INSN(ret, line_node, leave);
            ADD_ADJUST_RESTORE(ret, splabel);

            if (!popped) {
                ADD_INSN(ret, line_node, putnil);
            }
        }
        else {
            /* non-local return via exception machinery */
            ADD_INSN1(ret, line_node, throw, INT2FIX(TAG_RETURN));
            if (popped) {
                ADD_INSN(ret, line_node, pop);
            }
        }
    }
    return COMPILE_OK;
}
8689 
/* If the sequence ends with `leave`/`jump` [+ optional ADJUST] followed by a
 * `putnil` [+ optional trace], that trailing code is unreachable: unlink
 * everything from the element after the terminating insn onward and return
 * true.  Returns false (list untouched) otherwise. */
static bool
drop_unreachable_return(LINK_ANCHOR *ret)
{
    LINK_ELEMENT *i = ret->last, *last;
    if (!i) return false;
    if (IS_TRACE(i)) i = i->prev;                 /* step over a trailing trace */
    if (!IS_INSN(i) || !IS_INSN_ID(i, putnil)) return false;
    last = i = i->prev;                           /* element preceding the putnil */
    if (IS_ADJUST(i)) i = i->prev;                /* step over an adjust */
    if (!IS_INSN(i)) return false;
    switch (INSN_OF(i)) {
      case BIN(leave):
      case BIN(jump):
        break;
      default:
        return false;
    }
    /* truncate the list just before `last`, dropping the unreachable tail */
    (ret->last = last->prev)->next = NULL;
    return true;
}
8710 
8711 static int
8712 compile_evstr(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, int popped)
8713 {
8714  CHECK(COMPILE_(ret, "nd_body", node, popped));
8715 
8716  if (!popped && !all_string_result_p(node)) {
8717  const NODE *line_node = node;
8718  const unsigned int flag = VM_CALL_FCALL;
8719 
8720  // Note, this dup could be removed if we are willing to change anytostring. It pops
8721  // two VALUEs off the stack when it could work by replacing the top most VALUE.
8722  ADD_INSN(ret, line_node, dup);
8723  ADD_INSN1(ret, line_node, objtostring, new_callinfo(iseq, idTo_s, 0, flag, NULL, FALSE));
8724  ADD_INSN(ret, line_node, anytostring);
8725  }
8726  return COMPILE_OK;
8727 }
8728 
8729 static void
8730 compile_lvar(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *line_node, ID id)
8731 {
8732  int idx = ISEQ_BODY(ISEQ_BODY(iseq)->local_iseq)->local_table_size - get_local_var_idx(iseq, id);
8733 
8734  debugs("id: %s idx: %d\n", rb_id2name(id), idx);
8735  ADD_GETLOCAL(ret, line_node, idx, get_lvar_level(iseq));
8736 }
8737 
8738 static LABEL *
8739 qcall_branch_start(rb_iseq_t *iseq, LINK_ANCHOR *const recv, VALUE *branches, const NODE *node, const NODE *line_node)
8740 {
8741  LABEL *else_label = NEW_LABEL(nd_line(line_node));
8742  VALUE br = 0;
8743 
8744  br = decl_branch_base(iseq, PTR2NUM(node), nd_code_loc(node), "&.");
8745  *branches = br;
8746  ADD_INSN(recv, line_node, dup);
8747  ADD_INSNL(recv, line_node, branchnil, else_label);
8748  add_trace_branch_coverage(iseq, recv, nd_code_loc(node), nd_node_id(node), 0, "then", br);
8749  return else_label;
8750 }
8751 
8752 static void
8753 qcall_branch_end(rb_iseq_t *iseq, LINK_ANCHOR *const ret, LABEL *else_label, VALUE branches, const NODE *node, const NODE *line_node)
8754 {
8755  LABEL *end_label;
8756  if (!else_label) return;
8757  end_label = NEW_LABEL(nd_line(line_node));
8758  ADD_INSNL(ret, line_node, jump, end_label);
8759  ADD_LABEL(ret, else_label);
8760  add_trace_branch_coverage(iseq, ret, nd_code_loc(node), nd_node_id(node), 1, "else", branches);
8761  ADD_LABEL(ret, end_label);
8762 }
8763 
/* Try two specialized-instruction shortcuts for a call node; returns TRUE
 * if one was emitted (the caller should not compile the call normally),
 * FALSE otherwise. */
static int
compile_call_precheck_freeze(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, const NODE *line_node, int popped)
{
    /* optimization shortcut
     *   "literal".freeze -> opt_str_freeze("literal")
     * (also "literal".-@ via opt_str_uminus); only with no args, no block,
     * and specialized instructions enabled */
    if (get_nd_recv(node) &&
        (nd_type_p(get_nd_recv(node), NODE_STR) || nd_type_p(get_nd_recv(node), NODE_FILE)) &&
        (get_node_call_nd_mid(node) == idFreeze || get_node_call_nd_mid(node) == idUMinus) &&
        get_nd_args(node) == NULL &&
        ISEQ_COMPILE_DATA(iseq)->current_block == NULL &&
        ISEQ_COMPILE_DATA(iseq)->option->specialized_instruction) {
        VALUE str = get_string_value(get_nd_recv(node));
        if (get_node_call_nd_mid(node) == idUMinus) {
            ADD_INSN2(ret, line_node, opt_str_uminus, str,
                      new_callinfo(iseq, idUMinus, 0, 0, NULL, FALSE));
        }
        else {
            ADD_INSN2(ret, line_node, opt_str_freeze, str,
                      new_callinfo(iseq, idFreeze, 0, 0, NULL, FALSE));
        }
        /* notify GC of the new iseq -> string reference */
        RB_OBJ_WRITTEN(iseq, Qundef, str);
        if (popped) {
            ADD_INSN(ret, line_node, pop);
        }
        return TRUE;
    }
    /* optimization shortcut
     *   obj["literal"] -> opt_aref_with(obj, "literal")
     * only for a public single-string-arg #[] call without a block, and not
     * under frozen-string-literal (the receiver may rely on getting a
     * mutable string) */
    if (get_node_call_nd_mid(node) == idAREF && !private_recv_p(node) && get_nd_args(node) &&
        nd_type_p(get_nd_args(node), NODE_LIST) && RNODE_LIST(get_nd_args(node))->as.nd_alen == 1 &&
        (nd_type_p(RNODE_LIST(get_nd_args(node))->nd_head, NODE_STR) || nd_type_p(RNODE_LIST(get_nd_args(node))->nd_head, NODE_FILE)) &&
        ISEQ_COMPILE_DATA(iseq)->current_block == NULL &&
        !frozen_string_literal_p(iseq) &&
        ISEQ_COMPILE_DATA(iseq)->option->specialized_instruction) {
        VALUE str = get_string_value(RNODE_LIST(get_nd_args(node))->nd_head);
        CHECK(COMPILE(ret, "recv", get_nd_recv(node)));
        ADD_INSN2(ret, line_node, opt_aref_with, str,
                  new_callinfo(iseq, idAREF, 1, 0, NULL, FALSE));
        RB_OBJ_WRITTEN(iseq, Qundef, str);
        if (popped) {
            ADD_INSN(ret, line_node, pop);
        }
        return TRUE;
    }
    return FALSE;
}
8812 
8813 static int
8814 iseq_has_builtin_function_table(const rb_iseq_t *iseq)
8815 {
8816  return ISEQ_COMPILE_DATA(iseq)->builtin_function_table != NULL;
8817 }
8818 
8819 static const struct rb_builtin_function *
8820 iseq_builtin_function_lookup(const rb_iseq_t *iseq, const char *name)
8821 {
8822  int i;
8823  const struct rb_builtin_function *table = ISEQ_COMPILE_DATA(iseq)->builtin_function_table;
8824  for (i=0; table[i].index != -1; i++) {
8825  if (strcmp(table[i].name, name) == 0) {
8826  return &table[i];
8827  }
8828  }
8829  return NULL;
8830 }
8831 
8832 static const char *
8833 iseq_builtin_function_name(const enum node_type type, const NODE *recv, ID mid)
8834 {
8835  const char *name = rb_id2name(mid);
8836  static const char prefix[] = "__builtin_";
8837  const size_t prefix_len = sizeof(prefix) - 1;
8838 
8839  switch (type) {
8840  case NODE_CALL:
8841  if (recv) {
8842  switch (nd_type(recv)) {
8843  case NODE_VCALL:
8844  if (RNODE_VCALL(recv)->nd_mid == rb_intern("__builtin")) {
8845  return name;
8846  }
8847  break;
8848  case NODE_CONST:
8849  if (RNODE_CONST(recv)->nd_vid == rb_intern("Primitive")) {
8850  return name;
8851  }
8852  break;
8853  default: break;
8854  }
8855  }
8856  break;
8857  case NODE_VCALL:
8858  case NODE_FCALL:
8859  if (UNLIKELY(strncmp(prefix, name, prefix_len) == 0)) {
8860  return &name[prefix_len];
8861  }
8862  break;
8863  default: break;
8864  }
8865  return NULL;
8866 }
8867 
/* Returns TRUE when the compiled argument list `args` (with `argc`
 * arguments) consists solely of level-0 `getlocal` instructions reading a
 * contiguous run of this iseq's locals in table order — i.e. the call just
 * forwards its own locals.  On success *pstart_index receives the offset
 * of that run in the local table. */
static int
delegate_call_p(const rb_iseq_t *iseq, unsigned int argc, const LINK_ANCHOR *args, unsigned int *pstart_index)
{

    if (argc == 0) {
        *pstart_index = 0;
        return TRUE;
    }
    else if (argc <= ISEQ_BODY(iseq)->local_table_size) {
        unsigned int start=0;

        // local_table: [p1, p2, p3, l1, l2, l3]
        // arguments:           [p3, l1, l2]     -> 2
        for (start = 0;
             argc + start <= ISEQ_BODY(iseq)->local_table_size;
             start++) {
            const LINK_ELEMENT *elem = FIRST_ELEMENT(args);

            for (unsigned int i=start; i-start<argc; i++) {
                if (IS_INSN(elem) &&
                    INSN_OF(elem) == BIN(getlocal)) {
                    int local_index = FIX2INT(OPERAND_AT(elem, 0));
                    int local_level = FIX2INT(OPERAND_AT(elem, 1));

                    if (local_level == 0) {
                        /* convert the getlocal slot back into a local-table
                         * index and require it to match position i */
                        unsigned int index = ISEQ_BODY(iseq)->local_table_size - (local_index - VM_ENV_DATA_SIZE + 1);
                        if (0) { // for debug
                            fprintf(stderr, "lvar:%s (%d), id:%s (%d) local_index:%d, local_size:%d\n",
                                    rb_id2name(ISEQ_BODY(iseq)->local_table[i]),     i,
                                    rb_id2name(ISEQ_BODY(iseq)->local_table[index]), index,
                                    local_index, (int)ISEQ_BODY(iseq)->local_table_size);
                        }
                        if (i == index) {
                            elem = elem->next;
                            continue; /* for */
                        }
                        else {
                            goto next;
                        }
                    }
                    else {
                        goto fail; // level != 0 is unsupported
                    }
                }
                else {
                    goto fail; // insn is not a getlocal
                }
            }
            goto success;
          next:;
        }
      fail:
        return FALSE;
      success:
        *pstart_index = start;
        return TRUE;
    }
    else {
        /* more arguments than locals: cannot be a pure delegation */
        return FALSE;
    }
}
8929 
// Compile Primitive.attr! :leaf, ...
// Each argument must be a symbol literal naming a known attribute; the
// matching flag is set on the iseq body (no instructions are emitted).
// Unknown / malformed arguments are reported via COMPILE_ERROR and
// COMPILE_NG is returned.
static int
compile_builtin_attr(rb_iseq_t *iseq, const NODE *node)
{
    VALUE symbol;
    VALUE string;
    if (!node) goto no_arg;
    while (node) {
        // arguments arrive as a NODE_LIST chain; walk it one entry at a time
        if (!nd_type_p(node, NODE_LIST)) goto bad_arg;
        const NODE *next = RNODE_LIST(node)->nd_next;

        node = RNODE_LIST(node)->nd_head;
        if (!node) goto no_arg;
        switch (nd_type(node)) {
          case NODE_SYM:
            symbol = rb_node_sym_string_val(node);
            break;
          default:
            goto bad_arg;
        }

        if (!SYMBOL_P(symbol)) goto non_symbol_arg;

        string = rb_sym2str(symbol);
        if (strcmp(RSTRING_PTR(string), "leaf") == 0) {
            ISEQ_BODY(iseq)->builtin_attrs |= BUILTIN_ATTR_LEAF;
        }
        else if (strcmp(RSTRING_PTR(string), "inline_block") == 0) {
            ISEQ_BODY(iseq)->builtin_attrs |= BUILTIN_ATTR_INLINE_BLOCK;
        }
        else if (strcmp(RSTRING_PTR(string), "use_block") == 0) {
            iseq_set_use_block(iseq);
        }
        else if (strcmp(RSTRING_PTR(string), "c_trace") == 0) {
            // Let the iseq act like a C method in backtraces
            ISEQ_BODY(iseq)->builtin_attrs |= BUILTIN_ATTR_C_TRACE;
        }
        else {
            goto unknown_arg;
        }
        node = next;
    }
    return COMPILE_OK;
  no_arg:
    COMPILE_ERROR(ERROR_ARGS "attr!: no argument");
    return COMPILE_NG;
  non_symbol_arg:
    COMPILE_ERROR(ERROR_ARGS "non symbol argument to attr!: %s", rb_builtin_class_name(symbol));
    return COMPILE_NG;
  unknown_arg:
    COMPILE_ERROR(ERROR_ARGS "unknown argument to attr!: %s", RSTRING_PTR(string));
    return COMPILE_NG;
  bad_arg:
    UNKNOWN_NODE("attr!", node, COMPILE_NG);
}
8985 
// Compile Primitive.arg! :name — push the local variable named by the
// single symbol-literal argument onto the stack (nothing is emitted when
// the result is `popped`). Exactly one argument is required.
static int
compile_builtin_arg(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *node, const NODE *line_node, int popped)
{
    VALUE name;

    if (!node) goto no_arg;
    if (!nd_type_p(node, NODE_LIST)) goto bad_arg;
    if (RNODE_LIST(node)->nd_next) goto too_many_arg;
    node = RNODE_LIST(node)->nd_head;
    if (!node) goto no_arg;
    switch (nd_type(node)) {
      case NODE_SYM:
        name = rb_node_sym_string_val(node);
        break;
      default:
        goto bad_arg;
    }
    if (!SYMBOL_P(name)) goto non_symbol_arg;
    if (!popped) {
        // emit a getlocal-style read of the named local
        compile_lvar(iseq, ret, line_node, SYM2ID(name));
    }
    return COMPILE_OK;
  no_arg:
    COMPILE_ERROR(ERROR_ARGS "arg!: no argument");
    return COMPILE_NG;
  too_many_arg:
    COMPILE_ERROR(ERROR_ARGS "arg!: too many argument");
    return COMPILE_NG;
  non_symbol_arg:
    COMPILE_ERROR(ERROR_ARGS "non symbol argument to arg!: %s",
                  rb_builtin_class_name(name));
    return COMPILE_NG;
  bad_arg:
    UNKNOWN_NODE("arg!", node, COMPILE_NG);
}
9021 
9022 static NODE *
9023 mandatory_node(const rb_iseq_t *iseq, const NODE *cond_node)
9024 {
9025  const NODE *node = ISEQ_COMPILE_DATA(iseq)->root_node;
9026  if (nd_type(node) == NODE_IF && RNODE_IF(node)->nd_cond == cond_node) {
9027  return RNODE_IF(node)->nd_body;
9028  }
9029  else {
9030  rb_bug("mandatory_node: can't find mandatory node");
9031  }
9032 }
9033 
// Build ISEQ_BODY(iseq)->mandatory_only_iseq: a second compilation of the
// method that accepts only the mandatory (lead) parameters, whose body is
// the then-branch of the root `if Primitive.mandatory_only?` (see
// mandatory_node). The synthetic NODE_SCOPE/NODE_ARGS nodes live on the
// stack; only the resulting iseq outlives this function.
static int
compile_builtin_mandatory_only_method(rb_iseq_t *iseq, const NODE *node, const NODE *line_node)
{
    // arguments: lead parameters only, everything else zeroed
    struct rb_args_info args = {
        .pre_args_num = ISEQ_BODY(iseq)->param.lead_num,
    };
    rb_node_args_t args_node;
    rb_node_init(RNODE(&args_node), NODE_ARGS);
    args_node.nd_ainfo = args;

    // local table without non-mandatory parameters
    const int skip_local_size = ISEQ_BODY(iseq)->param.size - ISEQ_BODY(iseq)->param.lead_num;
    const int table_size = ISEQ_BODY(iseq)->local_table_size - skip_local_size;

    VALUE idtmp = 0;
    rb_ast_id_table_t *tbl = ALLOCV(idtmp, sizeof(rb_ast_id_table_t) + table_size * sizeof(ID));
    tbl->size = table_size;

    int i;

    // lead parameters
    for (i=0; i<ISEQ_BODY(iseq)->param.lead_num; i++) {
        tbl->ids[i] = ISEQ_BODY(iseq)->local_table[i];
    }
    // local variables (shifted past the skipped optional/rest/etc. slots)
    for (; i<table_size; i++) {
        tbl->ids[i] = ISEQ_BODY(iseq)->local_table[i + skip_local_size];
    }

    // synthetic scope wrapping the mandatory-only body
    rb_node_scope_t scope_node;
    rb_node_init(RNODE(&scope_node), NODE_SCOPE);
    scope_node.nd_tbl = tbl;
    scope_node.nd_body = mandatory_node(iseq, node);
    scope_node.nd_args = &args_node;

    VALUE ast_value = rb_ruby_ast_new(RNODE(&scope_node));

    ISEQ_BODY(iseq)->mandatory_only_iseq =
      rb_iseq_new_with_opt(ast_value, rb_iseq_base_label(iseq),
                           rb_iseq_path(iseq), rb_iseq_realpath(iseq),
                           nd_line(line_node), NULL, 0,
                           ISEQ_TYPE_METHOD, ISEQ_COMPILE_DATA(iseq)->option,
                           ISEQ_BODY(iseq)->variable.script_lines);

    ALLOCV_END(idtmp);
    return COMPILE_OK;
}
9082 
// Compile a call to a builtin function (`Primitive.name` / `__builtin_name`).
// `builtin_func` is the bare name returned by iseq_builtin_function_name.
// Pseudo-builtins (cstmt!/cexpr!/cconst!/cinit!/attr!/arg!/mandatory_only?)
// get special handling; any other name must resolve via the iseq's builtin
// function table. Builtins may not take a literal block (`parent_block`).
static int
compile_builtin_function_call(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, const NODE *line_node, int popped,
                              const rb_iseq_t *parent_block, LINK_ANCHOR *args, const char *builtin_func)
{
    NODE *args_node = get_nd_args(node);

    if (parent_block != NULL) {
        COMPILE_ERROR(ERROR_ARGS_AT(line_node) "should not call builtins here.");
        return COMPILE_NG;
    }
    else {
# define BUILTIN_INLINE_PREFIX "_bi"
        char inline_func[sizeof(BUILTIN_INLINE_PREFIX) + DECIMAL_SIZE_OF(int)];
        bool cconst = false;
      retry:;
        const struct rb_builtin_function *bf = iseq_builtin_function_lookup(iseq, builtin_func);

        if (bf == NULL) {
            // Not a registered builtin: either a pseudo-builtin handled
            // inline below, or an inlined C chunk named "_bi<line>" that we
            // retry the lookup with.
            if (strcmp("cstmt!", builtin_func) == 0 ||
                strcmp("cexpr!", builtin_func) == 0) {
                // ok
            }
            else if (strcmp("cconst!", builtin_func) == 0) {
                cconst = true;
            }
            else if (strcmp("cinit!", builtin_func) == 0) {
                // ignore
                return COMPILE_OK;
            }
            else if (strcmp("attr!", builtin_func) == 0) {
                return compile_builtin_attr(iseq, args_node);
            }
            else if (strcmp("arg!", builtin_func) == 0) {
                return compile_builtin_arg(iseq, ret, args_node, line_node, popped);
            }
            else if (strcmp("mandatory_only?", builtin_func) == 0) {
                // must be the very first expression, used as an if condition
                if (popped) {
                    rb_bug("mandatory_only? should be in if condition");
                }
                else if (!LIST_INSN_SIZE_ZERO(ret)) {
                    rb_bug("mandatory_only? should be put on top");
                }

                // the full method always takes the false branch; the
                // mandatory-only variant is compiled as a separate iseq
                ADD_INSN1(ret, line_node, putobject, Qfalse);
                return compile_builtin_mandatory_only_method(iseq, node, line_node);
            }
            else if (1) {
                // unknown builtin is a core bug, not a user error; the
                // COMPILE_ERROR branch below is deliberately unreachable
                rb_bug("can't find builtin function:%s", builtin_func);
            }
            else {
                COMPILE_ERROR(ERROR_ARGS "can't find builtin function:%s", builtin_func);
                return COMPILE_NG;
            }

            // cstmt!/cexpr!/cconst! compile to a generated "_bi<line>"
            // function; retry the table lookup under that name
            int inline_index = nd_line(node);
            snprintf(inline_func, sizeof(inline_func), BUILTIN_INLINE_PREFIX "%d", inline_index);
            builtin_func = inline_func;
            args_node = NULL;
            goto retry;
        }

        if (cconst) {
            // cconst!: evaluate the builtin at compile time and embed the
            // resulting constant value directly
            typedef VALUE(*builtin_func0)(void *, VALUE);
            VALUE const_val = (*(builtin_func0)(uintptr_t)bf->func_ptr)(NULL, Qnil);
            ADD_INSN1(ret, line_node, putobject, const_val);
            return COMPILE_OK;
        }

        // fprintf(stderr, "func_name:%s -> %p\n", builtin_func, bf->func_ptr);

        unsigned int flag = 0;
        struct rb_callinfo_kwarg *keywords = NULL;
        VALUE argc = setup_args(iseq, args, args_node, &flag, &keywords);

        if (FIX2INT(argc) != bf->argc) {
            COMPILE_ERROR(ERROR_ARGS "argc is not match for builtin function:%s (expect %d but %d)",
                          builtin_func, bf->argc, FIX2INT(argc));
            return COMPILE_NG;
        }

        // prefer the delegate form when the args are plain consecutive locals
        unsigned int start_index;
        if (delegate_call_p(iseq, FIX2INT(argc), args, &start_index)) {
            ADD_INSN2(ret, line_node, opt_invokebuiltin_delegate, bf, INT2FIX(start_index));
        }
        else {
            ADD_SEQ(ret, args);
            ADD_INSN1(ret, line_node, invokebuiltin, bf);
        }

        if (popped) ADD_INSN(ret, line_node, pop);
        return COMPILE_OK;
    }
}
9176 
/* Compile a method call node (NODE_CALL/OPCALL/QCALL/FCALL/VCALL).
 * When `assume_receiver` is true the receiver is already on the stack and
 * is not compiled here. Any block iseq pending in current_block is
 * consumed and attached to the send instruction. */
static int
compile_call(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, const enum node_type type, const NODE *const line_node, int popped, bool assume_receiver)
{
    /* call: obj.method(...)
     * fcall: func(...)
     * vcall: func
     */
    DECL_ANCHOR(recv);
    DECL_ANCHOR(args);
    ID mid = get_node_call_nd_mid(node);
    VALUE argc;
    unsigned int flag = 0;
    struct rb_callinfo_kwarg *keywords = NULL;
    const rb_iseq_t *parent_block = ISEQ_COMPILE_DATA(iseq)->current_block;
    LABEL *else_label = NULL;
    VALUE branches = Qfalse;

    /* take ownership of the pending block so nested compiles don't reuse it */
    ISEQ_COMPILE_DATA(iseq)->current_block = NULL;

    INIT_ANCHOR(recv);
    INIT_ANCHOR(args);
#if OPT_SUPPORT_JOKE
    if (nd_type_p(node, NODE_VCALL)) {
        ID id_bitblt;
        ID id_answer;

        CONST_ID(id_bitblt, "bitblt");
        CONST_ID(id_answer, "the_answer_to_life_the_universe_and_everything");

        if (mid == id_bitblt) {
            ADD_INSN(ret, line_node, bitblt);
            return COMPILE_OK;
        }
        else if (mid == id_answer) {
            ADD_INSN(ret, line_node, answer);
            return COMPILE_OK;
        }
    }
    /* only joke */
    {
        ID goto_id;
        ID label_id;

        CONST_ID(goto_id, "__goto__");
        CONST_ID(label_id, "__label__");

        if (nd_type_p(node, NODE_FCALL) &&
            (mid == goto_id || mid == label_id)) {
            LABEL *label;
            st_data_t data;
            st_table *labels_table = ISEQ_COMPILE_DATA(iseq)->labels_table;
            VALUE label_name;

            if (!labels_table) {
                labels_table = st_init_numtable();
                ISEQ_COMPILE_DATA(iseq)->labels_table = labels_table;
            }
            /* NOTE(review): as written, this block unconditionally reports
             * "invalid goto/label format" and returns before `label` is ever
             * assigned, so the jump/label emission below looks unreachable.
             * Compare with upstream, which derives `label` from the call's
             * symbol argument here. (Dead unless OPT_SUPPORT_JOKE.) */
            {
                COMPILE_ERROR(ERROR_ARGS "invalid goto/label format");
                return COMPILE_NG;
            }

            if (mid == goto_id) {
                ADD_INSNL(ret, line_node, jump, label);
            }
            else {
                ADD_LABEL(ret, label);
            }
            return COMPILE_OK;
        }
    }
#endif

    /* builtin function call (Primitive.name / __builtin_name) takes a
     * completely different code path */
    const char *builtin_func;
    if (UNLIKELY(iseq_has_builtin_function_table(iseq)) &&
        (builtin_func = iseq_builtin_function_name(type, get_nd_recv(node), mid)) != NULL) {
        return compile_builtin_function_call(iseq, ret, node, line_node, popped, parent_block, args, builtin_func);
    }

    /* receiver */
    if (!assume_receiver) {
        if (type == NODE_CALL || type == NODE_OPCALL || type == NODE_QCALL) {
            int idx, level;

            if (mid == idCall &&
                nd_type_p(get_nd_recv(node), NODE_LVAR) &&
                iseq_block_param_id_p(iseq, RNODE_LVAR(get_nd_recv(node))->nd_vid, &idx, &level)) {
                /* block_param.call: read the block param via the proxy so a
                 * Proc needn't be materialized */
                ADD_INSN2(recv, get_nd_recv(node), getblockparamproxy, INT2FIX(idx + VM_ENV_DATA_SIZE - 1), INT2FIX(level));
            }
            else if (private_recv_p(node)) {
                ADD_INSN(recv, node, putself);
                flag |= VM_CALL_FCALL;
            }
            else {
                CHECK(COMPILE(recv, "recv", get_nd_recv(node)));
            }

            if (type == NODE_QCALL) {
                /* &. — skip the call when the receiver is nil */
                else_label = qcall_branch_start(iseq, recv, &branches, node, line_node);
            }
        }
        else if (type == NODE_FCALL || type == NODE_VCALL) {
            ADD_CALL_RECEIVER(recv, line_node);
        }
    }

    /* args */
    if (type != NODE_VCALL) {
        argc = setup_args(iseq, args, get_nd_args(node), &flag, &keywords);
        CHECK(!NIL_P(argc));
    }
    else {
        argc = INT2FIX(0);
    }

    ADD_SEQ(ret, recv);
    ADD_SEQ(ret, args);

    debugp_param("call args argc", argc);
    debugp_param("call method", ID2SYM(mid));

    switch ((int)type) {
      case NODE_VCALL:
        flag |= VM_CALL_VCALL;
        /* VCALL is funcall, so fall through */
      case NODE_FCALL:
        flag |= VM_CALL_FCALL;
    }

    /* with both &block and **splat, the kw splat hash must be reified
     * before the block argument (splatkw) */
    if ((flag & VM_CALL_ARGS_BLOCKARG) && (flag & VM_CALL_KW_SPLAT) && !(flag & VM_CALL_KW_SPLAT_MUT)) {
        ADD_INSN(ret, line_node, splatkw);
    }
    ADD_SEND_R(ret, line_node, mid, argc, parent_block, INT2FIX(flag), keywords);

    qcall_branch_end(iseq, ret, else_label, branches, node, line_node);
    if (popped) {
        ADD_INSN(ret, line_node, pop);
    }
    return COMPILE_OK;
}
9317 
/* Compile `recv[args] (op)= rhs` (NODE_OP_ASGN1), including the
 * short-circuiting forms `recv[args] ||= rhs` and `recv[args] &&= rhs`. */
static int
compile_op_asgn1(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, int popped)
{
    const int line = nd_line(node);
    VALUE argc;
    unsigned int flag = 0;
    int asgnflag = 0;
    ID id = RNODE_OP_ASGN1(node)->nd_mid;

    /*
     * a[x] (op)= y
     *
     * nil       # nil
     * eval a    # nil a
     * eval x    # nil a x
     * dupn 2    # nil a x a x
     * send :[]  # nil a x a[x]
     * eval y    # nil a x a[x] y
     * send op   # nil a x ret
     * setn 3    # ret a x ret
     * send []=  # ret ?
     * pop       # ret
     */

    /*
     * nd_recv[nd_args->nd_body] (nd_mid)= nd_args->nd_head;
     * NODE_OP_ASGN nd_recv
     *              nd_args->nd_head
     *              nd_args->nd_body
     *              nd_mid
     */

    if (!popped) {
        /* placeholder slot for the overall result (filled in by setn) */
        ADD_INSN(ret, node, putnil);
    }
    asgnflag = COMPILE_RECV(ret, "NODE_OP_ASGN1 recv", node, RNODE_OP_ASGN1(node)->nd_recv);
    CHECK(asgnflag != -1);
    switch (nd_type(RNODE_OP_ASGN1(node)->nd_index)) {
      case NODE_ZLIST:
        argc = INT2FIX(0);
        break;
      default:
        argc = setup_args(iseq, ret, RNODE_OP_ASGN1(node)->nd_index, &flag, NULL);
        CHECK(!NIL_P(argc));
    }
    /* duplicate recv + index args so they survive the initial [] read */
    int dup_argn = FIX2INT(argc) + 1;
    ADD_INSN1(ret, node, dupn, INT2FIX(dup_argn));
    flag |= asgnflag;
    ADD_SEND_R(ret, node, idAREF, argc, NULL, INT2FIX(flag & ~VM_CALL_ARGS_SPLAT_MUT), NULL);

    if (id == idOROP || id == idANDOP) {
        /* a[x] ||= y  or  a[x] &&= y

           unless/if a[x]
           a[x]= y
           else
           nil
           end
        */
        LABEL *label = NEW_LABEL(line);
        LABEL *lfin = NEW_LABEL(line);

        ADD_INSN(ret, node, dup);
        if (id == idOROP) {
            ADD_INSNL(ret, node, branchif, label);
        }
        else { /* idANDOP */
            ADD_INSNL(ret, node, branchunless, label);
        }
        ADD_INSN(ret, node, pop);

        CHECK(COMPILE(ret, "NODE_OP_ASGN1 nd_rvalue: ", RNODE_OP_ASGN1(node)->nd_rvalue));
        if (!popped) {
            /* stash the rhs as the expression result */
            ADD_INSN1(ret, node, setn, INT2FIX(dup_argn+1));
        }
        if (flag & VM_CALL_ARGS_SPLAT) {
            if (!(flag & VM_CALL_ARGS_SPLAT_MUT)) {
                /* make the splat array mutable before appending the rhs */
                ADD_INSN(ret, node, swap);
                ADD_INSN1(ret, node, splatarray, Qtrue);
                ADD_INSN(ret, node, swap);
                flag |= VM_CALL_ARGS_SPLAT_MUT;
            }
            ADD_INSN1(ret, node, pushtoarray, INT2FIX(1));
            ADD_SEND_R(ret, node, idASET, argc, NULL, INT2FIX(flag), NULL);
        }
        else {
            /* rhs is passed as an extra positional argument to []= */
            ADD_SEND_R(ret, node, idASET, FIXNUM_INC(argc, 1), NULL, INT2FIX(flag), NULL);
        }
        ADD_INSN(ret, node, pop);
        ADD_INSNL(ret, node, jump, lfin);
        ADD_LABEL(ret, label);
        if (!popped) {
            /* short-circuit taken: current a[x] value is the result */
            ADD_INSN1(ret, node, setn, INT2FIX(dup_argn+1));
        }
        /* drop the duplicated recv/args left on the stack */
        ADD_INSN1(ret, node, adjuststack, INT2FIX(dup_argn+1));
        ADD_LABEL(ret, lfin);
    }
    else {
        CHECK(COMPILE(ret, "NODE_OP_ASGN1 nd_rvalue: ", RNODE_OP_ASGN1(node)->nd_rvalue));
        ADD_SEND(ret, node, id, INT2FIX(1));
        if (!popped) {
            ADD_INSN1(ret, node, setn, INT2FIX(dup_argn+1));
        }
        if (flag & VM_CALL_ARGS_SPLAT) {
            if (flag & VM_CALL_KW_SPLAT) {
                /* keep the kw-splat hash last: append the computed value to
                 * the positional splat array underneath it */
                ADD_INSN1(ret, node, topn, INT2FIX(2));
                if (!(flag & VM_CALL_ARGS_SPLAT_MUT)) {
                    ADD_INSN1(ret, node, splatarray, Qtrue);
                    flag |= VM_CALL_ARGS_SPLAT_MUT;
                }
                ADD_INSN(ret, node, swap);
                ADD_INSN1(ret, node, pushtoarray, INT2FIX(1));
                ADD_INSN1(ret, node, setn, INT2FIX(2));
                ADD_INSN(ret, node, pop);
            }
            else {
                if (!(flag & VM_CALL_ARGS_SPLAT_MUT)) {
                    ADD_INSN(ret, node, swap);
                    ADD_INSN1(ret, node, splatarray, Qtrue);
                    ADD_INSN(ret, node, swap);
                    flag |= VM_CALL_ARGS_SPLAT_MUT;
                }
                ADD_INSN1(ret, node, pushtoarray, INT2FIX(1));
            }
            ADD_SEND_R(ret, node, idASET, argc, NULL, INT2FIX(flag), NULL);
        }
        else {
            ADD_SEND_R(ret, node, idASET, FIXNUM_INC(argc, 1), NULL, INT2FIX(flag), NULL);
        }
        ADD_INSN(ret, node, pop);
    }
    return COMPILE_OK;
}
9451 
/* Compile `recv.attr (op)= value` (NODE_OP_ASGN2). nd_aid non-zero marks
 * the safe-navigation form `recv&.attr (op)= value`, which skips everything
 * when recv is nil (branchnil to lskip). */
static int
compile_op_asgn2(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, int popped)
{
    const int line = nd_line(node);
    ID atype = RNODE_OP_ASGN2(node)->nd_mid;
    ID vid = RNODE_OP_ASGN2(node)->nd_vid, aid = rb_id_attrset(vid);
    int asgnflag;
    LABEL *lfin = NEW_LABEL(line);
    LABEL *lcfin = NEW_LABEL(line);
    LABEL *lskip = 0;
    /*
      class C; attr_accessor :c; end
      r = C.new
      r.a &&= v # asgn2

      eval r    # r
      dup       # r r
      eval r.a  # r o

      # or
      dup       # r o o
      if lcfin  # r o
      pop       # r
      eval v    # r v
      swap      # v r
      topn 1    # v r v
      send a=   # v ?
      jump lfin # v ?

      lcfin:      # r o
      swap      # o r

      lfin:       # o ?
      pop       # o

      # or (popped)
      if lcfin  # r
      eval v    # r v
      send a=   # ?
      jump lfin # ?

      lcfin:      # r

      lfin:       # ?
      pop       #

      # and
      dup       # r o o
      unless lcfin
      pop       # r
      eval v    # r v
      swap      # v r
      topn 1    # v r v
      send a=   # v ?
      jump lfin # v ?

      # others
      eval v    # r o v
      send ??   # r w
      send a=   # w

    */

    asgnflag = COMPILE_RECV(ret, "NODE_OP_ASGN2#recv", node, RNODE_OP_ASGN2(node)->nd_recv);
    CHECK(asgnflag != -1);
    if (RNODE_OP_ASGN2(node)->nd_aid) {
        /* safe navigation: bail out (keeping nil) if recv is nil */
        lskip = NEW_LABEL(line);
        ADD_INSN(ret, node, dup);
        ADD_INSNL(ret, node, branchnil, lskip);
    }
    ADD_INSN(ret, node, dup);
    /* read the current attribute value: recv.attr */
    ADD_SEND_WITH_FLAG(ret, node, vid, INT2FIX(0), INT2FIX(asgnflag));

    if (atype == idOROP || atype == idANDOP) {
        if (!popped) {
            ADD_INSN(ret, node, dup);
        }
        if (atype == idOROP) {
            ADD_INSNL(ret, node, branchif, lcfin);
        }
        else { /* idANDOP */
            ADD_INSNL(ret, node, branchunless, lcfin);
        }
        if (!popped) {
            ADD_INSN(ret, node, pop);
        }
        CHECK(COMPILE(ret, "NODE_OP_ASGN2 val", RNODE_OP_ASGN2(node)->nd_value));
        if (!popped) {
            /* keep a copy of the new value under recv as the result */
            ADD_INSN(ret, node, swap);
            ADD_INSN1(ret, node, topn, INT2FIX(1));
        }
        ADD_SEND_WITH_FLAG(ret, node, aid, INT2FIX(1), INT2FIX(asgnflag));
        ADD_INSNL(ret, node, jump, lfin);

        ADD_LABEL(ret, lcfin);
        if (!popped) {
            /* short-circuit: existing value is the result */
            ADD_INSN(ret, node, swap);
        }

        ADD_LABEL(ret, lfin);
    }
    else {
        /* plain `op=`: old_value op rhs, then attr= */
        CHECK(COMPILE(ret, "NODE_OP_ASGN2 val", RNODE_OP_ASGN2(node)->nd_value));
        ADD_SEND(ret, node, atype, INT2FIX(1));
        if (!popped) {
            ADD_INSN(ret, node, swap);
            ADD_INSN1(ret, node, topn, INT2FIX(1));
        }
        ADD_SEND_WITH_FLAG(ret, node, aid, INT2FIX(1), INT2FIX(asgnflag));
    }
    /* lskip placement differs because the popped form leaves one fewer
     * value on the stack at the skip point */
    if (lskip && popped) {
        ADD_LABEL(ret, lskip);
    }
    ADD_INSN(ret, node, pop);
    if (lskip && !popped) {
        ADD_LABEL(ret, lskip);
    }
    return COMPILE_OK;
}
9571 
9572 static int compile_shareable_constant_value(rb_iseq_t *iseq, LINK_ANCHOR *ret, enum rb_parser_shareability shareable, const NODE *lhs, const NODE *value);
9573 
/* Compile `Scope::CONST (op)= value` (NODE_OP_CDECL). For `||=` a
 * defined?-style check is emitted first so an undefined constant assigns
 * without raising; for `||=`/`&&=` the store only happens when the
 * short-circuit falls through. */
static int
compile_op_cdecl(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, int popped)
{
    const int line = nd_line(node);
    LABEL *lfin = 0;
    LABEL *lassign = 0;
    ID mid;

    /* push the cref (lookup scope): Object for ::CONST, the lhs of :: for
     * Scope::CONST */
    switch (nd_type(RNODE_OP_CDECL(node)->nd_head)) {
      case NODE_COLON3:
        ADD_INSN1(ret, node, putobject, rb_cObject);
        break;
      case NODE_COLON2:
        CHECK(COMPILE(ret, "NODE_OP_CDECL/colon2#nd_head", RNODE_COLON2(RNODE_OP_CDECL(node)->nd_head)->nd_head));
        break;
      default:
        COMPILE_ERROR(ERROR_ARGS "%s: invalid node in NODE_OP_CDECL",
                      ruby_node_name(nd_type(RNODE_OP_CDECL(node)->nd_head)));
        return COMPILE_NG;
    }
    mid = get_node_colon_nd_mid(RNODE_OP_CDECL(node)->nd_head);
    /* cref */
    if (RNODE_OP_CDECL(node)->nd_aid == idOROP) {
        /* ||=: jump straight to the assignment when CONST is undefined */
        lassign = NEW_LABEL(line);
        ADD_INSN(ret, node, dup); /* cref cref */
        ADD_INSN3(ret, node, defined, INT2FIX(DEFINED_CONST_FROM),
                  ID2SYM(mid), Qtrue); /* cref bool */
        ADD_INSNL(ret, node, branchunless, lassign); /* cref */
    }
    ADD_INSN(ret, node, dup); /* cref cref */
    ADD_INSN1(ret, node, putobject, Qtrue);
    ADD_INSN1(ret, node, getconstant, ID2SYM(mid)); /* cref obj */

    if (RNODE_OP_CDECL(node)->nd_aid == idOROP || RNODE_OP_CDECL(node)->nd_aid == idANDOP) {
        lfin = NEW_LABEL(line);
        if (!popped) ADD_INSN(ret, node, dup); /* cref [obj] obj */
        if (RNODE_OP_CDECL(node)->nd_aid == idOROP)
            ADD_INSNL(ret, node, branchif, lfin);
        else /* idANDOP */
            ADD_INSNL(ret, node, branchunless, lfin);
        /* cref [obj] */
        if (!popped) ADD_INSN(ret, node, pop); /* cref */
        if (lassign) ADD_LABEL(ret, lassign);
        CHECK(compile_shareable_constant_value(iseq, ret, RNODE_OP_CDECL(node)->shareability, RNODE_OP_CDECL(node)->nd_head, RNODE_OP_CDECL(node)->nd_value));
        /* cref value */
        if (popped)
            ADD_INSN1(ret, node, topn, INT2FIX(1)); /* cref value cref */
        else {
            ADD_INSN1(ret, node, dupn, INT2FIX(2)); /* cref value cref value */
            ADD_INSN(ret, node, swap); /* cref value value cref */
        }
        ADD_INSN1(ret, node, setconstant, ID2SYM(mid)); /* cref [value] */
        ADD_LABEL(ret, lfin);            /* cref [value] */
        if (!popped) ADD_INSN(ret, node, swap); /* [value] cref */
        ADD_INSN(ret, node, pop); /* [value] */
    }
    else {
        /* plain `op=`: old op rhs, then store */
        CHECK(compile_shareable_constant_value(iseq, ret, RNODE_OP_CDECL(node)->shareability, RNODE_OP_CDECL(node)->nd_head, RNODE_OP_CDECL(node)->nd_value));
        /* cref obj value */
        ADD_CALL(ret, node, RNODE_OP_CDECL(node)->nd_aid, INT2FIX(1));
        /* cref value */
        ADD_INSN(ret, node, swap); /* value cref */
        if (!popped) {
            ADD_INSN1(ret, node, topn, INT2FIX(1)); /* value cref value */
            ADD_INSN(ret, node, swap); /* value value cref */
        }
        ADD_INSN1(ret, node, setconstant, ID2SYM(mid));
    }
    return COMPILE_OK;
}
9644 
/* Compile `lhs ||= rhs` (NODE_OP_ASGN_OR) and `lhs &&= rhs`
 * (NODE_OP_ASGN_AND). For ||= on a non-ivar lhs, a defined? check is
 * emitted first so reading an undefined lhs (e.g. a gvar or constant)
 * jumps straight to the assignment instead of raising. */
static int
compile_op_log(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, int popped, const enum node_type type)
{
    const int line = nd_line(node);
    LABEL *lfin = NEW_LABEL(line);
    LABEL *lassign;

    if (type == NODE_OP_ASGN_OR && !nd_type_p(RNODE_OP_ASGN_OR(node)->nd_head, NODE_IVAR)) {
        LABEL *lfinish[2];
        lfinish[0] = lfin;
        lfinish[1] = 0;
        defined_expr(iseq, ret, RNODE_OP_ASGN_OR(node)->nd_head, lfinish, Qfalse, false);
        /* defined_expr may have allocated its own label for the
         * not-defined path; reuse it as the assignment target */
        lassign = lfinish[1];
        if (!lassign) {
            lassign = NEW_LABEL(line);
        }
        ADD_INSNL(ret, node, branchunless, lassign);
    }
    else {
        lassign = NEW_LABEL(line);
    }

    /* evaluate the current lhs value */
    CHECK(COMPILE(ret, "NODE_OP_ASGN_AND/OR#nd_head", RNODE_OP_ASGN_OR(node)->nd_head));

    if (!popped) {
        ADD_INSN(ret, node, dup);
    }

    /* short-circuit: &&= skips the assignment on falsy, ||= on truthy */
    if (type == NODE_OP_ASGN_AND) {
        ADD_INSNL(ret, node, branchunless, lfin);
    }
    else {
        ADD_INSNL(ret, node, branchif, lfin);
    }

    if (!popped) {
        ADD_INSN(ret, node, pop);
    }

    ADD_LABEL(ret, lassign);
    CHECK(COMPILE_(ret, "NODE_OP_ASGN_AND/OR#nd_value", RNODE_OP_ASGN_OR(node)->nd_value, popped));
    ADD_LABEL(ret, lfin);
    return COMPILE_OK;
}
9689 
/* Compile `super(...)` (NODE_SUPER, explicit arguments) and bare `super`
 * (NODE_ZSUPER), which re-forwards the current method's parameters by
 * reading them back out of the local table. */
static int
compile_super(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, int popped, const enum node_type type)
{
    struct rb_iseq_constant_body *const body = ISEQ_BODY(iseq);
    DECL_ANCHOR(args);
    int argc;
    unsigned int flag = 0;
    struct rb_callinfo_kwarg *keywords = NULL;
    const rb_iseq_t *parent_block = ISEQ_COMPILE_DATA(iseq)->current_block;
    int use_block = 1;

    INIT_ANCHOR(args);
    ISEQ_COMPILE_DATA(iseq)->current_block = NULL;

    if (type == NODE_SUPER) {
        VALUE vargc = setup_args(iseq, args, RNODE_SUPER(node)->nd_args, &flag, &keywords);
        CHECK(!NIL_P(vargc));
        argc = FIX2INT(vargc);
        if ((flag & VM_CALL_ARGS_BLOCKARG) && (flag & VM_CALL_KW_SPLAT) && !(flag & VM_CALL_KW_SPLAT_MUT)) {
            ADD_INSN(args, node, splatkw);
        }

        if (flag & VM_CALL_ARGS_BLOCKARG) {
            /* an explicit &block replaces the implicit block pass-through */
            use_block = 0;
        }
    }
    else {
        /* NODE_ZSUPER: rebuild the argument list from the method's own
         * parameter locals */
        int i;
        const rb_iseq_t *liseq = body->local_iseq;
        const struct rb_iseq_constant_body *const local_body = ISEQ_BODY(liseq);
        const struct rb_iseq_param_keyword *const local_kwd = local_body->param.keyword;
        int lvar_level = get_lvar_level(iseq);

        argc = local_body->param.lead_num;

        /* normal arguments */
        for (i = 0; i < local_body->param.lead_num; i++) {
            int idx = local_body->local_table_size - i;
            ADD_GETLOCAL(args, node, idx, lvar_level);
        }

        /* forward ... */
        if (local_body->param.flags.forwardable) {
            flag |= VM_CALL_FORWARDING;
            int idx = local_body->local_table_size - get_local_var_idx(liseq, idDot3);
            ADD_GETLOCAL(args, node, idx, lvar_level);
        }

        if (local_body->param.flags.has_opt) {
            /* optional arguments */
            int j;
            for (j = 0; j < local_body->param.opt_num; j++) {
                int idx = local_body->local_table_size - (i + j);
                ADD_GETLOCAL(args, node, idx, lvar_level);
            }
            i += j;
            argc = i;
        }
        if (local_body->param.flags.has_rest) {
            /* rest argument */
            int idx = local_body->local_table_size - local_body->param.rest_start;
            ADD_GETLOCAL(args, node, idx, lvar_level);
            ADD_INSN1(args, node, splatarray, RBOOL(local_body->param.flags.has_post));

            argc = local_body->param.rest_start + 1;
            flag |= VM_CALL_ARGS_SPLAT;
        }
        if (local_body->param.flags.has_post) {
            /* post arguments */
            int post_len = local_body->param.post_num;
            int post_start = local_body->param.post_start;

            if (local_body->param.flags.has_rest) {
                /* append post args onto the splat array made above */
                int j;
                for (j=0; j<post_len; j++) {
                    int idx = local_body->local_table_size - (post_start + j);
                    ADD_GETLOCAL(args, node, idx, lvar_level);
                }
                ADD_INSN1(args, node, pushtoarray, INT2FIX(j));
                flag |= VM_CALL_ARGS_SPLAT_MUT;
                /* argc is settled at above */
            }
            else {
                int j;
                for (j=0; j<post_len; j++) {
                    int idx = local_body->local_table_size - (post_start + j);
                    ADD_GETLOCAL(args, node, idx, lvar_level);
                }
                argc = post_len + post_start;
            }
        }

        if (local_body->param.flags.has_kw) { /* TODO: support keywords */
            int local_size = local_body->local_table_size;
            argc++;

            ADD_INSN1(args, node, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));

            /* start from a (dup of the) kwrest hash, or an empty hash, then
             * merge each declared keyword's current value into it */
            if (local_body->param.flags.has_kwrest) {
                int idx = local_body->local_table_size - local_kwd->rest_start;
                ADD_GETLOCAL(args, node, idx, lvar_level);
                RUBY_ASSERT(local_kwd->num > 0);
                ADD_SEND (args, node, rb_intern("dup"), INT2FIX(0));
            }
            else {
                ADD_INSN1(args, node, newhash, INT2FIX(0));
            }
            for (i = 0; i < local_kwd->num; ++i) {
                ID id = local_kwd->table[i];
                int idx = local_size - get_local_var_idx(liseq, id);
                ADD_INSN1(args, node, putobject, ID2SYM(id));
                ADD_GETLOCAL(args, node, idx, lvar_level);
            }
            ADD_SEND(args, node, id_core_hash_merge_ptr, INT2FIX(i * 2 + 1));
            flag |= VM_CALL_KW_SPLAT| VM_CALL_KW_SPLAT_MUT;
        }
        else if (local_body->param.flags.has_kwrest) {
            int idx = local_body->local_table_size - local_kwd->rest_start;
            ADD_GETLOCAL(args, node, idx, lvar_level);
            argc++;
            flag |= VM_CALL_KW_SPLAT;
        }
    }

    /* implicit block pass-through: super forwards the caller's block unless
     * a literal block or &blockarg was supplied */
    if (use_block && parent_block == NULL) {
        iseq_set_use_block(ISEQ_BODY(iseq)->local_iseq);
    }

    flag |= VM_CALL_SUPER | VM_CALL_FCALL;
    if (type == NODE_ZSUPER) flag |= VM_CALL_ZSUPER;
    ADD_INSN(ret, node, putself);
    ADD_SEQ(ret, args);

    const struct rb_callinfo * ci = new_callinfo(iseq, 0, argc, flag, keywords, parent_block != NULL);

    if (vm_ci_flag(ci) & VM_CALL_FORWARDING) {
        ADD_INSN2(ret, node, invokesuperforward, ci, parent_block);
    }
    else {
        ADD_INSN2(ret, node, invokesuper, ci, parent_block);
    }

    if (popped) {
        ADD_INSN(ret, node, pop);
    }
    return COMPILE_OK;
}
9838 
/* Compile `yield` (NODE_YIELD): reject it at top-level/main/class scope,
 * emit invokeblock, mark the enclosing method as using a block, and record
 * outer-variable access when yielding from inside a nested block iseq. */
static int
compile_yield(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, int popped)
{
    DECL_ANCHOR(args);
    VALUE argc;
    unsigned int flag = 0;
    struct rb_callinfo_kwarg *keywords = NULL;

    INIT_ANCHOR(args);

    /* yield is only valid inside a method-like iseq */
    switch (ISEQ_BODY(ISEQ_BODY(iseq)->local_iseq)->type) {
      case ISEQ_TYPE_TOP:
      case ISEQ_TYPE_MAIN:
      case ISEQ_TYPE_CLASS:
        COMPILE_ERROR(ERROR_ARGS "Invalid yield");
        return COMPILE_NG;
      default: /* valid */;
    }

    if (RNODE_YIELD(node)->nd_head) {
        argc = setup_args(iseq, args, RNODE_YIELD(node)->nd_head, &flag, &keywords);
        CHECK(!NIL_P(argc));
    }
    else {
        argc = INT2FIX(0);
    }

    ADD_SEQ(ret, args);
    ADD_INSN1(ret, node, invokeblock, new_callinfo(iseq, 0, FIX2INT(argc), flag, keywords, FALSE));
    iseq_set_use_block(ISEQ_BODY(iseq)->local_iseq);

    if (popped) {
        ADD_INSN(ret, node, pop);
    }

    /* count how many iseq levels separate us from the method owning the
     * block; a nested yield must register outer-variable access */
    int level = 0;
    const rb_iseq_t *tmp_iseq = iseq;
    for (; tmp_iseq != ISEQ_BODY(iseq)->local_iseq; level++ ) {
        tmp_iseq = ISEQ_BODY(tmp_iseq)->parent_iseq;
    }
    if (level > 0) access_outer_variables(iseq, level, rb_intern("yield"), true);

    return COMPILE_OK;
}
9883 
/* Compile =~ match expressions:
 * NODE_MATCH  — a bare regexp literal matched against the special variable
 *               read via getspecial(0,0) (presumably $_ — compare insns.def);
 * NODE_MATCH2 — regexp =~ value (regexp is the receiver);
 * NODE_MATCH3 — value =~ regexp (operands swapped so the regexp receives
 *               the =~ call).
 * MATCH2 with named captures also emits the capture-to-lvar assignments. */
static int
compile_match(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, int popped, const enum node_type type)
{
    DECL_ANCHOR(recv);
    DECL_ANCHOR(val);

    INIT_ANCHOR(recv);
    INIT_ANCHOR(val);
    switch ((int)type) {
      case NODE_MATCH:
        ADD_INSN1(recv, node, putobject, rb_node_regx_string_val(node));
        ADD_INSN2(val, node, getspecial, INT2FIX(0),
                  INT2FIX(0));
        break;
      case NODE_MATCH2:
        CHECK(COMPILE(recv, "receiver", RNODE_MATCH2(node)->nd_recv));
        CHECK(COMPILE(val, "value", RNODE_MATCH2(node)->nd_value));
        break;
      case NODE_MATCH3:
        /* note the deliberate swap: nd_value becomes the receiver */
        CHECK(COMPILE(recv, "receiver", RNODE_MATCH3(node)->nd_value));
        CHECK(COMPILE(val, "value", RNODE_MATCH3(node)->nd_recv));
        break;
    }

    ADD_SEQ(ret, recv);
    ADD_SEQ(ret, val);
    ADD_SEND(ret, node, idEqTilde, INT2FIX(1));

    if (nd_type_p(node, NODE_MATCH2) && RNODE_MATCH2(node)->nd_args) {
        compile_named_capture_assign(iseq, ret, RNODE_MATCH2(node)->nd_args);
    }

    if (popped) {
        ADD_INSN(ret, node, pop);
    }
    return COMPILE_OK;
}
9921 
/* Compile NODE_COLON2. `Scope::Const` (const-style id) uses the inline
 * constant cache when the whole path is cacheable, else a prefix + lookup
 * sequence; `recv::method` (non-const id) is just a regular 1-arg call. */
static int
compile_colon2(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, int popped)
{
    if (rb_is_const_id(RNODE_COLON2(node)->nd_mid)) {
        /* constant */
        VALUE segments;
        if (ISEQ_COMPILE_DATA(iseq)->option->inline_const_cache &&
            (segments = collect_const_segments(iseq, node))) {
            /* whole path is static: single cached opt_getconstant_path */
            ISEQ_BODY(iseq)->ic_size++;
            ADD_INSN1(ret, node, opt_getconstant_path, segments);
            RB_OBJ_WRITTEN(iseq, Qundef, segments);
        }
        else {
            /* constant */
            DECL_ANCHOR(pref);
            DECL_ANCHOR(body);

            INIT_ANCHOR(pref);
            INIT_ANCHOR(body);
            CHECK(compile_const_prefix(iseq, node, pref, body));
            if (LIST_INSN_SIZE_ZERO(pref)) {
                /* no scope expression: look up from the current cref */
                ADD_INSN(ret, node, putnil);
                ADD_SEQ(ret, body);
            }
            else {
                ADD_SEQ(ret, pref);
                ADD_SEQ(ret, body);
            }
        }
    }
    else {
        /* function call */
        ADD_CALL_RECEIVER(ret, node);
        CHECK(COMPILE(ret, "colon2#nd_head", RNODE_COLON2(node)->nd_head));
        ADD_CALL(ret, node, RNODE_COLON2(node)->nd_mid, INT2FIX(1));
    }
    if (popped) {
        ADD_INSN(ret, node, pop);
    }
    return COMPILE_OK;
}
9963 
9964 static int
9965 compile_colon3(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, int popped)
9966 {
9967  debugi("colon3#nd_mid", RNODE_COLON3(node)->nd_mid);
9968 
9969  /* add cache insn */
9970  if (ISEQ_COMPILE_DATA(iseq)->option->inline_const_cache) {
9971  ISEQ_BODY(iseq)->ic_size++;
9972  VALUE segments = rb_ary_new_from_args(2, ID2SYM(idNULL), ID2SYM(RNODE_COLON3(node)->nd_mid));
9973  ADD_INSN1(ret, node, opt_getconstant_path, segments);
9974  RB_OBJ_WRITTEN(iseq, Qundef, segments);
9975  }
9976  else {
9977  ADD_INSN1(ret, node, putobject, rb_cObject);
9978  ADD_INSN1(ret, node, putobject, Qtrue);
9979  ADD_INSN1(ret, node, getconstant, ID2SYM(RNODE_COLON3(node)->nd_mid));
9980  }
9981 
9982  if (popped) {
9983  ADD_INSN(ret, node, pop);
9984  }
9985  return COMPILE_OK;
9986 }
9987 
9988 static int
9989 compile_dots(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, int popped, const int excl)
9990 {
9991  VALUE flag = INT2FIX(excl);
9992  const NODE *b = RNODE_DOT2(node)->nd_beg;
9993  const NODE *e = RNODE_DOT2(node)->nd_end;
9994 
9995  if (optimizable_range_item_p(b) && optimizable_range_item_p(e)) {
9996  if (!popped) {
9997  VALUE bv = optimized_range_item(b);
9998  VALUE ev = optimized_range_item(e);
9999  VALUE val = rb_range_new(bv, ev, excl);
10000  ADD_INSN1(ret, node, putobject, val);
10001  RB_OBJ_WRITTEN(iseq, Qundef, val);
10002  }
10003  }
10004  else {
10005  CHECK(COMPILE_(ret, "min", b, popped));
10006  CHECK(COMPILE_(ret, "max", e, popped));
10007  if (!popped) {
10008  ADD_INSN1(ret, node, newrange, flag);
10009  }
10010  }
10011  return COMPILE_OK;
10012 }
10013 
10014 static int
10015 compile_errinfo(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, int popped)
10016 {
10017  if (!popped) {
10018  if (ISEQ_BODY(iseq)->type == ISEQ_TYPE_RESCUE) {
10019  ADD_GETLOCAL(ret, node, LVAR_ERRINFO, 0);
10020  }
10021  else {
10022  const rb_iseq_t *ip = iseq;
10023  int level = 0;
10024  while (ip) {
10025  if (ISEQ_BODY(ip)->type == ISEQ_TYPE_RESCUE) {
10026  break;
10027  }
10028  ip = ISEQ_BODY(ip)->parent_iseq;
10029  level++;
10030  }
10031  if (ip) {
10032  ADD_GETLOCAL(ret, node, LVAR_ERRINFO, level);
10033  }
10034  else {
10035  ADD_INSN(ret, node, putnil);
10036  }
10037  }
10038  }
10039  return COMPILE_OK;
10040 }
10041 
10042 static int
10043 compile_kw_arg(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, int popped)
10044 {
10045  struct rb_iseq_constant_body *const body = ISEQ_BODY(iseq);
10046  LABEL *end_label = NEW_LABEL(nd_line(node));
10047  const NODE *default_value = get_nd_value(RNODE_KW_ARG(node)->nd_body);
10048 
10049  if (default_value == NODE_SPECIAL_REQUIRED_KEYWORD) {
10050  /* required argument. do nothing */
10051  COMPILE_ERROR(ERROR_ARGS "unreachable");
10052  return COMPILE_NG;
10053  }
10054  else if (nd_type_p(default_value, NODE_SYM) ||
10055  nd_type_p(default_value, NODE_REGX) ||
10056  nd_type_p(default_value, NODE_LINE) ||
10057  nd_type_p(default_value, NODE_INTEGER) ||
10058  nd_type_p(default_value, NODE_FLOAT) ||
10059  nd_type_p(default_value, NODE_RATIONAL) ||
10060  nd_type_p(default_value, NODE_IMAGINARY) ||
10061  nd_type_p(default_value, NODE_NIL) ||
10062  nd_type_p(default_value, NODE_TRUE) ||
10063  nd_type_p(default_value, NODE_FALSE)) {
10064  COMPILE_ERROR(ERROR_ARGS "unreachable");
10065  return COMPILE_NG;
10066  }
10067  else {
10068  /* if keywordcheck(_kw_bits, nth_keyword)
10069  * kw = default_value
10070  * end
10071  */
10072  int kw_bits_idx = body->local_table_size - body->param.keyword->bits_start;
10073  int keyword_idx = body->param.keyword->num;
10074 
10075  ADD_INSN2(ret, node, checkkeyword, INT2FIX(kw_bits_idx + VM_ENV_DATA_SIZE - 1), INT2FIX(keyword_idx));
10076  ADD_INSNL(ret, node, branchif, end_label);
10077  CHECK(COMPILE_POPPED(ret, "keyword default argument", RNODE_KW_ARG(node)->nd_body));
10078  ADD_LABEL(ret, end_label);
10079  }
10080  return COMPILE_OK;
10081 }
10082 
static int
compile_attrasgn(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, int popped)
{
    /* Compile attribute assignment: `recv.attr = val`, `recv[idx] = val`,
     * and the safe-navigation form `recv&.attr = val`.  The statement value
     * (the RHS) must remain on the stack when !popped, which drives the
     * putnil/setn stack shuffling below. */
    DECL_ANCHOR(recv);
    DECL_ANCHOR(args);
    unsigned int flag = 0;
    ID mid = RNODE_ATTRASGN(node)->nd_mid;
    VALUE argc;
    LABEL *else_label = NULL;
    VALUE branches = Qfalse;

    /* optimization shortcut
     *   obj["literal"] = value -> opt_aset_with(obj, "literal", value)
     */
    if (mid == idASET && !private_recv_p(node) && RNODE_ATTRASGN(node)->nd_args &&
        nd_type_p(RNODE_ATTRASGN(node)->nd_args, NODE_LIST) && RNODE_LIST(RNODE_ATTRASGN(node)->nd_args)->as.nd_alen == 2 &&
        (nd_type_p(RNODE_LIST(RNODE_ATTRASGN(node)->nd_args)->nd_head, NODE_STR) || nd_type_p(RNODE_LIST(RNODE_ATTRASGN(node)->nd_args)->nd_head, NODE_FILE)) &&
        ISEQ_COMPILE_DATA(iseq)->current_block == NULL &&
        !frozen_string_literal_p(iseq) &&
        ISEQ_COMPILE_DATA(iseq)->option->specialized_instruction)
    {
        VALUE str = get_string_value(RNODE_LIST(RNODE_ATTRASGN(node)->nd_args)->nd_head);
        CHECK(COMPILE(ret, "recv", RNODE_ATTRASGN(node)->nd_recv));
        CHECK(COMPILE(ret, "value", RNODE_LIST(RNODE_LIST(RNODE_ATTRASGN(node)->nd_args)->nd_next)->nd_head));
        if (!popped) {
            /* keep a copy of the RHS below recv/value as the statement value */
            ADD_INSN(ret, node, swap);
            ADD_INSN1(ret, node, topn, INT2FIX(1));
        }
        ADD_INSN2(ret, node, opt_aset_with, str,
                  new_callinfo(iseq, idASET, 2, 0, NULL, FALSE));
        RB_OBJ_WRITTEN(iseq, Qundef, str);
        ADD_INSN(ret, node, pop);
        return COMPILE_OK;
    }

    INIT_ANCHOR(recv);
    INIT_ANCHOR(args);
    /* setup_args returns the (fixnum) argument count, or Qnil on failure */
    argc = setup_args(iseq, args, RNODE_ATTRASGN(node)->nd_args, &flag, NULL);
    CHECK(!NIL_P(argc));

    int asgnflag = COMPILE_RECV(recv, "recv", node, RNODE_ATTRASGN(node)->nd_recv);
    CHECK(asgnflag != -1);
    flag |= (unsigned int)asgnflag;

    debugp_param("argc", argc);
    debugp_param("nd_mid", ID2SYM(mid));

    if (!rb_is_attrset_id(mid)) {
        /* safe nav attr */
        mid = rb_id_attrset(mid);
        else_label = qcall_branch_start(iseq, recv, &branches, node, node);
    }
    if (!popped) {
        /* reserve a slot (putnil) below the call operands; it will be
         * overwritten with the statement value via setn */
        ADD_INSN(ret, node, putnil);
        ADD_SEQ(ret, recv);
        ADD_SEQ(ret, args);

        if (flag & VM_CALL_ARGS_SPLAT) {
            /* with a splat the RHS is the last element of the argument
             * array: fetch it with ary[-1] and store it into the slot */
            ADD_INSN(ret, node, dup);
            ADD_INSN1(ret, node, putobject, INT2FIX(-1));
            ADD_SEND_WITH_FLAG(ret, node, idAREF, INT2FIX(1), INT2FIX(asgnflag));
            ADD_INSN1(ret, node, setn, FIXNUM_INC(argc, 2));
            ADD_INSN (ret, node, pop);
        }
        else {
            /* RHS is on top of the stack: copy it into the reserved slot */
            ADD_INSN1(ret, node, setn, FIXNUM_INC(argc, 1));
        }
    }
    else {
        ADD_SEQ(ret, recv);
        ADD_SEQ(ret, args);
    }
    ADD_SEND_WITH_FLAG(ret, node, mid, argc, INT2FIX(flag));
    qcall_branch_end(iseq, ret, else_label, branches, node, node);
    /* discard the setter's return value; the statement value (if any)
     * is the reserved slot left underneath */
    ADD_INSN(ret, node, pop);
    return COMPILE_OK;
}
10160 
10161 static int
10162 compile_make_shareable_node(rb_iseq_t *iseq, LINK_ANCHOR *ret, LINK_ANCHOR *sub, const NODE *value, bool copy)
10163 {
10164  ADD_INSN1(ret, value, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
10165  ADD_SEQ(ret, sub);
10166 
10167  if (copy) {
10168  /*
10169  * NEW_CALL(fcore, rb_intern("make_shareable_copy"),
10170  * NEW_LIST(value, loc), loc);
10171  */
10172  ADD_SEND_WITH_FLAG(ret, value, rb_intern("make_shareable_copy"), INT2FIX(1), INT2FIX(VM_CALL_ARGS_SIMPLE));
10173  }
10174  else {
10175  /*
10176  * NEW_CALL(fcore, rb_intern("make_shareable"),
10177  * NEW_LIST(value, loc), loc);
10178  */
10179  ADD_SEND_WITH_FLAG(ret, value, rb_intern("make_shareable"), INT2FIX(1), INT2FIX(VM_CALL_ARGS_SIMPLE));
10180  }
10181 
10182  return COMPILE_OK;
10183 }
10184 
static VALUE
node_const_decl_val(const NODE *node)
{
    /* Build a human-readable path string naming the constant declared by
     * `node`, for diagnostics:
     *   NODE_CDECL  -> "Const" (or recurse into its scoped nd_else form)
     *   NODE_COLON2 -> "A::B::Const" (dynamic prefixes rendered as "...")
     *   NODE_COLON3 -> "::Const"
     * Returns a deduplicated frozen string (fstring). */
    VALUE path;
    switch (nd_type(node)) {
      case NODE_CDECL:
        if (RNODE_CDECL(node)->nd_vid) {
            /* simple `Const = ...`: the bare name is the whole path */
            path = rb_id2str(RNODE_CDECL(node)->nd_vid);
            goto end;
        }
        else {
            /* scoped `A::B = ...`: describe the colon node instead */
            node = RNODE_CDECL(node)->nd_else;
        }
        break;
      case NODE_COLON2:
        break;
      case NODE_COLON3:
        // ::Const
        path = rb_str_new_cstr("::");
        rb_str_append(path, rb_id2str(RNODE_COLON3(node)->nd_mid));
        goto end;
      default:
        rb_bug("unexpected node: %s", ruby_node_name(nd_type(node)));
        UNREACHABLE_RETURN(0);
    }

    /* walk the COLON2 chain innermost-name-first collecting segments,
     * then reverse and join with "::" */
    path = rb_ary_new();
    /* NOTE(review): if node were NULL here, `path` would still be the raw
     * array when it reaches rb_fstring below — presumably unreachable for
     * well-formed CDECL nodes; confirm */
    if (node) {
        for (; node && nd_type_p(node, NODE_COLON2); node = RNODE_COLON2(node)->nd_head) {
            rb_ary_push(path, rb_id2str(RNODE_COLON2(node)->nd_mid));
        }
        if (node && nd_type_p(node, NODE_CONST)) {
            // Const::Name
            rb_ary_push(path, rb_id2str(RNODE_CONST(node)->nd_vid));
        }
        else if (node && nd_type_p(node, NODE_COLON3)) {
            // ::Const::Name
            rb_ary_push(path, rb_id2str(RNODE_COLON3(node)->nd_mid));
            rb_ary_push(path, rb_str_new(0, 0)); /* empty head segment yields the leading "::" after join */
        }
        else {
            // expression::Name
            rb_ary_push(path, rb_str_new_cstr("..."));
        }
        path = rb_ary_join(rb_ary_reverse(path), rb_str_new_cstr("::"));
    }
  end:
    path = rb_fstring(path);
    return path;
}
10235 
10236 static VALUE
10237 const_decl_path(NODE *dest)
10238 {
10239  VALUE path = Qnil;
10240  if (!nd_type_p(dest, NODE_CALL)) {
10241  path = node_const_decl_val(dest);
10242  }
10243  return path;
10244 }
10245 
10246 static int
10247 compile_ensure_shareable_node(rb_iseq_t *iseq, LINK_ANCHOR *ret, NODE *dest, const NODE *value)
10248 {
10249  /*
10250  *. RubyVM::FrozenCore.ensure_shareable(value, const_decl_path(dest))
10251  */
10252  VALUE path = const_decl_path(dest);
10253  ADD_INSN1(ret, value, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
10254  CHECK(COMPILE(ret, "compile_ensure_shareable_node", value));
10255  ADD_INSN1(ret, value, putobject, path);
10256  RB_OBJ_WRITTEN(iseq, Qundef, path);
10257  ADD_SEND_WITH_FLAG(ret, value, rb_intern("ensure_shareable"), INT2FIX(2), INT2FIX(VM_CALL_ARGS_SIMPLE));
10258 
10259  return COMPILE_OK;
10260 }
10261 
10262 #ifndef SHAREABLE_BARE_EXPRESSION
10263 #define SHAREABLE_BARE_EXPRESSION 1
10264 #endif
10265 
static int
compile_shareable_literal_constant(rb_iseq_t *iseq, LINK_ANCHOR *ret, enum rb_parser_shareability shareable, NODE *dest, const NODE *node, size_t level, VALUE *value_p, int *shareable_literal_p)
{
    /* Compile `node` — the RHS of a constant assignment, or an element
     * nested inside it at recursion depth `level` — under the Ractor
     * shareability mode `shareable` (`dest` names the constant for error
     * messages).
     *
     * Outputs:
     *   *value_p             - the literal VALUE when it could be built at
     *                          compile time, Qundef otherwise
     *   *shareable_literal_p - 1 when the emitted code is known to push a
     *                          shareable object, 0 otherwise
     *
     * For arrays/hashes, `lit` tracks a compile-time copy of the literal:
     *   array/hash - all elements so far embeddable; keep collecting
     *   Qnil       - elements shareable but not embeddable; the collection
     *                must be made shareable at runtime
     *   Qfalse     - contains non-shareable parts; emit a plain collection
     */
# define compile_shareable_literal_constant_next(node, anchor, value_p, shareable_literal_p) \
    compile_shareable_literal_constant(iseq, anchor, shareable, dest, node, level+1, value_p, shareable_literal_p)
    VALUE lit = Qnil;
    DECL_ANCHOR(anchor);

    enum node_type type = nd_type(node);
    switch (type) {
      case NODE_TRUE:
        *value_p = Qtrue;
        goto compile;
      case NODE_FALSE:
        *value_p = Qfalse;
        goto compile;
      case NODE_NIL:
        *value_p = Qnil;
        goto compile;
      case NODE_SYM:
        *value_p = rb_node_sym_string_val(node);
        goto compile;
      case NODE_REGX:
        *value_p = rb_node_regx_string_val(node);
        goto compile;
      case NODE_LINE:
        *value_p = rb_node_line_lineno_val(node);
        goto compile;
      case NODE_INTEGER:
        *value_p = rb_node_integer_literal_val(node);
        goto compile;
      case NODE_FLOAT:
        *value_p = rb_node_float_literal_val(node);
        goto compile;
      case NODE_RATIONAL:
        *value_p = rb_node_rational_literal_val(node);
        goto compile;
      case NODE_IMAGINARY:
        *value_p = rb_node_imaginary_literal_val(node);
        goto compile;
      case NODE_ENCODING:
        *value_p = rb_node_encoding_val(node);
        /* fall through */

      compile:
        /* immediate/frozen literals: compile normally, already shareable */
        CHECK(COMPILE(ret, "shareable_literal_constant", node));
        *shareable_literal_p = 1;
        return COMPILE_OK;

      case NODE_DSTR:
        CHECK(COMPILE(ret, "shareable_literal_constant", node));
        if (shareable == rb_parser_shareable_literal) {
            /*
             * NEW_CALL(node, idUMinus, 0, loc);
             *
             * -"#{var}"
             */
            ADD_SEND_WITH_FLAG(ret, node, idUMinus, INT2FIX(0), INT2FIX(VM_CALL_ARGS_SIMPLE));
        }
        /* value unknown at compile time, but the result is shareable */
        *value_p = Qundef;
        *shareable_literal_p = 1;
        return COMPILE_OK;

      case NODE_STR:{
        VALUE lit = rb_node_str_string_val(node);
        ADD_INSN1(ret, node, putobject, lit);
        RB_OBJ_WRITTEN(iseq, Qundef, lit);
        *value_p = lit;
        *shareable_literal_p = 1;

        return COMPILE_OK;
      }

      case NODE_FILE:{
        VALUE lit = rb_node_file_path_val(node);
        ADD_INSN1(ret, node, putobject, lit);
        RB_OBJ_WRITTEN(iseq, Qundef, lit);
        *value_p = lit;
        *shareable_literal_p = 1;

        return COMPILE_OK;
      }

      case NODE_ZLIST:{
        /* empty array literal: embed a frozen empty array */
        VALUE lit = rb_ary_new();
        OBJ_FREEZE(lit);
        ADD_INSN1(ret, node, putobject, lit);
        RB_OBJ_WRITTEN(iseq, Qundef, lit);
        *value_p = lit;
        *shareable_literal_p = 1;

        return COMPILE_OK;
      }

      case NODE_LIST:{
        /* array literal: recurse over elements into `anchor`, tracking
         * in `lit` whether the whole array can be embedded (see header) */
        INIT_ANCHOR(anchor);
        lit = rb_ary_new();
        for (NODE *n = (NODE *)node; n; n = RNODE_LIST(n)->nd_next) {
            VALUE val;
            int shareable_literal_p2;
            NODE *elt = RNODE_LIST(n)->nd_head;
            if (elt) {
                CHECK(compile_shareable_literal_constant_next(elt, anchor, &val, &shareable_literal_p2));
                if (shareable_literal_p2) {
                    /* noop */
                }
                else if (RTEST(lit)) {
                    rb_ary_clear(lit);
                    lit = Qfalse;
                }
            }
            if (RTEST(lit)) {
                if (!UNDEF_P(val)) {
                    rb_ary_push(lit, val);
                }
                else {
                    rb_ary_clear(lit);
                    lit = Qnil; /* make shareable at runtime */
                }
            }
        }
        break;
      }
      case NODE_HASH:{
        /* only brace hashes ({...}) are literal; keyword-style hashes
         * are not treated as shareable literals */
        if (!RNODE_HASH(node)->nd_brace) {
            *value_p = Qundef;
            *shareable_literal_p = 0;
            return COMPILE_OK;
        }

        /* hash literal: recurse over key/value pairs, same `lit`
         * tracking protocol as NODE_LIST above */
        INIT_ANCHOR(anchor);
        lit = rb_hash_new();
        for (NODE *n = RNODE_HASH(node)->nd_head; n; n = RNODE_LIST(RNODE_LIST(n)->nd_next)->nd_next) {
            VALUE key_val;
            VALUE value_val;
            int shareable_literal_p2;
            NODE *key = RNODE_LIST(n)->nd_head;
            NODE *val = RNODE_LIST(RNODE_LIST(n)->nd_next)->nd_head;
            if (key) {
                CHECK(compile_shareable_literal_constant_next(key, anchor, &key_val, &shareable_literal_p2));
                if (shareable_literal_p2) {
                    /* noop */
                }
                else if (RTEST(lit)) {
                    rb_hash_clear(lit);
                    lit = Qfalse;
                }
            }
            if (val) {
                CHECK(compile_shareable_literal_constant_next(val, anchor, &value_val, &shareable_literal_p2));
                if (shareable_literal_p2) {
                    /* noop */
                }
                else if (RTEST(lit)) {
                    rb_hash_clear(lit);
                    lit = Qfalse;
                }
            }
            if (RTEST(lit)) {
                if (!UNDEF_P(key_val) && !UNDEF_P(value_val)) {
                    rb_hash_aset(lit, key_val, value_val);
                }
                else {
                    rb_hash_clear(lit);
                    lit = Qnil; /* make shareable at runtime */
                }
            }
        }
        break;
      }

      default:
        /* arbitrary expression: wrap in ensure_shareable when the literal
         * mode requires it, otherwise compile plainly and report
         * not-shareable */
        if (shareable == rb_parser_shareable_literal &&
            (SHAREABLE_BARE_EXPRESSION || level > 0)) {
            CHECK(compile_ensure_shareable_node(iseq, ret, dest, node));
            *value_p = Qundef;
            *shareable_literal_p = 1;
            return COMPILE_OK;
        }
        CHECK(COMPILE(ret, "shareable_literal_constant", node));
        *value_p = Qundef;
        *shareable_literal_p = 0;
        return COMPILE_OK;
    }

    /* Array or Hash */
    if (!lit) {
        /* lit == Qfalse: not shareable; emit plain collection construction */
        if (nd_type(node) == NODE_LIST) {
            ADD_INSN1(anchor, node, newarray, INT2FIX(RNODE_LIST(node)->as.nd_alen));
        }
        else if (nd_type(node) == NODE_HASH) {
            int len = (int)RNODE_LIST(RNODE_HASH(node)->nd_head)->as.nd_alen;
            ADD_INSN1(anchor, node, newhash, INT2FIX(len));
        }
        *value_p = Qundef;
        *shareable_literal_p = 0;
        ADD_SEQ(ret, anchor);
        return COMPILE_OK;
    }
    if (NIL_P(lit)) {
        // if shareable_literal, all elements should have been ensured
        // as shareable
        if (nd_type(node) == NODE_LIST) {
            ADD_INSN1(anchor, node, newarray, INT2FIX(RNODE_LIST(node)->as.nd_alen));
        }
        else if (nd_type(node) == NODE_HASH) {
            int len = (int)RNODE_LIST(RNODE_HASH(node)->nd_head)->as.nd_alen;
            ADD_INSN1(anchor, node, newhash, INT2FIX(len));
        }
        CHECK(compile_make_shareable_node(iseq, ret, anchor, node, false));
        *value_p = Qundef;
        *shareable_literal_p = 1;
    }
    else {
        /* whole collection built at compile time: embed it as one
         * shareable object and discard the per-element code */
        VALUE val = rb_ractor_make_shareable(lit);
        ADD_INSN1(ret, node, putobject, val);
        RB_OBJ_WRITTEN(iseq, Qundef, val);
        *value_p = val;
        *shareable_literal_p = 1;
    }

    return COMPILE_OK;
}
10488 
10489 static int
10490 compile_shareable_constant_value(rb_iseq_t *iseq, LINK_ANCHOR *ret, enum rb_parser_shareability shareable, const NODE *lhs, const NODE *value)
10491 {
10492  int literal_p = 0;
10493  VALUE val;
10494  DECL_ANCHOR(anchor);
10495  INIT_ANCHOR(anchor);
10496 
10497  switch (shareable) {
10498  case rb_parser_shareable_none:
10499  CHECK(COMPILE(ret, "compile_shareable_constant_value", value));
10500  return COMPILE_OK;
10501 
10502  case rb_parser_shareable_literal:
10503  CHECK(compile_shareable_literal_constant(iseq, anchor, shareable, (NODE *)lhs, value, 0, &val, &literal_p));
10504  ADD_SEQ(ret, anchor);
10505  return COMPILE_OK;
10506 
10507  case rb_parser_shareable_copy:
10508  case rb_parser_shareable_everything:
10509  CHECK(compile_shareable_literal_constant(iseq, anchor, shareable, (NODE *)lhs, value, 0, &val, &literal_p));
10510  if (!literal_p) {
10511  CHECK(compile_make_shareable_node(iseq, ret, anchor, value, shareable == rb_parser_shareable_copy));
10512  }
10513  else {
10514  ADD_SEQ(ret, anchor);
10515  }
10516  return COMPILE_OK;
10517  default:
10518  rb_bug("unexpected rb_parser_shareability: %d", shareable);
10519  }
10520 }
10521 
10522 static int iseq_compile_each0(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, int popped);
10530 static int
10531 iseq_compile_each(rb_iseq_t *iseq, LINK_ANCHOR *ret, const NODE *node, int popped)
10532 {
10533  if (node == 0) {
10534  if (!popped) {
10535  int lineno = ISEQ_COMPILE_DATA(iseq)->last_line;
10536  if (lineno == 0) lineno = FIX2INT(rb_iseq_first_lineno(iseq));
10537  debugs("node: NODE_NIL(implicit)\n");
10538  ADD_SYNTHETIC_INSN(ret, lineno, -1, putnil);
10539  }
10540  return COMPILE_OK;
10541  }
10542  return iseq_compile_each0(iseq, ret, node, popped);
10543 }
10544 
10545 static int
10546 iseq_compile_each0(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const NODE *const node, int popped)
10547 {
10548  const int line = (int)nd_line(node);
10549  const enum node_type type = nd_type(node);
10550  struct rb_iseq_constant_body *const body = ISEQ_BODY(iseq);
10551 
10552  if (ISEQ_COMPILE_DATA(iseq)->last_line == line) {
10553  /* ignore */
10554  }
10555  else {
10556  if (nd_fl_newline(node)) {
10557  int event = RUBY_EVENT_LINE;
10558  ISEQ_COMPILE_DATA(iseq)->last_line = line;
10559  if (ISEQ_COVERAGE(iseq) && ISEQ_LINE_COVERAGE(iseq)) {
10560  event |= RUBY_EVENT_COVERAGE_LINE;
10561  }
10562  ADD_TRACE(ret, event);
10563  }
10564  }
10565 
10566  debug_node_start(node);
10567 #undef BEFORE_RETURN
10568 #define BEFORE_RETURN debug_node_end()
10569 
10570  switch (type) {
10571  case NODE_BLOCK:
10572  CHECK(compile_block(iseq, ret, node, popped));
10573  break;
10574  case NODE_IF:
10575  case NODE_UNLESS:
10576  CHECK(compile_if(iseq, ret, node, popped, type));
10577  break;
10578  case NODE_CASE:
10579  CHECK(compile_case(iseq, ret, node, popped));
10580  break;
10581  case NODE_CASE2:
10582  CHECK(compile_case2(iseq, ret, node, popped));
10583  break;
10584  case NODE_CASE3:
10585  CHECK(compile_case3(iseq, ret, node, popped));
10586  break;
10587  case NODE_WHILE:
10588  case NODE_UNTIL:
10589  CHECK(compile_loop(iseq, ret, node, popped, type));
10590  break;
10591  case NODE_FOR:
10592  case NODE_ITER:
10593  CHECK(compile_iter(iseq, ret, node, popped));
10594  break;
10595  case NODE_FOR_MASGN:
10596  CHECK(compile_for_masgn(iseq, ret, node, popped));
10597  break;
10598  case NODE_BREAK:
10599  CHECK(compile_break(iseq, ret, node, popped));
10600  break;
10601  case NODE_NEXT:
10602  CHECK(compile_next(iseq, ret, node, popped));
10603  break;
10604  case NODE_REDO:
10605  CHECK(compile_redo(iseq, ret, node, popped));
10606  break;
10607  case NODE_RETRY:
10608  CHECK(compile_retry(iseq, ret, node, popped));
10609  break;
10610  case NODE_BEGIN:{
10611  CHECK(COMPILE_(ret, "NODE_BEGIN", RNODE_BEGIN(node)->nd_body, popped));
10612  break;
10613  }
10614  case NODE_RESCUE:
10615  CHECK(compile_rescue(iseq, ret, node, popped));
10616  break;
10617  case NODE_RESBODY:
10618  CHECK(compile_resbody(iseq, ret, node, popped));
10619  break;
10620  case NODE_ENSURE:
10621  CHECK(compile_ensure(iseq, ret, node, popped));
10622  break;
10623 
10624  case NODE_AND:
10625  case NODE_OR:{
10626  LABEL *end_label = NEW_LABEL(line);
10627  CHECK(COMPILE(ret, "nd_1st", RNODE_OR(node)->nd_1st));
10628  if (!popped) {
10629  ADD_INSN(ret, node, dup);
10630  }
10631  if (type == NODE_AND) {
10632  ADD_INSNL(ret, node, branchunless, end_label);
10633  }
10634  else {
10635  ADD_INSNL(ret, node, branchif, end_label);
10636  }
10637  if (!popped) {
10638  ADD_INSN(ret, node, pop);
10639  }
10640  CHECK(COMPILE_(ret, "nd_2nd", RNODE_OR(node)->nd_2nd, popped));
10641  ADD_LABEL(ret, end_label);
10642  break;
10643  }
10644 
10645  case NODE_MASGN:{
10646  compile_massign(iseq, ret, node, popped);
10647  break;
10648  }
10649 
10650  case NODE_LASGN:{
10651  ID id = RNODE_LASGN(node)->nd_vid;
10652  int idx = ISEQ_BODY(body->local_iseq)->local_table_size - get_local_var_idx(iseq, id);
10653 
10654  debugs("lvar: %s idx: %d\n", rb_id2name(id), idx);
10655  CHECK(COMPILE(ret, "rvalue", RNODE_LASGN(node)->nd_value));
10656 
10657  if (!popped) {
10658  ADD_INSN(ret, node, dup);
10659  }
10660  ADD_SETLOCAL(ret, node, idx, get_lvar_level(iseq));
10661  break;
10662  }
10663  case NODE_DASGN: {
10664  int idx, lv, ls;
10665  ID id = RNODE_DASGN(node)->nd_vid;
10666  CHECK(COMPILE(ret, "dvalue", RNODE_DASGN(node)->nd_value));
10667  debugi("dassn id", rb_id2str(id) ? id : '*');
10668 
10669  if (!popped) {
10670  ADD_INSN(ret, node, dup);
10671  }
10672 
10673  idx = get_dyna_var_idx(iseq, id, &lv, &ls);
10674 
10675  if (idx < 0) {
10676  COMPILE_ERROR(ERROR_ARGS "NODE_DASGN: unknown id (%"PRIsVALUE")",
10677  rb_id2str(id));
10678  goto ng;
10679  }
10680  ADD_SETLOCAL(ret, node, ls - idx, lv);
10681  break;
10682  }
10683  case NODE_GASGN:{
10684  CHECK(COMPILE(ret, "lvalue", RNODE_GASGN(node)->nd_value));
10685 
10686  if (!popped) {
10687  ADD_INSN(ret, node, dup);
10688  }
10689  ADD_INSN1(ret, node, setglobal, ID2SYM(RNODE_GASGN(node)->nd_vid));
10690  break;
10691  }
10692  case NODE_IASGN:{
10693  CHECK(COMPILE(ret, "lvalue", RNODE_IASGN(node)->nd_value));
10694  if (!popped) {
10695  ADD_INSN(ret, node, dup);
10696  }
10697  ADD_INSN2(ret, node, setinstancevariable,
10698  ID2SYM(RNODE_IASGN(node)->nd_vid),
10699  get_ivar_ic_value(iseq,RNODE_IASGN(node)->nd_vid));
10700  break;
10701  }
10702  case NODE_CDECL:{
10703  if (RNODE_CDECL(node)->nd_vid) {
10704  CHECK(compile_shareable_constant_value(iseq, ret, RNODE_CDECL(node)->shareability, node, RNODE_CDECL(node)->nd_value));
10705 
10706  if (!popped) {
10707  ADD_INSN(ret, node, dup);
10708  }
10709 
10710  ADD_INSN1(ret, node, putspecialobject,
10711  INT2FIX(VM_SPECIAL_OBJECT_CONST_BASE));
10712  ADD_INSN1(ret, node, setconstant, ID2SYM(RNODE_CDECL(node)->nd_vid));
10713  }
10714  else {
10715  compile_cpath(ret, iseq, RNODE_CDECL(node)->nd_else);
10716  CHECK(compile_shareable_constant_value(iseq, ret, RNODE_CDECL(node)->shareability, node, RNODE_CDECL(node)->nd_value));
10717  ADD_INSN(ret, node, swap);
10718 
10719  if (!popped) {
10720  ADD_INSN1(ret, node, topn, INT2FIX(1));
10721  ADD_INSN(ret, node, swap);
10722  }
10723 
10724  ADD_INSN1(ret, node, setconstant, ID2SYM(get_node_colon_nd_mid(RNODE_CDECL(node)->nd_else)));
10725  }
10726  break;
10727  }
10728  case NODE_CVASGN:{
10729  CHECK(COMPILE(ret, "cvasgn val", RNODE_CVASGN(node)->nd_value));
10730  if (!popped) {
10731  ADD_INSN(ret, node, dup);
10732  }
10733  ADD_INSN2(ret, node, setclassvariable,
10734  ID2SYM(RNODE_CVASGN(node)->nd_vid),
10735  get_cvar_ic_value(iseq, RNODE_CVASGN(node)->nd_vid));
10736  break;
10737  }
10738  case NODE_OP_ASGN1:
10739  CHECK(compile_op_asgn1(iseq, ret, node, popped));
10740  break;
10741  case NODE_OP_ASGN2:
10742  CHECK(compile_op_asgn2(iseq, ret, node, popped));
10743  break;
10744  case NODE_OP_CDECL:
10745  CHECK(compile_op_cdecl(iseq, ret, node, popped));
10746  break;
10747  case NODE_OP_ASGN_AND:
10748  case NODE_OP_ASGN_OR:
10749  CHECK(compile_op_log(iseq, ret, node, popped, type));
10750  break;
10751  case NODE_CALL: /* obj.foo */
10752  case NODE_OPCALL: /* foo[] */
10753  if (compile_call_precheck_freeze(iseq, ret, node, node, popped) == TRUE) {
10754  break;
10755  }
10756  case NODE_QCALL: /* obj&.foo */
10757  case NODE_FCALL: /* foo() */
10758  case NODE_VCALL: /* foo (variable or call) */
10759  if (compile_call(iseq, ret, node, type, node, popped, false) == COMPILE_NG) {
10760  goto ng;
10761  }
10762  break;
10763  case NODE_SUPER:
10764  case NODE_ZSUPER:
10765  CHECK(compile_super(iseq, ret, node, popped, type));
10766  break;
10767  case NODE_LIST:{
10768  CHECK(compile_array(iseq, ret, node, popped, TRUE) >= 0);
10769  break;
10770  }
10771  case NODE_ZLIST:{
10772  if (!popped) {
10773  ADD_INSN1(ret, node, newarray, INT2FIX(0));
10774  }
10775  break;
10776  }
10777  case NODE_HASH:
10778  CHECK(compile_hash(iseq, ret, node, FALSE, popped) >= 0);
10779  break;
10780  case NODE_RETURN:
10781  CHECK(compile_return(iseq, ret, node, popped));
10782  break;
10783  case NODE_YIELD:
10784  CHECK(compile_yield(iseq, ret, node, popped));
10785  break;
10786  case NODE_LVAR:{
10787  if (!popped) {
10788  compile_lvar(iseq, ret, node, RNODE_LVAR(node)->nd_vid);
10789  }
10790  break;
10791  }
10792  case NODE_DVAR:{
10793  int lv, idx, ls;
10794  debugi("nd_vid", RNODE_DVAR(node)->nd_vid);
10795  if (!popped) {
10796  idx = get_dyna_var_idx(iseq, RNODE_DVAR(node)->nd_vid, &lv, &ls);
10797  if (idx < 0) {
10798  COMPILE_ERROR(ERROR_ARGS "unknown dvar (%"PRIsVALUE")",
10799  rb_id2str(RNODE_DVAR(node)->nd_vid));
10800  goto ng;
10801  }
10802  ADD_GETLOCAL(ret, node, ls - idx, lv);
10803  }
10804  break;
10805  }
10806  case NODE_GVAR:{
10807  ADD_INSN1(ret, node, getglobal, ID2SYM(RNODE_GVAR(node)->nd_vid));
10808  if (popped) {
10809  ADD_INSN(ret, node, pop);
10810  }
10811  break;
10812  }
10813  case NODE_IVAR:{
10814  debugi("nd_vid", RNODE_IVAR(node)->nd_vid);
10815  if (!popped) {
10816  ADD_INSN2(ret, node, getinstancevariable,
10817  ID2SYM(RNODE_IVAR(node)->nd_vid),
10818  get_ivar_ic_value(iseq, RNODE_IVAR(node)->nd_vid));
10819  }
10820  break;
10821  }
10822  case NODE_CONST:{
10823  debugi("nd_vid", RNODE_CONST(node)->nd_vid);
10824 
10825  if (ISEQ_COMPILE_DATA(iseq)->option->inline_const_cache) {
10826  body->ic_size++;
10827  VALUE segments = rb_ary_new_from_args(1, ID2SYM(RNODE_CONST(node)->nd_vid));
10828  ADD_INSN1(ret, node, opt_getconstant_path, segments);
10829  RB_OBJ_WRITTEN(iseq, Qundef, segments);
10830  }
10831  else {
10832  ADD_INSN(ret, node, putnil);
10833  ADD_INSN1(ret, node, putobject, Qtrue);
10834  ADD_INSN1(ret, node, getconstant, ID2SYM(RNODE_CONST(node)->nd_vid));
10835  }
10836 
10837  if (popped) {
10838  ADD_INSN(ret, node, pop);
10839  }
10840  break;
10841  }
10842  case NODE_CVAR:{
10843  if (!popped) {
10844  ADD_INSN2(ret, node, getclassvariable,
10845  ID2SYM(RNODE_CVAR(node)->nd_vid),
10846  get_cvar_ic_value(iseq, RNODE_CVAR(node)->nd_vid));
10847  }
10848  break;
10849  }
10850  case NODE_NTH_REF:{
10851  if (!popped) {
10852  if (!RNODE_NTH_REF(node)->nd_nth) {
10853  ADD_INSN(ret, node, putnil);
10854  break;
10855  }
10856  ADD_INSN2(ret, node, getspecial, INT2FIX(1) /* '~' */,
10857  INT2FIX(RNODE_NTH_REF(node)->nd_nth << 1));
10858  }
10859  break;
10860  }
10861  case NODE_BACK_REF:{
10862  if (!popped) {
10863  ADD_INSN2(ret, node, getspecial, INT2FIX(1) /* '~' */,
10864  INT2FIX(0x01 | (RNODE_BACK_REF(node)->nd_nth << 1)));
10865  }
10866  break;
10867  }
10868  case NODE_MATCH:
10869  case NODE_MATCH2:
10870  case NODE_MATCH3:
10871  CHECK(compile_match(iseq, ret, node, popped, type));
10872  break;
10873  case NODE_SYM:{
10874  if (!popped) {
10875  ADD_INSN1(ret, node, putobject, rb_node_sym_string_val(node));
10876  }
10877  break;
10878  }
10879  case NODE_LINE:{
10880  if (!popped) {
10881  ADD_INSN1(ret, node, putobject, rb_node_line_lineno_val(node));
10882  }
10883  break;
10884  }
10885  case NODE_ENCODING:{
10886  if (!popped) {
10887  ADD_INSN1(ret, node, putobject, rb_node_encoding_val(node));
10888  }
10889  break;
10890  }
10891  case NODE_INTEGER:{
10892  VALUE lit = rb_node_integer_literal_val(node);
10893  debugp_param("integer", lit);
10894  if (!popped) {
10895  ADD_INSN1(ret, node, putobject, lit);
10896  RB_OBJ_WRITTEN(iseq, Qundef, lit);
10897  }
10898  break;
10899  }
10900  case NODE_FLOAT:{
10901  VALUE lit = rb_node_float_literal_val(node);
10902  debugp_param("float", lit);
10903  if (!popped) {
10904  ADD_INSN1(ret, node, putobject, lit);
10905  RB_OBJ_WRITTEN(iseq, Qundef, lit);
10906  }
10907  break;
10908  }
10909  case NODE_RATIONAL:{
10910  VALUE lit = rb_node_rational_literal_val(node);
10911  debugp_param("rational", lit);
10912  if (!popped) {
10913  ADD_INSN1(ret, node, putobject, lit);
10914  RB_OBJ_WRITTEN(iseq, Qundef, lit);
10915  }
10916  break;
10917  }
10918  case NODE_IMAGINARY:{
10919  VALUE lit = rb_node_imaginary_literal_val(node);
10920  debugp_param("imaginary", lit);
10921  if (!popped) {
10922  ADD_INSN1(ret, node, putobject, lit);
10923  RB_OBJ_WRITTEN(iseq, Qundef, lit);
10924  }
10925  break;
10926  }
10927  case NODE_FILE:
10928  case NODE_STR:{
10929  debugp_param("nd_lit", get_string_value(node));
10930  if (!popped) {
10931  VALUE lit = get_string_value(node);
10932  const rb_compile_option_t *option = ISEQ_COMPILE_DATA(iseq)->option;
10933  if ((option->debug_frozen_string_literal || RTEST(ruby_debug)) &&
10934  option->frozen_string_literal != ISEQ_FROZEN_STRING_LITERAL_DISABLED) {
10935  lit = rb_str_with_debug_created_info(lit, rb_iseq_path(iseq), line);
10936  }
10937  switch (option->frozen_string_literal) {
10938  case ISEQ_FROZEN_STRING_LITERAL_UNSET:
10939  ADD_INSN1(ret, node, putchilledstring, lit);
10940  break;
10941  case ISEQ_FROZEN_STRING_LITERAL_DISABLED:
10942  ADD_INSN1(ret, node, putstring, lit);
10943  break;
10944  case ISEQ_FROZEN_STRING_LITERAL_ENABLED:
10945  ADD_INSN1(ret, node, putobject, lit);
10946  break;
10947  default:
10948  rb_bug("invalid frozen_string_literal");
10949  }
10950  RB_OBJ_WRITTEN(iseq, Qundef, lit);
10951  }
10952  break;
10953  }
10954  case NODE_DSTR:{
10955  compile_dstr(iseq, ret, node);
10956 
10957  if (popped) {
10958  ADD_INSN(ret, node, pop);
10959  }
10960  break;
10961  }
10962  case NODE_XSTR:{
10963  ADD_CALL_RECEIVER(ret, node);
10964  VALUE str = rb_node_str_string_val(node);
10965  ADD_INSN1(ret, node, putobject, str);
10966  RB_OBJ_WRITTEN(iseq, Qundef, str);
10967  ADD_CALL(ret, node, idBackquote, INT2FIX(1));
10968 
10969  if (popped) {
10970  ADD_INSN(ret, node, pop);
10971  }
10972  break;
10973  }
10974  case NODE_DXSTR:{
10975  ADD_CALL_RECEIVER(ret, node);
10976  compile_dstr(iseq, ret, node);
10977  ADD_CALL(ret, node, idBackquote, INT2FIX(1));
10978 
10979  if (popped) {
10980  ADD_INSN(ret, node, pop);
10981  }
10982  break;
10983  }
10984  case NODE_EVSTR:
10985  CHECK(compile_evstr(iseq, ret, RNODE_EVSTR(node)->nd_body, popped));
10986  break;
10987  case NODE_REGX:{
10988  if (!popped) {
10989  VALUE lit = rb_node_regx_string_val(node);
10990  ADD_INSN1(ret, node, putobject, lit);
10991  RB_OBJ_WRITTEN(iseq, Qundef, lit);
10992  }
10993  break;
10994  }
10995  case NODE_DREGX:
10996  compile_dregx(iseq, ret, node, popped);
10997  break;
10998  case NODE_ONCE:{
10999  int ic_index = body->ise_size++;
11000  const rb_iseq_t *block_iseq;
11001  block_iseq = NEW_CHILD_ISEQ(RNODE_ONCE(node)->nd_body, make_name_for_block(iseq), ISEQ_TYPE_PLAIN, line);
11002 
11003  ADD_INSN2(ret, node, once, block_iseq, INT2FIX(ic_index));
11004  RB_OBJ_WRITTEN(iseq, Qundef, (VALUE)block_iseq);
11005 
11006  if (popped) {
11007  ADD_INSN(ret, node, pop);
11008  }
11009  break;
11010  }
11011  case NODE_ARGSCAT:{
11012  if (popped) {
11013  CHECK(COMPILE(ret, "argscat head", RNODE_ARGSCAT(node)->nd_head));
11014  ADD_INSN1(ret, node, splatarray, Qfalse);
11015  ADD_INSN(ret, node, pop);
11016  CHECK(COMPILE(ret, "argscat body", RNODE_ARGSCAT(node)->nd_body));
11017  ADD_INSN1(ret, node, splatarray, Qfalse);
11018  ADD_INSN(ret, node, pop);
11019  }
11020  else {
11021  CHECK(COMPILE(ret, "argscat head", RNODE_ARGSCAT(node)->nd_head));
11022  const NODE *body_node = RNODE_ARGSCAT(node)->nd_body;
11023  if (nd_type_p(body_node, NODE_LIST)) {
11024  CHECK(compile_array(iseq, ret, body_node, popped, FALSE) >= 0);
11025  }
11026  else {
11027  CHECK(COMPILE(ret, "argscat body", body_node));
11028  ADD_INSN(ret, node, concattoarray);
11029  }
11030  }
11031  break;
11032  }
11033  case NODE_ARGSPUSH:{
11034  if (popped) {
11035  CHECK(COMPILE(ret, "argspush head", RNODE_ARGSPUSH(node)->nd_head));
11036  ADD_INSN1(ret, node, splatarray, Qfalse);
11037  ADD_INSN(ret, node, pop);
11038  CHECK(COMPILE_(ret, "argspush body", RNODE_ARGSPUSH(node)->nd_body, popped));
11039  }
11040  else {
11041  CHECK(COMPILE(ret, "argspush head", RNODE_ARGSPUSH(node)->nd_head));
11042  const NODE *body_node = RNODE_ARGSPUSH(node)->nd_body;
11043  if (keyword_node_p(body_node)) {
11044  CHECK(COMPILE_(ret, "array element", body_node, FALSE));
11045  ADD_INSN(ret, node, pushtoarraykwsplat);
11046  }
11047  else if (static_literal_node_p(body_node, iseq, false)) {
11048  ADD_INSN1(ret, body_node, putobject, static_literal_value(body_node, iseq));
11049  ADD_INSN1(ret, node, pushtoarray, INT2FIX(1));
11050  }
11051  else {
11052  CHECK(COMPILE_(ret, "array element", body_node, FALSE));
11053  ADD_INSN1(ret, node, pushtoarray, INT2FIX(1));
11054  }
11055  }
11056  break;
11057  }
11058  case NODE_SPLAT:{
11059  CHECK(COMPILE(ret, "splat", RNODE_SPLAT(node)->nd_head));
11060  ADD_INSN1(ret, node, splatarray, Qtrue);
11061 
11062  if (popped) {
11063  ADD_INSN(ret, node, pop);
11064  }
11065  break;
11066  }
11067  case NODE_DEFN:{
11068  ID mid = RNODE_DEFN(node)->nd_mid;
11069  const rb_iseq_t *method_iseq = NEW_ISEQ(RNODE_DEFN(node)->nd_defn,
11070  rb_id2str(mid),
11071  ISEQ_TYPE_METHOD, line);
11072 
11073  debugp_param("defn/iseq", rb_iseqw_new(method_iseq));
11074  ADD_INSN2(ret, node, definemethod, ID2SYM(mid), method_iseq);
11075  RB_OBJ_WRITTEN(iseq, Qundef, (VALUE)method_iseq);
11076 
11077  if (!popped) {
11078  ADD_INSN1(ret, node, putobject, ID2SYM(mid));
11079  }
11080 
11081  break;
11082  }
11083  case NODE_DEFS:{
11084  ID mid = RNODE_DEFS(node)->nd_mid;
11085  const rb_iseq_t * singleton_method_iseq = NEW_ISEQ(RNODE_DEFS(node)->nd_defn,
11086  rb_id2str(mid),
11087  ISEQ_TYPE_METHOD, line);
11088 
11089  debugp_param("defs/iseq", rb_iseqw_new(singleton_method_iseq));
11090  CHECK(COMPILE(ret, "defs: recv", RNODE_DEFS(node)->nd_recv));
11091  ADD_INSN2(ret, node, definesmethod, ID2SYM(mid), singleton_method_iseq);
11092  RB_OBJ_WRITTEN(iseq, Qundef, (VALUE)singleton_method_iseq);
11093 
11094  if (!popped) {
11095  ADD_INSN1(ret, node, putobject, ID2SYM(mid));
11096  }
11097  break;
11098  }
11099  case NODE_ALIAS:{
11100  ADD_INSN1(ret, node, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
11101  ADD_INSN1(ret, node, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_CBASE));
11102  CHECK(COMPILE(ret, "alias arg1", RNODE_ALIAS(node)->nd_1st));
11103  CHECK(COMPILE(ret, "alias arg2", RNODE_ALIAS(node)->nd_2nd));
11104  ADD_SEND(ret, node, id_core_set_method_alias, INT2FIX(3));
11105 
11106  if (popped) {
11107  ADD_INSN(ret, node, pop);
11108  }
11109  break;
11110  }
11111  case NODE_VALIAS:{
11112  ADD_INSN1(ret, node, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
11113  ADD_INSN1(ret, node, putobject, ID2SYM(RNODE_VALIAS(node)->nd_alias));
11114  ADD_INSN1(ret, node, putobject, ID2SYM(RNODE_VALIAS(node)->nd_orig));
11115  ADD_SEND(ret, node, id_core_set_variable_alias, INT2FIX(2));
11116 
11117  if (popped) {
11118  ADD_INSN(ret, node, pop);
11119  }
11120  break;
11121  }
11122  case NODE_UNDEF:{
11123  const rb_parser_ary_t *ary = RNODE_UNDEF(node)->nd_undefs;
11124 
11125  for (long i = 0; i < ary->len; i++) {
11126  ADD_INSN1(ret, node, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
11127  ADD_INSN1(ret, node, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_CBASE));
11128  CHECK(COMPILE(ret, "undef arg", ary->data[i]));
11129  ADD_SEND(ret, node, id_core_undef_method, INT2FIX(2));
11130 
11131  if (i < ary->len - 1) {
11132  ADD_INSN(ret, node, pop);
11133  }
11134  }
11135 
11136  if (popped) {
11137  ADD_INSN(ret, node, pop);
11138  }
11139  break;
11140  }
11141  case NODE_CLASS:{
11142  const rb_iseq_t *class_iseq = NEW_CHILD_ISEQ(RNODE_CLASS(node)->nd_body,
11143  rb_str_freeze(rb_sprintf("<class:%"PRIsVALUE">", rb_id2str(get_node_colon_nd_mid(RNODE_CLASS(node)->nd_cpath)))),
11144  ISEQ_TYPE_CLASS, line);
11145  const int flags = VM_DEFINECLASS_TYPE_CLASS |
11146  (RNODE_CLASS(node)->nd_super ? VM_DEFINECLASS_FLAG_HAS_SUPERCLASS : 0) |
11147  compile_cpath(ret, iseq, RNODE_CLASS(node)->nd_cpath);
11148 
11149  CHECK(COMPILE(ret, "super", RNODE_CLASS(node)->nd_super));
11150  ADD_INSN3(ret, node, defineclass, ID2SYM(get_node_colon_nd_mid(RNODE_CLASS(node)->nd_cpath)), class_iseq, INT2FIX(flags));
11151  RB_OBJ_WRITTEN(iseq, Qundef, (VALUE)class_iseq);
11152 
11153  if (popped) {
11154  ADD_INSN(ret, node, pop);
11155  }
11156  break;
11157  }
11158  case NODE_MODULE:{
11159  const rb_iseq_t *module_iseq = NEW_CHILD_ISEQ(RNODE_MODULE(node)->nd_body,
11160  rb_str_freeze(rb_sprintf("<module:%"PRIsVALUE">", rb_id2str(get_node_colon_nd_mid(RNODE_MODULE(node)->nd_cpath)))),
11161  ISEQ_TYPE_CLASS, line);
11162  const int flags = VM_DEFINECLASS_TYPE_MODULE |
11163  compile_cpath(ret, iseq, RNODE_MODULE(node)->nd_cpath);
11164 
11165  ADD_INSN (ret, node, putnil); /* dummy */
11166  ADD_INSN3(ret, node, defineclass, ID2SYM(get_node_colon_nd_mid(RNODE_MODULE(node)->nd_cpath)), module_iseq, INT2FIX(flags));
11167  RB_OBJ_WRITTEN(iseq, Qundef, (VALUE)module_iseq);
11168 
11169  if (popped) {
11170  ADD_INSN(ret, node, pop);
11171  }
11172  break;
11173  }
11174  case NODE_SCLASS:{
11175  ID singletonclass;
11176  const rb_iseq_t *singleton_class = NEW_ISEQ(RNODE_SCLASS(node)->nd_body, rb_fstring_lit("singleton class"),
11177  ISEQ_TYPE_CLASS, line);
11178 
11179  CHECK(COMPILE(ret, "sclass#recv", RNODE_SCLASS(node)->nd_recv));
11180  ADD_INSN (ret, node, putnil);
11181  CONST_ID(singletonclass, "singletonclass");
11182  ADD_INSN3(ret, node, defineclass,
11183  ID2SYM(singletonclass), singleton_class,
11184  INT2FIX(VM_DEFINECLASS_TYPE_SINGLETON_CLASS));
11185  RB_OBJ_WRITTEN(iseq, Qundef, (VALUE)singleton_class);
11186 
11187  if (popped) {
11188  ADD_INSN(ret, node, pop);
11189  }
11190  break;
11191  }
11192  case NODE_COLON2:
11193  CHECK(compile_colon2(iseq, ret, node, popped));
11194  break;
11195  case NODE_COLON3:
11196  CHECK(compile_colon3(iseq, ret, node, popped));
11197  break;
11198  case NODE_DOT2:
11199  CHECK(compile_dots(iseq, ret, node, popped, FALSE));
11200  break;
11201  case NODE_DOT3:
11202  CHECK(compile_dots(iseq, ret, node, popped, TRUE));
11203  break;
11204  case NODE_FLIP2:
11205  case NODE_FLIP3:{
11206  LABEL *lend = NEW_LABEL(line);
11207  LABEL *ltrue = NEW_LABEL(line);
11208  LABEL *lfalse = NEW_LABEL(line);
11209  CHECK(compile_flip_flop(iseq, ret, node, type == NODE_FLIP2,
11210  ltrue, lfalse));
11211  ADD_LABEL(ret, ltrue);
11212  ADD_INSN1(ret, node, putobject, Qtrue);
11213  ADD_INSNL(ret, node, jump, lend);
11214  ADD_LABEL(ret, lfalse);
11215  ADD_INSN1(ret, node, putobject, Qfalse);
11216  ADD_LABEL(ret, lend);
11217  break;
11218  }
11219  case NODE_SELF:{
11220  if (!popped) {
11221  ADD_INSN(ret, node, putself);
11222  }
11223  break;
11224  }
11225  case NODE_NIL:{
11226  if (!popped) {
11227  ADD_INSN(ret, node, putnil);
11228  }
11229  break;
11230  }
11231  case NODE_TRUE:{
11232  if (!popped) {
11233  ADD_INSN1(ret, node, putobject, Qtrue);
11234  }
11235  break;
11236  }
11237  case NODE_FALSE:{
11238  if (!popped) {
11239  ADD_INSN1(ret, node, putobject, Qfalse);
11240  }
11241  break;
11242  }
11243  case NODE_ERRINFO:
11244  CHECK(compile_errinfo(iseq, ret, node, popped));
11245  break;
11246  case NODE_DEFINED:
11247  if (!popped) {
11248  CHECK(compile_defined_expr(iseq, ret, node, Qtrue, false));
11249  }
11250  break;
11251  case NODE_POSTEXE:{
11252  /* compiled to:
11253  * ONCE{ rb_mRubyVMFrozenCore::core#set_postexe{ ... } }
11254  */
11255  int is_index = body->ise_size++;
11257  rb_iseq_new_with_callback_new_callback(build_postexe_iseq, RNODE_POSTEXE(node)->nd_body);
11258  const rb_iseq_t *once_iseq =
11259  new_child_iseq_with_callback(iseq, ifunc,
11260  rb_fstring(make_name_for_block(iseq)), iseq, ISEQ_TYPE_BLOCK, line);
11261 
11262  ADD_INSN2(ret, node, once, once_iseq, INT2FIX(is_index));
11263  RB_OBJ_WRITTEN(iseq, Qundef, (VALUE)once_iseq);
11264 
11265  if (popped) {
11266  ADD_INSN(ret, node, pop);
11267  }
11268  break;
11269  }
11270  case NODE_KW_ARG:
11271  CHECK(compile_kw_arg(iseq, ret, node, popped));
11272  break;
11273  case NODE_DSYM:{
11274  compile_dstr(iseq, ret, node);
11275  if (!popped) {
11276  ADD_INSN(ret, node, intern);
11277  }
11278  else {
11279  ADD_INSN(ret, node, pop);
11280  }
11281  break;
11282  }
11283  case NODE_ATTRASGN:
11284  CHECK(compile_attrasgn(iseq, ret, node, popped));
11285  break;
11286  case NODE_LAMBDA:{
11287  /* compile same as lambda{...} */
11288  const rb_iseq_t *block = NEW_CHILD_ISEQ(RNODE_LAMBDA(node)->nd_body, make_name_for_block(iseq), ISEQ_TYPE_BLOCK, line);
11289  VALUE argc = INT2FIX(0);
11290 
11291  ADD_INSN1(ret, node, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
11292  ADD_CALL_WITH_BLOCK(ret, node, idLambda, argc, block);
11293  RB_OBJ_WRITTEN(iseq, Qundef, (VALUE)block);
11294 
11295  if (popped) {
11296  ADD_INSN(ret, node, pop);
11297  }
11298  break;
11299  }
11300  default:
11301  UNKNOWN_NODE("iseq_compile_each", node, COMPILE_NG);
11302  ng:
11303  debug_node_end();
11304  return COMPILE_NG;
11305  }
11306 
11307  debug_node_end();
11308  return COMPILE_OK;
11309 }
11310 
11311 /***************************/
11312 /* instruction information */
11313 /***************************/
11314 
/* Return the encoded length (opcode slot + operand slots) of this
 * instruction, via the generated insn_len() table. */
static int
insn_data_length(INSN *iobj)
{
    return insn_len(iobj->insn_id);
}
11320 
/* Return the stack depth after executing `insn` starting from `depth`,
 * using the compile-time stack-effect table (operands matter for
 * variable-effect instructions such as send). */
static int
calc_sp_depth(int depth, INSN *insn)
{
    return comptime_insn_stack_increase(depth, insn->insn_id, insn->operands);
}
11326 
11327 static VALUE
11328 opobj_inspect(VALUE obj)
11329 {
11330  if (!SPECIAL_CONST_P(obj) && !RBASIC_CLASS(obj)) {
11331  switch (BUILTIN_TYPE(obj)) {
11332  case T_STRING:
11333  obj = rb_str_new_cstr(RSTRING_PTR(obj));
11334  break;
11335  case T_ARRAY:
11336  obj = rb_ary_dup(obj);
11337  break;
11338  default:
11339  break;
11340  }
11341  }
11342  return rb_inspect(obj);
11343 }
11344 
11345 
11346 
11347 static VALUE
11348 insn_data_to_s_detail(INSN *iobj)
11349 {
11350  VALUE str = rb_sprintf("%-20s ", insn_name(iobj->insn_id));
11351 
11352  if (iobj->operands) {
11353  const char *types = insn_op_types(iobj->insn_id);
11354  int j;
11355 
11356  for (j = 0; types[j]; j++) {
11357  char type = types[j];
11358 
11359  switch (type) {
11360  case TS_OFFSET: /* label(destination position) */
11361  {
11362  LABEL *lobj = (LABEL *)OPERAND_AT(iobj, j);
11363  rb_str_catf(str, LABEL_FORMAT, lobj->label_no);
11364  break;
11365  }
11366  break;
11367  case TS_ISEQ: /* iseq */
11368  {
11369  rb_iseq_t *iseq = (rb_iseq_t *)OPERAND_AT(iobj, j);
11370  VALUE val = Qnil;
11371  if (0 && iseq) { /* TODO: invalidate now */
11372  val = (VALUE)iseq;
11373  }
11374  rb_str_concat(str, opobj_inspect(val));
11375  }
11376  break;
11377  case TS_LINDEX:
11378  case TS_NUM: /* ulong */
11379  case TS_VALUE: /* VALUE */
11380  {
11381  VALUE v = OPERAND_AT(iobj, j);
11382  if (!CLASS_OF(v))
11383  rb_str_cat2(str, "<hidden>");
11384  else {
11385  rb_str_concat(str, opobj_inspect(v));
11386  }
11387  break;
11388  }
11389  case TS_ID: /* ID */
11390  rb_str_concat(str, opobj_inspect(OPERAND_AT(iobj, j)));
11391  break;
11392  case TS_IC: /* inline cache */
11393  rb_str_concat(str, opobj_inspect(OPERAND_AT(iobj, j)));
11394  break;
11395  case TS_IVC: /* inline ivar cache */
11396  rb_str_catf(str, "<ivc:%d>", FIX2INT(OPERAND_AT(iobj, j)));
11397  break;
11398  case TS_ICVARC: /* inline cvar cache */
11399  rb_str_catf(str, "<icvarc:%d>", FIX2INT(OPERAND_AT(iobj, j)));
11400  break;
11401  case TS_ISE: /* inline storage entry */
11402  rb_str_catf(str, "<ise:%d>", FIX2INT(OPERAND_AT(iobj, j)));
11403  break;
11404  case TS_CALLDATA: /* we store these as call infos at compile time */
11405  {
11406  const struct rb_callinfo *ci = (struct rb_callinfo *)OPERAND_AT(iobj, j);
11407  rb_str_cat2(str, "<calldata:");
11408  if (vm_ci_mid(ci)) rb_str_catf(str, "%"PRIsVALUE, rb_id2str(vm_ci_mid(ci)));
11409  rb_str_catf(str, ", %d>", vm_ci_argc(ci));
11410  break;
11411  }
11412  case TS_CDHASH: /* case/when condition cache */
11413  rb_str_cat2(str, "<ch>");
11414  break;
11415  case TS_FUNCPTR:
11416  {
11417  void *func = (void *)OPERAND_AT(iobj, j);
11418 #ifdef HAVE_DLADDR
11419  Dl_info info;
11420  if (dladdr(func, &info) && info.dli_sname) {
11421  rb_str_cat2(str, info.dli_sname);
11422  break;
11423  }
11424 #endif
11425  rb_str_catf(str, "<%p>", func);
11426  }
11427  break;
11428  case TS_BUILTIN:
11429  rb_str_cat2(str, "<TS_BUILTIN>");
11430  break;
11431  default:{
11432  rb_raise(rb_eSyntaxError, "unknown operand type: %c", type);
11433  }
11434  }
11435  if (types[j + 1]) {
11436  rb_str_cat2(str, ", ");
11437  }
11438  }
11439  }
11440  return str;
11441 }
11442 
/* Dump a whole element list to stdout with no cursor or destination
 * highlighting (debugging aid). */
static void
dump_disasm_list(const LINK_ELEMENT *link)
{
    dump_disasm_list_with_cursor(link, NULL, NULL);
}
11448 
/*
 * Dump the compile-time element list (instructions, labels, trace points,
 * stack adjustments) to stdout.  If `curr` is non-NULL, that element is
 * marked with a leading '*'; if `dest` matches a label, the label line is
 * suffixed with " <---".  Debugging aid only.
 */
static void
dump_disasm_list_with_cursor(const LINK_ELEMENT *link, const LINK_ELEMENT *curr, const LABEL *dest)
{
    int pos = 0;        /* running code position in encoded slots */
    INSN *iobj;
    LABEL *lobj;
    VALUE str;

    printf("-- raw disasm--------\n");

    while (link) {
        if (curr) printf(curr == link ? "*" : " ");
        switch (link->type) {
          case ISEQ_ELEMENT_INSN:
            {
                iobj = (INSN *)link;
                str = insn_data_to_s_detail(iobj);
                printf(" %04d %-65s(%4u)\n", pos, StringValueCStr(str), iobj->insn_info.line_no);
                pos += insn_data_length(iobj);
                break;
            }
          case ISEQ_ELEMENT_LABEL:
            {
                lobj = (LABEL *)link;
                printf(LABEL_FORMAT" [sp: %d, unremovable: %d, refcnt: %d]%s\n", lobj->label_no, lobj->sp, lobj->unremovable, lobj->refcnt,
                       dest == lobj ? " <---" : "");
                break;
            }
          case ISEQ_ELEMENT_TRACE:
            {
                TRACE *trace = (TRACE *)link;
                printf(" trace: %0x\n", trace->event);
                break;
            }
          case ISEQ_ELEMENT_ADJUST:
            {
                ADJUST *adjust = (ADJUST *)link;
                printf(" adjust: [label: %d]\n", adjust->label ? adjust->label->label_no : -1);
                break;
            }
          default:
            /* corrupted list: any other type is a hard error */
            rb_raise(rb_eSyntaxError, "dump_disasm_list error: %d\n", (int)link->type);
        }
        link = link->next;
    }
    printf("---------------------\n");
    fflush(stdout);
}
11498 
/* Exported wrapper: encoded length of VM instruction `insn`. */
int
rb_insn_len(VALUE insn)
{
    return insn_len(insn);
}
11504 
/* Exported wrapper: name of the VM instruction with opcode `i`. */
const char *
rb_insns_name(int i)
{
    return insn_name(i);
}
11510 
11511 VALUE
11512 rb_insns_name_array(void)
11513 {
11514  VALUE ary = rb_ary_new_capa(VM_INSTRUCTION_SIZE);
11515  int i;
11516  for (i = 0; i < VM_INSTRUCTION_SIZE; i++) {
11517  rb_ary_push(ary, rb_fstring_cstr(insn_name(i)));
11518  }
11519  return rb_ary_freeze(ary);
11520 }
11521 
11522 static LABEL *
11523 register_label(rb_iseq_t *iseq, struct st_table *labels_table, VALUE obj)
11524 {
11525  LABEL *label = 0;
11526  st_data_t tmp;
11527  obj = rb_to_symbol_type(obj);
11528 
11529  if (st_lookup(labels_table, obj, &tmp) == 0) {
11530  label = NEW_LABEL(0);
11531  st_insert(labels_table, obj, (st_data_t)label);
11532  }
11533  else {
11534  label = (LABEL *)tmp;
11535  }
11536  LABEL_REF(label);
11537  return label;
11538 }
11539 
11540 static VALUE
11541 get_exception_sym2type(VALUE sym)
11542 {
11543  static VALUE symRescue, symEnsure, symRetry;
11544  static VALUE symBreak, symRedo, symNext;
11545 
11546  if (symRescue == 0) {
11547  symRescue = ID2SYM(rb_intern_const("rescue"));
11548  symEnsure = ID2SYM(rb_intern_const("ensure"));
11549  symRetry = ID2SYM(rb_intern_const("retry"));
11550  symBreak = ID2SYM(rb_intern_const("break"));
11551  symRedo = ID2SYM(rb_intern_const("redo"));
11552  symNext = ID2SYM(rb_intern_const("next"));
11553  }
11554 
11555  if (sym == symRescue) return CATCH_TYPE_RESCUE;
11556  if (sym == symEnsure) return CATCH_TYPE_ENSURE;
11557  if (sym == symRetry) return CATCH_TYPE_RETRY;
11558  if (sym == symBreak) return CATCH_TYPE_BREAK;
11559  if (sym == symRedo) return CATCH_TYPE_REDO;
11560  if (sym == symNext) return CATCH_TYPE_NEXT;
11561  rb_raise(rb_eSyntaxError, "invalid exception symbol: %+"PRIsVALUE, sym);
11562  return 0;
11563 }
11564 
/*
 * Register catch-table entries for an iseq being built from its array
 * form.  Each element of `exception` is a 6-tuple:
 *   [type-symbol, rescue-iseq-or-nil, start-label, end-label,
 *    continuation-label, sp]
 * Labels are resolved through `labels_table` (creating them on first
 * use); a non-nil rescue iseq is recursively loaded.
 */
static int
iseq_build_from_ary_exception(rb_iseq_t *iseq, struct st_table *labels_table,
                              VALUE exception)
{
    int i;

    for (i=0; i<RARRAY_LEN(exception); i++) {
        const rb_iseq_t *eiseq;
        VALUE v, type;
        LABEL *lstart, *lend, *lcont;
        unsigned int sp;

        v = rb_to_array_type(RARRAY_AREF(exception, i));
        if (RARRAY_LEN(v) != 6) {
            rb_raise(rb_eSyntaxError, "wrong exception entry");
        }
        type = get_exception_sym2type(RARRAY_AREF(v, 0));
        if (NIL_P(RARRAY_AREF(v, 1))) {
            eiseq = NULL;
        }
        else {
            /* recursively load the handler iseq from its array form */
            eiseq = rb_iseqw_to_iseq(rb_iseq_load(RARRAY_AREF(v, 1), (VALUE)iseq, Qnil));
        }

        lstart = register_label(iseq, labels_table, RARRAY_AREF(v, 2));
        lend = register_label(iseq, labels_table, RARRAY_AREF(v, 3));
        lcont = register_label(iseq, labels_table, RARRAY_AREF(v, 4));
        sp = NUM2UINT(RARRAY_AREF(v, 5));

        /* TODO: Dirty Hack! Fix me */
        /* NOTE(review): these catch types appear to run the continuation
         * with one extra value on the stack, hence the +1 — confirm
         * against the VM's catch-handler calling convention. */
        if (type == CATCH_TYPE_RESCUE ||
            type == CATCH_TYPE_BREAK ||
            type == CATCH_TYPE_NEXT) {
            ++sp;
        }

        lcont->sp = sp;

        ADD_CATCH_ENTRY(type, lstart, lend, eiseq, lcont);

        RB_GC_GUARD(v);
    }
    return COMPILE_OK;
}
11609 
11610 static struct st_table *
11611 insn_make_insn_table(void)
11612 {
11613  struct st_table *table;
11614  int i;
11615  table = st_init_numtable_with_size(VM_INSTRUCTION_SIZE);
11616 
11617  for (i=0; i<VM_INSTRUCTION_SIZE; i++) {
11618  st_insert(table, ID2SYM(rb_intern_const(insn_name(i))), i);
11619  }
11620 
11621  return table;
11622 }
11623 
11624 static const rb_iseq_t *
11625 iseq_build_load_iseq(const rb_iseq_t *iseq, VALUE op)
11626 {
11627  VALUE iseqw;
11628  const rb_iseq_t *loaded_iseq;
11629 
11630  if (RB_TYPE_P(op, T_ARRAY)) {
11631  iseqw = rb_iseq_load(op, (VALUE)iseq, Qnil);
11632  }
11633  else if (CLASS_OF(op) == rb_cISeq) {
11634  iseqw = op;
11635  }
11636  else {
11637  rb_raise(rb_eSyntaxError, "ISEQ is required");
11638  }
11639 
11640  loaded_iseq = rb_iseqw_to_iseq(iseqw);
11641  return loaded_iseq;
11642 }
11643 
/*
 * Build a callinfo operand from the Hash form used in array-loaded
 * iseqs.  Recognized keys: :mid (method-name symbol), :flag (VM_CALL_*
 * bits), :orig_argc, and :kw_arg (Array of keyword symbols).  A nil
 * `op` produces an empty callinfo.
 */
static VALUE
iseq_build_callinfo_from_hash(rb_iseq_t *iseq, VALUE op)
{
    ID mid = 0;
    int orig_argc = 0;
    unsigned int flag = 0;
    struct rb_callinfo_kwarg *kw_arg = 0;

    if (!NIL_P(op)) {
        VALUE vmid = rb_hash_aref(op, ID2SYM(rb_intern_const("mid")));
        VALUE vflag = rb_hash_aref(op, ID2SYM(rb_intern_const("flag")));
        VALUE vorig_argc = rb_hash_aref(op, ID2SYM(rb_intern_const("orig_argc")));
        VALUE vkw_arg = rb_hash_aref(op, ID2SYM(rb_intern_const("kw_arg")));

        if (!NIL_P(vmid)) mid = SYM2ID(vmid);
        if (!NIL_P(vflag)) flag = NUM2UINT(vflag);
        if (!NIL_P(vorig_argc)) orig_argc = FIX2INT(vorig_argc);

        if (!NIL_P(vkw_arg)) {
            int i;
            int len = RARRAY_LENINT(vkw_arg);
            /* header plus `len` trailing keyword VALUEs in one allocation */
            size_t n = rb_callinfo_kwarg_bytes(len);

            kw_arg = xmalloc(n);
            kw_arg->references = 0;
            kw_arg->keyword_len = len;
            for (i = 0; i < len; i++) {
                VALUE kw = RARRAY_AREF(vkw_arg, i);
                SYM2ID(kw); /* make immortal */
                kw_arg->keywords[i] = kw;
            }
        }
    }

    const struct rb_callinfo *ci = new_callinfo(iseq, mid, orig_argc, flag, kw_arg, (flag & VM_CALL_ARGS_SIMPLE) == 0);
    RB_OBJ_WRITTEN(iseq, Qundef, ci);
    return (VALUE)ci;
}
11682 
11683 static rb_event_flag_t
11684 event_name_to_flag(VALUE sym)
11685 {
11686 #define CHECK_EVENT(ev) if (sym == ID2SYM(rb_intern_const(#ev))) return ev;
11687  CHECK_EVENT(RUBY_EVENT_LINE);
11688  CHECK_EVENT(RUBY_EVENT_CLASS);
11689  CHECK_EVENT(RUBY_EVENT_END);
11690  CHECK_EVENT(RUBY_EVENT_CALL);
11691  CHECK_EVENT(RUBY_EVENT_RETURN);
11692  CHECK_EVENT(RUBY_EVENT_B_CALL);
11693  CHECK_EVENT(RUBY_EVENT_B_RETURN);
11694  CHECK_EVENT(RUBY_EVENT_RESCUE);
11695 #undef CHECK_EVENT
11696  return RUBY_EVENT_NONE;
11697 }
11698 
/*
 * Build the instruction list of an iseq from its array form (`body`).
 * Each element of `body` is one of:
 *   - a Symbol: either a trace-event name (ADD_TRACE) or a label name;
 *   - a Fixnum: the source line number for subsequent instructions;
 *   - an Array: [insn-name-symbol, operand, ...].
 * `node_ids`, when given, supplies a node id per instruction.
 * `labels_wrapper` owns the symbol->LABEL table; ownership of the table
 * is transferred away from the wrapper at the end so its dfree does not
 * reclaim it.  Returns COMPILE_OK / COMPILE_NG.
 */
static int
iseq_build_from_ary_body(rb_iseq_t *iseq, LINK_ANCHOR *const anchor,
                         VALUE body, VALUE node_ids, VALUE labels_wrapper)
{
    /* TODO: body should be frozen */
    long i, len = RARRAY_LEN(body);
    struct st_table *labels_table = RTYPEDDATA_DATA(labels_wrapper);
    int j;
    int line_no = 0, node_id = -1, insn_idx = 0;
    int ret = COMPILE_OK;

    /*
     * insn name symbol -> opcode table, built once and shared across
     * calls (process-global cache).
     */
    static struct st_table *insn_table;

    if (insn_table == 0) {
        insn_table = insn_make_insn_table();
    }

    for (i=0; i<len; i++) {
        VALUE obj = RARRAY_AREF(body, i);

        if (SYMBOL_P(obj)) {
            /* either a trace event name or a label definition */
            rb_event_flag_t event;
            if ((event = event_name_to_flag(obj)) != RUBY_EVENT_NONE) {
                ADD_TRACE(anchor, event);
            }
            else {
                LABEL *label = register_label(iseq, labels_table, obj);
                ADD_LABEL(anchor, label);
            }
        }
        else if (FIXNUM_P(obj)) {
            /* bare integer: update the current source line */
            line_no = NUM2INT(obj);
        }
        else if (RB_TYPE_P(obj, T_ARRAY)) {
            VALUE *argv = 0;
            int argc = RARRAY_LENINT(obj) - 1;
            st_data_t insn_id;
            VALUE insn;

            if (node_ids) {
                node_id = NUM2INT(rb_ary_entry(node_ids, insn_idx++));
            }

            insn = (argc < 0) ? Qnil : RARRAY_AREF(obj, 0);
            if (st_lookup(insn_table, (st_data_t)insn, &insn_id) == 0) {
                /* TODO: exception */
                COMPILE_ERROR(iseq, line_no,
                              "unknown instruction: %+"PRIsVALUE, insn);
                ret = COMPILE_NG;
                break;
            }

            if (argc != insn_len((VALUE)insn_id)-1) {
                COMPILE_ERROR(iseq, line_no,
                              "operand size mismatch");
                ret = COMPILE_NG;
                break;
            }

            if (argc > 0) {
                argv = compile_data_calloc2(iseq, sizeof(VALUE), argc);

                // add element before operand setup to make GC root
                ADD_ELEM(anchor,
                         (LINK_ELEMENT*)new_insn_core(iseq, line_no, node_id,
                                                      (enum ruby_vminsn_type)insn_id, argc, argv));

                /* decode each operand according to its declared type */
                for (j=0; j<argc; j++) {
                    VALUE op = rb_ary_entry(obj, j+1);
                    switch (insn_op_type((VALUE)insn_id, j)) {
                      case TS_OFFSET: {
                        LABEL *label = register_label(iseq, labels_table, op);
                        argv[j] = (VALUE)label;
                        break;
                      }
                      case TS_LINDEX:
                      case TS_NUM:
                        (void)NUM2INT(op);  /* type-check only; raises on non-numeric */
                        argv[j] = op;
                        break;
                      case TS_VALUE:
                        argv[j] = op;
                        RB_OBJ_WRITTEN(iseq, Qundef, op);
                        break;
                      case TS_ISEQ:
                        {
                            if (op != Qnil) {
                                VALUE v = (VALUE)iseq_build_load_iseq(iseq, op);
                                argv[j] = v;
                                RB_OBJ_WRITTEN(iseq, Qundef, v);
                            }
                            else {
                                argv[j] = 0;
                            }
                        }
                        break;
                      case TS_ISE:
                        /* grow the inline-storage-entry pool if needed */
                        argv[j] = op;
                        if (NUM2UINT(op) >= ISEQ_BODY(iseq)->ise_size) {
                            ISEQ_BODY(iseq)->ise_size = NUM2INT(op) + 1;
                        }
                        break;
                      case TS_IC:
                        {
                            /* constant-path segments: an array of symbols */
                            VALUE segments = rb_ary_new();
                            op = rb_to_array_type(op);

                            for (int i = 0; i < RARRAY_LEN(op); i++) {
                                VALUE sym = RARRAY_AREF(op, i);
                                sym = rb_to_symbol_type(sym);
                                rb_ary_push(segments, sym);
                            }

                            RB_GC_GUARD(op);
                            argv[j] = segments;
                            RB_OBJ_WRITTEN(iseq, Qundef, segments);
                            ISEQ_BODY(iseq)->ic_size++;
                        }
                        break;
                      case TS_IVC:  /* inline ivar cache */
                        argv[j] = op;
                        if (NUM2UINT(op) >= ISEQ_BODY(iseq)->ivc_size) {
                            ISEQ_BODY(iseq)->ivc_size = NUM2INT(op) + 1;
                        }
                        break;
                      case TS_ICVARC:  /* inline cvar cache */
                        argv[j] = op;
                        if (NUM2UINT(op) >= ISEQ_BODY(iseq)->icvarc_size) {
                            ISEQ_BODY(iseq)->icvarc_size = NUM2INT(op) + 1;
                        }
                        break;
                      case TS_CALLDATA:
                        argv[j] = iseq_build_callinfo_from_hash(iseq, op);
                        break;
                      case TS_ID:
                        argv[j] = rb_to_symbol_type(op);
                        break;
                      case TS_CDHASH:
                        {
                            /* case/when dispatch hash: flat [key, label, ...] pairs */
                            int i;
                            VALUE map = rb_hash_new_with_size(RARRAY_LEN(op)/2);

                            RHASH_TBL_RAW(map)->type = &cdhash_type;
                            op = rb_to_array_type(op);
                            for (i=0; i<RARRAY_LEN(op); i+=2) {
                                VALUE key = RARRAY_AREF(op, i);
                                VALUE sym = RARRAY_AREF(op, i+1);
                                LABEL *label =
                                    register_label(iseq, labels_table, sym);
                                /* tag the LABEL pointer so it is not mistaken for a VALUE */
                                rb_hash_aset(map, key, (VALUE)label | 1);
                            }
                            RB_GC_GUARD(op);
                            argv[j] = map;
                            RB_OBJ_WRITTEN(iseq, Qundef, map);
                        }
                        break;
                      case TS_FUNCPTR:
                        {
#if SIZEOF_VALUE <= SIZEOF_LONG
                            long funcptr = NUM2LONG(op);
#else
                            LONG_LONG funcptr = NUM2LL(op);
#endif
                            argv[j] = (VALUE)funcptr;
                        }
                        break;
                      default:
                        rb_raise(rb_eSyntaxError, "unknown operand: %c", insn_op_type((VALUE)insn_id, j));
                    }
                }
            }
            else {
                ADD_ELEM(anchor,
                         (LINK_ELEMENT*)new_insn_core(iseq, line_no, node_id,
                                                      (enum ruby_vminsn_type)insn_id, argc, NULL));
            }
        }
        else {
            rb_raise(rb_eTypeError, "unexpected object for instruction");
        }
    }
    /* detach the table from the wrapper so its dfree won't free it */
    RTYPEDDATA_DATA(labels_wrapper) = 0;
    RB_GC_GUARD(labels_wrapper);
    validate_labels(iseq, labels_table);
    if (!ret) return ret;
    return iseq_setup(iseq, anchor);
}
11889 
/* Coercion helpers for operands read out of user-supplied arrays. */
#define CHECK_ARRAY(v) rb_to_array_type(v)
#define CHECK_SYMBOL(v) rb_to_symbol_type(v)
11892 
11893 static int
11894 int_param(int *dst, VALUE param, VALUE sym)
11895 {
11896  VALUE val = rb_hash_aref(param, sym);
11897  if (FIXNUM_P(val)) {
11898  *dst = FIX2INT(val);
11899  return TRUE;
11900  }
11901  else if (!NIL_P(val)) {
11902  rb_raise(rb_eTypeError, "invalid %+"PRIsVALUE" Fixnum: %+"PRIsVALUE,
11903  sym, val);
11904  }
11905  return FALSE;
11906 }
11907 
/*
 * Build the rb_iseq_param_keyword descriptor for an iseq loaded from its
 * array form.  `keywords` lists required keywords first as bare symbols,
 * followed by optional ones as [sym] or [sym, default] arrays.  Keyword
 * IDs are written directly into the iseq's local table starting at
 * bits_start - num.  Ownership of the returned struct (and its
 * default_values allocation) passes to the iseq.
 */
static const struct rb_iseq_param_keyword *
iseq_build_kw(rb_iseq_t *iseq, VALUE params, VALUE keywords)
{
    int i, j;
    int len = RARRAY_LENINT(keywords);
    int default_len;
    VALUE key, sym, default_val;
    VALUE *dvs;
    ID *ids;
    struct rb_iseq_param_keyword *keyword = ZALLOC(struct rb_iseq_param_keyword);

    ISEQ_BODY(iseq)->param.flags.has_kw = TRUE;

    keyword->num = len;
#define SYM(s) ID2SYM(rb_intern_const(#s))
    (void)int_param(&keyword->bits_start, params, SYM(kwbits));
    /* keyword IDs live in the local table just before the kwbits slot */
    i = keyword->bits_start - keyword->num;
    ids = (ID *)&ISEQ_BODY(iseq)->local_table[i];
#undef SYM

    /* required args */
    for (i = 0; i < len; i++) {
        VALUE val = RARRAY_AREF(keywords, i);

        if (!SYMBOL_P(val)) {
            /* first non-symbol entry starts the optional section */
            goto default_values;
        }
        ids[i] = SYM2ID(val);
        keyword->required_num++;
    }

  default_values: /* note: we intentionally preserve `i' from previous loop */
    default_len = len - i;
    if (default_len == 0) {
        keyword->table = ids;
        return keyword;
    }
    else if (default_len < 0) {
        UNREACHABLE;
    }

    dvs = ALLOC_N(VALUE, (unsigned int)default_len);

    for (j = 0; i < len; i++, j++) {
        key = RARRAY_AREF(keywords, i);
        CHECK_ARRAY(key);

        switch (RARRAY_LEN(key)) {
          case 1:
            /* [sym] — NOTE(review): Qundef appears to mark a non-literal
             * default computed by bytecode; confirm against the VM's
             * keyword-setup path. */
            sym = RARRAY_AREF(key, 0);
            default_val = Qundef;
            break;
          case 2:
            /* [sym, literal-default] */
            sym = RARRAY_AREF(key, 0);
            default_val = RARRAY_AREF(key, 1);
            break;
          default:
            rb_raise(rb_eTypeError, "keyword default has unsupported len %+"PRIsVALUE, key);
        }
        ids[i] = SYM2ID(sym);
        RB_OBJ_WRITE(iseq, &dvs[j], default_val);
    }

    keyword->table = ids;
    keyword->default_values = dvs;

    return keyword;
}
11976 
/* Callback for iseq_insn_each_markable_object(): mark the operand object
 * without moving it (rb_gc_mark pins). */
static void
iseq_insn_each_object_mark_and_pin(VALUE obj, VALUE _)
{
    rb_gc_mark(obj);
}
11982 
/*
 * GC hook: mark (and pin) every object referenced from INSN operands
 * stored in the compile-time storage arena.  The arena is a linked list
 * of chunks, each holding a packed sequence of INSN records; on
 * STRICT_ALIGNMENT platforms each record may be preceded by padding.
 */
void
rb_iseq_mark_and_pin_insn_storage(struct iseq_compile_data_storage *storage)
{
    INSN *iobj = 0;
    size_t size = sizeof(INSN);
    unsigned int pos = 0;

    while (storage) {
#ifdef STRICT_ALIGNMENT
        size_t padding = calc_padding((void *)&storage->buff[pos], size);
#else
        const size_t padding = 0; /* expected to be optimized by compiler */
#endif /* STRICT_ALIGNMENT */
        size_t offset = pos + size + padding;
        if (offset > storage->size || offset > storage->pos) {
            /* no complete INSN left in this chunk: move to the next one */
            pos = 0;
            storage = storage->next;
        }
        else {
#ifdef STRICT_ALIGNMENT
            pos += (int)padding;
#endif /* STRICT_ALIGNMENT */

            iobj = (INSN *)&storage->buff[pos];

            if (iobj->operands) {
                iseq_insn_each_markable_object(iobj, iseq_insn_each_object_mark_and_pin, (VALUE)0);
            }
            pos += (int)size;
        }
    }
}
12015 
/* TypedData type for wrapping the label st_table used while building an
 * iseq from an array: rb_mark_set marks the table's keys during GC and
 * st_free_table releases the table when the wrapper is collected. */
static const rb_data_type_t labels_wrapper_type = {
    .wrap_struct_name = "compiler/labels_wrapper",
    .function = {
        .dmark = (RUBY_DATA_FUNC)rb_mark_set,
        .dfree = (RUBY_DATA_FUNC)st_free_table,
    },
    .flags = RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED,
};
12024 
/*
 * Build `iseq` from its Array representation (the inverse of
 * ISeq#to_a): `misc` holds sizes, `locals` the local variable names,
 * `params` the parameter spec, `exception` the catch table and `body`
 * the instruction list.  Raises TypeError on malformed input.
 */
void
rb_iseq_build_from_ary(rb_iseq_t *iseq, VALUE misc, VALUE locals, VALUE params,
                       VALUE exception, VALUE body)
{
#define SYM(s) ID2SYM(rb_intern_const(#s))
    int i, len;
    unsigned int arg_size, local_size, stack_max;
    ID *tbl;
    struct st_table *labels_table = st_init_numtable();
    /* wrapper keeps labels_table GC-reachable and frees it afterwards */
    VALUE labels_wrapper = TypedData_Wrap_Struct(0, &labels_wrapper_type, labels_table);
    VALUE arg_opt_labels = rb_hash_aref(params, SYM(opt));
    VALUE keywords = rb_hash_aref(params, SYM(keyword));
    VALUE sym_arg_rest = ID2SYM(rb_intern_const("#arg_rest"));
    DECL_ANCHOR(anchor);
    INIT_ANCHOR(anchor);

    /* local table: Fixnums are hidden-local IDs, Symbols are names,
       :#arg_rest maps to ID 0 */
    len = RARRAY_LENINT(locals);
    ISEQ_BODY(iseq)->local_table_size = len;
    ISEQ_BODY(iseq)->local_table = tbl = len > 0 ? (ID *)ALLOC_N(ID, ISEQ_BODY(iseq)->local_table_size) : NULL;

    for (i = 0; i < len; i++) {
        VALUE lv = RARRAY_AREF(locals, i);

        if (sym_arg_rest == lv) {
            tbl[i] = 0;
        }
        else {
            tbl[i] = FIXNUM_P(lv) ? (ID)FIX2LONG(lv) : SYM2ID(CHECK_SYMBOL(lv));
        }
    }

    /* integer parameter fields; each present key also sets its flag */
#define INT_PARAM(F) int_param(&ISEQ_BODY(iseq)->param.F, params, SYM(F))
    if (INT_PARAM(lead_num)) {
        ISEQ_BODY(iseq)->param.flags.has_lead = TRUE;
    }
    if (INT_PARAM(post_num)) ISEQ_BODY(iseq)->param.flags.has_post = TRUE;
    if (INT_PARAM(post_start)) ISEQ_BODY(iseq)->param.flags.has_post = TRUE;
    if (INT_PARAM(rest_start)) ISEQ_BODY(iseq)->param.flags.has_rest = TRUE;
    if (INT_PARAM(block_start)) ISEQ_BODY(iseq)->param.flags.has_block = TRUE;
#undef INT_PARAM
    {
        /* sizes come from `misc`; missing keys default to 0 */
#define INT_PARAM(F) F = (int_param(&x, misc, SYM(F)) ? (unsigned int)x : 0)
        int x;
        INT_PARAM(arg_size);
        INT_PARAM(local_size);
        INT_PARAM(stack_max);
#undef INT_PARAM
    }

    VALUE node_ids = Qfalse;
#ifdef USE_ISEQ_NODE_ID
    node_ids = rb_hash_aref(misc, ID2SYM(rb_intern("node_ids")));
    if (!RB_TYPE_P(node_ids, T_ARRAY)) {
        rb_raise(rb_eTypeError, "node_ids is not an array");
    }
#endif

    /* optional-argument labels; entry count is opt_num + 1 */
    if (RB_TYPE_P(arg_opt_labels, T_ARRAY)) {
        len = RARRAY_LENINT(arg_opt_labels);
        ISEQ_BODY(iseq)->param.flags.has_opt = !!(len - 1 >= 0);

        if (ISEQ_BODY(iseq)->param.flags.has_opt) {
            VALUE *opt_table = ALLOC_N(VALUE, len);

            for (i = 0; i < len; i++) {
                VALUE ent = RARRAY_AREF(arg_opt_labels, i);
                LABEL *label = register_label(iseq, labels_table, ent);
                opt_table[i] = (VALUE)label;
            }

            ISEQ_BODY(iseq)->param.opt_num = len - 1;
            ISEQ_BODY(iseq)->param.opt_table = opt_table;
        }
    }
    else if (!NIL_P(arg_opt_labels)) {
        rb_raise(rb_eTypeError, ":opt param is not an array: %+"PRIsVALUE,
                 arg_opt_labels);
    }

    if (RB_TYPE_P(keywords, T_ARRAY)) {
        ISEQ_BODY(iseq)->param.keyword = iseq_build_kw(iseq, params, keywords);
    }
    else if (!NIL_P(keywords)) {
        rb_raise(rb_eTypeError, ":keywords param is not an array: %+"PRIsVALUE,
                 keywords);
    }

    if (Qtrue == rb_hash_aref(params, SYM(ambiguous_param0))) {
        ISEQ_BODY(iseq)->param.flags.ambiguous_param0 = TRUE;
    }

    if (Qtrue == rb_hash_aref(params, SYM(use_block))) {
        ISEQ_BODY(iseq)->param.flags.use_block = TRUE;
    }

    /* **kwrest may exist without any named keywords; allocate the
       keyword struct on demand */
    if (int_param(&i, params, SYM(kwrest))) {
        struct rb_iseq_param_keyword *keyword = (struct rb_iseq_param_keyword *)ISEQ_BODY(iseq)->param.keyword;
        if (keyword == NULL) {
            ISEQ_BODY(iseq)->param.keyword = keyword = ZALLOC(struct rb_iseq_param_keyword);
        }
        keyword->rest_start = i;
        ISEQ_BODY(iseq)->param.flags.has_kwrest = TRUE;
    }
#undef SYM
    iseq_calc_param_size(iseq);

    /* exception */
    iseq_build_from_ary_exception(iseq, labels_table, exception);

    /* body */
    iseq_build_from_ary_body(iseq, anchor, body, node_ids, labels_wrapper);

    /* sizes from `misc` override whatever the build computed */
    ISEQ_BODY(iseq)->param.size = arg_size;
    ISEQ_BODY(iseq)->local_table_size = local_size;
    ISEQ_BODY(iseq)->stack_max = stack_max;
}
12141 
12142 /* for parser */
12143 
12144 int
12145 rb_dvar_defined(ID id, const rb_iseq_t *iseq)
12146 {
12147  if (iseq) {
12148  const struct rb_iseq_constant_body *body = ISEQ_BODY(iseq);
12149  while (body->type == ISEQ_TYPE_BLOCK ||
12150  body->type == ISEQ_TYPE_RESCUE ||
12151  body->type == ISEQ_TYPE_ENSURE ||
12152  body->type == ISEQ_TYPE_EVAL ||
12153  body->type == ISEQ_TYPE_MAIN
12154  ) {
12155  unsigned int i;
12156 
12157  for (i = 0; i < body->local_table_size; i++) {
12158  if (body->local_table[i] == id) {
12159  return 1;
12160  }
12161  }
12162  iseq = body->parent_iseq;
12163  body = ISEQ_BODY(iseq);
12164  }
12165  }
12166  return 0;
12167 }
12168 
12169 int
12170 rb_local_defined(ID id, const rb_iseq_t *iseq)
12171 {
12172  if (iseq) {
12173  unsigned int i;
12174  const struct rb_iseq_constant_body *const body = ISEQ_BODY(ISEQ_BODY(iseq)->local_iseq);
12175 
12176  for (i=0; i<body->local_table_size; i++) {
12177  if (body->local_table[i] == id) {
12178  return 1;
12179  }
12180  }
12181  }
12182  return 0;
12183 }
12184 
/* ISeq binary format */

#ifndef IBF_ISEQ_DEBUG
#define IBF_ISEQ_DEBUG 0
#endif

#ifndef IBF_ISEQ_ENABLE_LOCAL_BUFFER
#define IBF_ISEQ_ENABLE_LOCAL_BUFFER 0
#endif

/* byte offset within an IBF dump/load buffer */
typedef uint32_t ibf_offset_t;
#define IBF_OFFSET(ptr) ((ibf_offset_t)(VALUE)(ptr))

#define IBF_MAJOR_VERSION ISEQ_MAJOR_VERSION
#ifdef RUBY_DEVEL
#define IBF_DEVEL_VERSION 4
#define IBF_MINOR_VERSION (ISEQ_MINOR_VERSION * 10000 + IBF_DEVEL_VERSION)
#else
#define IBF_MINOR_VERSION ISEQ_MINOR_VERSION
#endif

/* byte-order tag stored in struct ibf_header.endian: 'b' big, 'l' little */
static const char IBF_ENDIAN_MARK =
#ifdef WORDS_BIGENDIAN
    'b'
#else
    'l'
#endif
    ;
12213 
/* On-disk header of an IBF ("YARB") binary image. */
struct ibf_header {
    char magic[4]; /* YARB */
    uint32_t major_version;
    uint32_t minor_version;
    uint32_t size;
    uint32_t extra_size;

    uint32_t iseq_list_size;
    uint32_t global_object_list_size;
    ibf_offset_t iseq_list_offset;
    ibf_offset_t global_object_list_offset;
    uint8_t endian;    /* IBF_ENDIAN_MARK: 'b' or 'l' */
    uint8_t wordsize; /* assume no 2048-bit CPU */
};
12228 
12230  VALUE str;
12231  st_table *obj_table; /* obj -> obj number */
12232 };
12233 
/* Whole-dump state: iseq numbering plus the global output buffer.
 * current_buffer is the buffer writes currently go to (see
 * IBF_ISEQ_ENABLE_LOCAL_BUFFER for per-iseq buffers). */
struct ibf_dump {
    st_table *iseq_table; /* iseq -> iseq number */
    struct ibf_dump_buffer global_buffer;
    struct ibf_dump_buffer *current_buffer;
};
12239 
12241  const char *buff;
12242  ibf_offset_t size;
12243 
12244  VALUE obj_list; /* [obj0, ...] */
12245  unsigned int obj_list_size;
12246  ibf_offset_t obj_list_offset;
12247 };
12248 
/* Whole-load state while reconstructing iseqs from an IBF binary. */
struct ibf_load {
    const struct ibf_header *header; /* parsed file header */
    VALUE iseq_list; /* [iseq0, ...] */
    struct ibf_load_buffer global_buffer;
    VALUE loader_obj;
    rb_iseq_t *iseq; /* iseq currently being loaded */
    VALUE str; /* the binary String being loaded */
    struct ibf_load_buffer *current_buffer;
};
12258 
/* GC-marked array of VALUEs; `buffer` is over-allocated beyond the
 * declared [1] element (see pinned_list_new, which sizes the allocation
 * with offsetof). */
struct pinned_list {
    long size;
    VALUE buffer[1];
};
12263 
12264 static void
12265 pinned_list_mark(void *ptr)
12266 {
12267  long i;
12268  struct pinned_list *list = (struct pinned_list *)ptr;
12269  for (i = 0; i < list->size; i++) {
12270  if (list->buffer[i]) {
12271  rb_gc_mark(list->buffer[i]);
12272  }
12273  }
12274 }
12275 
12276 static const rb_data_type_t pinned_list_type = {
12277  "pinned_list",
12278  {
12279  pinned_list_mark,
12281  NULL, // No external memory to report,
12282  },
12283  0, 0, RUBY_TYPED_WB_PROTECTED | RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_EMBEDDABLE
12284 };
12285 
12286 static VALUE
12287 pinned_list_fetch(VALUE list, long offset)
12288 {
12289  struct pinned_list * ptr;
12290 
12291  TypedData_Get_Struct(list, struct pinned_list, &pinned_list_type, ptr);
12292 
12293  if (offset >= ptr->size) {
12294  rb_raise(rb_eIndexError, "object index out of range: %ld", offset);
12295  }
12296 
12297  return ptr->buffer[offset];
12298 }
12299 
12300 static void
12301 pinned_list_store(VALUE list, long offset, VALUE object)
12302 {
12303  struct pinned_list * ptr;
12304 
12305  TypedData_Get_Struct(list, struct pinned_list, &pinned_list_type, ptr);
12306 
12307  if (offset >= ptr->size) {
12308  rb_raise(rb_eIndexError, "object index out of range: %ld", offset);
12309  }
12310 
12311  RB_OBJ_WRITE(list, &ptr->buffer[offset], object);
12312 }
12313 
12314 static VALUE
12315 pinned_list_new(long size)
12316 {
12317  size_t memsize = offsetof(struct pinned_list, buffer) + size * sizeof(VALUE);
12318  VALUE obj_list = rb_data_typed_object_zalloc(0, memsize, &pinned_list_type);
12319  struct pinned_list * ptr = RTYPEDDATA_GET_DATA(obj_list);
12320  ptr->size = size;
12321  return obj_list;
12322 }
12323 
12324 static ibf_offset_t
12325 ibf_dump_pos(struct ibf_dump *dump)
12326 {
12327  long pos = RSTRING_LEN(dump->current_buffer->str);
12328 #if SIZEOF_LONG > SIZEOF_INT
12329  if (pos >= UINT_MAX) {
12330  rb_raise(rb_eRuntimeError, "dump size exceeds");
12331  }
12332 #endif
12333  return (unsigned int)pos;
12334 }
12335 
/*
 * Pad the dump with NUL bytes so the current write position becomes a
 * multiple of `align`.  Padding is emitted from a static zero buffer in
 * sizeof(VALUE)-sized chunks, then the remainder.
 */
static void
ibf_dump_align(struct ibf_dump *dump, size_t align)
{
    ibf_offset_t pos = ibf_dump_pos(dump);
    if (pos % align) {
        static const char padding[sizeof(VALUE)];
        size_t size = align - ((size_t)pos % align);
#if SIZEOF_LONG > SIZEOF_INT
        if (pos + size >= UINT_MAX) {
            rb_raise(rb_eRuntimeError, "dump size exceeds");
        }
#endif
        for (; size > sizeof(padding); size -= sizeof(padding)) {
            rb_str_cat(dump->current_buffer->str, padding, sizeof(padding));
        }
        rb_str_cat(dump->current_buffer->str, padding, size);
    }
}
12354 
12355 static ibf_offset_t
12356 ibf_dump_write(struct ibf_dump *dump, const void *buff, unsigned long size)
12357 {
12358  ibf_offset_t pos = ibf_dump_pos(dump);
12359  rb_str_cat(dump->current_buffer->str, (const char *)buff, size);
12360  /* TODO: overflow check */
12361  return pos;
12362 }
12363 
12364 static ibf_offset_t
12365 ibf_dump_write_byte(struct ibf_dump *dump, unsigned char byte)
12366 {
12367  return ibf_dump_write(dump, &byte, sizeof(unsigned char));
12368 }
12369 
12370 static void
12371 ibf_dump_overwrite(struct ibf_dump *dump, void *buff, unsigned int size, long offset)
12372 {
12373  VALUE str = dump->current_buffer->str;
12374  char *ptr = RSTRING_PTR(str);
12375  if ((unsigned long)(size + offset) > (unsigned long)RSTRING_LEN(str))
12376  rb_bug("ibf_dump_overwrite: overflow");
12377  memcpy(ptr + offset, buff, size);
12378 }
12379 
12380 static const void *
12381 ibf_load_ptr(const struct ibf_load *load, ibf_offset_t *offset, int size)
12382 {
12383  ibf_offset_t beg = *offset;
12384  *offset += size;
12385  return load->current_buffer->buff + beg;
12386 }
12387 
12388 static void *
12389 ibf_load_alloc(const struct ibf_load *load, ibf_offset_t offset, size_t x, size_t y)
12390 {
12391  void *buff = ruby_xmalloc2(x, y);
12392  size_t size = x * y;
12393  memcpy(buff, load->current_buffer->buff + offset, size);
12394  return buff;
12395 }
12396 
/* Write/read helper macros; they expect a local `dump` / `load` variable
 * in scope.  IBF_W writes an aligned array of n `type`s and yields its
 * offset (cast to type*); IBF_R reads such an array back into a fresh
 * allocation. */
#define IBF_W_ALIGN(type) (RUBY_ALIGNOF(type) > 1 ? ibf_dump_align(dump, RUBY_ALIGNOF(type)) : (void)0)

#define IBF_W(b, type, n) (IBF_W_ALIGN(type), (type *)(VALUE)IBF_WP(b, type, n))
#define IBF_WV(variable) ibf_dump_write(dump, &(variable), sizeof(variable))
#define IBF_WP(b, type, n) ibf_dump_write(dump, (b), sizeof(type) * (n))
#define IBF_R(val, type, n) (type *)ibf_load_alloc(load, IBF_OFFSET(val), sizeof(type), (n))
#define IBF_ZERO(variable) memset(&(variable), 0, sizeof(variable))
12404 
12405 static int
12406 ibf_table_lookup(struct st_table *table, st_data_t key)
12407 {
12408  st_data_t val;
12409 
12410  if (st_lookup(table, key, &val)) {
12411  return (int)val;
12412  }
12413  else {
12414  return -1;
12415  }
12416 }
12417 
12418 static int
12419 ibf_table_find_or_insert(struct st_table *table, st_data_t key)
12420 {
12421  int index = ibf_table_lookup(table, key);
12422 
12423  if (index < 0) { /* not found */
12424  index = (int)table->num_entries;
12425  st_insert(table, key, (st_data_t)index);
12426  }
12427 
12428  return index;
12429 }
12430 
/* dump/load generic */

/* forward declarations */
static void ibf_dump_object_list(struct ibf_dump *dump, ibf_offset_t *obj_list_offset, unsigned int *obj_list_size);

static VALUE ibf_load_object(const struct ibf_load *load, VALUE object_index);
static rb_iseq_t *ibf_load_iseq(const struct ibf_load *load, const rb_iseq_t *index_iseq);
12437 
12438 static st_table *
12439 ibf_dump_object_table_new(void)
12440 {
12441  st_table *obj_table = st_init_numtable(); /* need free */
12442  st_insert(obj_table, (st_data_t)Qnil, (st_data_t)0); /* 0th is nil */
12443 
12444  return obj_table;
12445 }
12446 
12447 static VALUE
12448 ibf_dump_object(struct ibf_dump *dump, VALUE obj)
12449 {
12450  return ibf_table_find_or_insert(dump->current_buffer->obj_table, (st_data_t)obj);
12451 }
12452 
12453 static VALUE
12454 ibf_dump_id(struct ibf_dump *dump, ID id)
12455 {
12456  if (id == 0 || rb_id2name(id) == NULL) {
12457  return 0;
12458  }
12459  return ibf_dump_object(dump, rb_id2sym(id));
12460 }
12461 
12462 static ID
12463 ibf_load_id(const struct ibf_load *load, const ID id_index)
12464 {
12465  if (id_index == 0) {
12466  return 0;
12467  }
12468  VALUE sym = ibf_load_object(load, id_index);
12469  if (rb_integer_type_p(sym)) {
12470  /* Load hidden local variables as indexes */
12471  return NUM2ULONG(sym);
12472  }
12473  return rb_sym2id(sym);
12474 }
12475 
/* dump/load: code */

/* forward declaration */
static ibf_offset_t ibf_dump_iseq_each(struct ibf_dump *dump, const rb_iseq_t *iseq);
12479 
12480 static int
12481 ibf_dump_iseq(struct ibf_dump *dump, const rb_iseq_t *iseq)
12482 {
12483  if (iseq == NULL) {
12484  return -1;
12485  }
12486  else {
12487  return ibf_table_find_or_insert(dump->iseq_table, (st_data_t)iseq);
12488  }
12489 }
12490 
12491 static unsigned char
12492 ibf_load_byte(const struct ibf_load *load, ibf_offset_t *offset)
12493 {
12494  if (*offset >= load->current_buffer->size) { rb_raise(rb_eRuntimeError, "invalid bytecode"); }
12495  return (unsigned char)load->current_buffer->buff[(*offset)++];
12496 }
12497 
12498 /*
12499  * Small uint serialization
12500  * 0x00000000_00000000 - 0x00000000_0000007f: 1byte | XXXX XXX1 |
12501  * 0x00000000_00000080 - 0x00000000_00003fff: 2byte | XXXX XX10 | XXXX XXXX |
12502  * 0x00000000_00004000 - 0x00000000_001fffff: 3byte | XXXX X100 | XXXX XXXX | XXXX XXXX |
 * 0x00000000_00200000 - 0x00000000_0fffffff: 4byte | XXXX 1000 | XXXX XXXX | XXXX XXXX | XXXX XXXX |
12504  * ...
12505  * 0x00010000_00000000 - 0x00ffffff_ffffffff: 8byte | 1000 0000 | XXXX XXXX | XXXX XXXX | XXXX XXXX | XXXX XXXX | XXXX XXXX | XXXX XXXX | XXXX XXXX |
12506  * 0x01000000_00000000 - 0xffffffff_ffffffff: 9byte | 0000 0000 | XXXX XXXX | XXXX XXXX | XXXX XXXX | XXXX XXXX | XXXX XXXX | XXXX XXXX | XXXX XXXX | XXXX XXXX |
12507  */
/*
 * Serialize `x` with the variable-length encoding documented above: the
 * count of trailing zero bits in the first byte (plus one) is the total
 * encoded length.  On platforms where VALUE is wider than 8 bytes or
 * bytes are not octets, the raw value is written instead.
 */
static void
ibf_dump_write_small_value(struct ibf_dump *dump, VALUE x)
{
    if (sizeof(VALUE) > 8 || CHAR_BIT != 8) {
        ibf_dump_write(dump, &x, sizeof(VALUE));
        return;
    }

    enum { max_byte_length = sizeof(VALUE) + 1 };

    unsigned char bytes[max_byte_length];
    ibf_offset_t n;

    /* peel off whole low-order bytes that won't fit beside the tag bits */
    for (n = 0; n < sizeof(VALUE) && (x >> (7 - n)); n++, x >>= 8) {
        bytes[max_byte_length - 1 - n] = (unsigned char)x;
    }

    /* tag byte: remaining value bits, a 1 bit, then n trailing zeros */
    x <<= 1;
    x |= 1;
    x <<= n;
    bytes[max_byte_length - 1 - n] = (unsigned char)x;
    n++;

    /* bytes were filled from the tail of the scratch array */
    ibf_dump_write(dump, bytes + max_byte_length - n, n);
}
12533 
/*
 * Inverse of ibf_dump_write_small_value: decode the variable-length
 * integer at *offset and advance *offset past it.
 */
static VALUE
ibf_load_small_value(const struct ibf_load *load, ibf_offset_t *offset)
{
    if (sizeof(VALUE) > 8 || CHAR_BIT != 8) {
        union { char s[sizeof(VALUE)]; VALUE v; } x;

        memcpy(x.s, load->current_buffer->buff + *offset, sizeof(VALUE));
        *offset += sizeof(VALUE);

        return x.v;
    }

    enum { max_byte_length = sizeof(VALUE) + 1 };

    const unsigned char *buffer = (const unsigned char *)load->current_buffer->buff;
    /* NOTE(review): the tag byte is read before the bounds check below;
     * this assumes *offset itself is always < size — confirm callers. */
    const unsigned char c = buffer[*offset];

    /* trailing zero count of the tag byte (+1) gives the total length;
     * tag 0 means the full 9-byte form */
    ibf_offset_t n =
        c & 1 ? 1 :
        c == 0 ? 9 : ntz_int32(c) + 1;
    VALUE x = (VALUE)c >> n;

    if (*offset + n > load->current_buffer->size) {
        rb_raise(rb_eRuntimeError, "invalid byte sequence");
    }

    /* remaining bytes are big-endian continuation bytes */
    ibf_offset_t i;
    for (i = 1; i < n; i++) {
        x <<= 8;
        x |= (VALUE)buffer[*offset + i];
    }

    *offset += n;
    return x;
}
12569 
12570 static void
12571 ibf_dump_builtin(struct ibf_dump *dump, const struct rb_builtin_function *bf)
12572 {
12573  // short: index
12574  // short: name.length
12575  // bytes: name
12576  // // omit argc (only verify with name)
12577  ibf_dump_write_small_value(dump, (VALUE)bf->index);
12578 
12579  size_t len = strlen(bf->name);
12580  ibf_dump_write_small_value(dump, (VALUE)len);
12581  ibf_dump_write(dump, bf->name, len);
12582 }
12583 
/*
 * Read a builtin-function reference (index + name) and resolve it
 * against the VM's registered builtin function table, verifying that
 * the name at that index matches.  Raises ArgError on mismatch or when
 * no table has been provided.
 */
static const struct rb_builtin_function *
ibf_load_builtin(const struct ibf_load *load, ibf_offset_t *offset)
{
    int i = (int)ibf_load_small_value(load, offset);
    int len = (int)ibf_load_small_value(load, offset);
    const char *name = (char *)ibf_load_ptr(load, offset, len);

    if (0) {
        /* debugging aid (`name` is not NUL-terminated, hence %.*s) */
        fprintf(stderr, "%.*s!!\n", len, name);
    }

    const struct rb_builtin_function *table = GET_VM()->builtin_function_table;
    if (table == NULL) rb_raise(rb_eArgError, "builtin function table is not provided");
    if (strncmp(table[i].name, name, len) != 0) {
        rb_raise(rb_eArgError, "builtin function index (%d) mismatch (expect %s but %s)", i, name, table[i].name);
    }
    // fprintf(stderr, "load-builtin: name:%s(%d)\n", table[i].name, table[i].argc);

    return &table[i];
}
12604 
/*
 * Serialize the bytecode of `iseq`: for each instruction, the opcode
 * followed by its operands, all as small values.  Pointer-valued
 * operands are replaced by table indexes (objects, iseqs, inline-cache
 * slots); TS_CALLDATA is reconstructed at load time and writes nothing.
 * Returns the offset of the encoded code.
 */
static ibf_offset_t
ibf_dump_code(struct ibf_dump *dump, const rb_iseq_t *iseq)
{
    const struct rb_iseq_constant_body *const body = ISEQ_BODY(iseq);
    const int iseq_size = body->iseq_size;
    int code_index;
    const VALUE *orig_code = rb_iseq_original_iseq(iseq);

    ibf_offset_t offset = ibf_dump_pos(dump);

    for (code_index=0; code_index<iseq_size;) {
        const VALUE insn = orig_code[code_index++];
        const char *types = insn_op_types(insn);
        int op_index;

        /* opcode */
        if (insn >= 0x100) { rb_raise(rb_eRuntimeError, "invalid instruction"); }
        ibf_dump_write_small_value(dump, insn);

        /* operands */
        for (op_index=0; types[op_index]; op_index++, code_index++) {
            VALUE op = orig_code[code_index];
            VALUE wv; /* value actually written for this operand */

            switch (types[op_index]) {
              case TS_CDHASH:
              case TS_VALUE:
                wv = ibf_dump_object(dump, op);
                break;
              case TS_ISEQ:
                wv = (VALUE)ibf_dump_iseq(dump, (const rb_iseq_t *)op);
                break;
              case TS_IC:
                {
                    /* constant cache: dump the segment ID list as an array */
                    IC ic = (IC)op;
                    VALUE arr = idlist_to_array(ic->segments);
                    wv = ibf_dump_object(dump, arr);
                }
                break;
              case TS_ISE:
              case TS_IVC:
              case TS_ICVARC:
                {
                    /* inline-storage entries are dumped as their slot index */
                    union iseq_inline_storage_entry *is = (union iseq_inline_storage_entry *)op;
                    wv = is - ISEQ_IS_ENTRY_START(body, types[op_index]);
                }
                break;
              case TS_CALLDATA:
                {
                    /* rebuilt from the call_data array at load time */
                    goto skip_wv;
                }
              case TS_ID:
                wv = ibf_dump_id(dump, (ID)op);
                break;
              case TS_FUNCPTR:
                rb_raise(rb_eRuntimeError, "TS_FUNCPTR is not supported");
                goto skip_wv;
              case TS_BUILTIN:
                ibf_dump_builtin(dump, (const struct rb_builtin_function *)op);
                goto skip_wv;
              default:
                wv = op;
                break;
            }
            ibf_dump_write_small_value(dump, wv);
          skip_wv:;
        }
        RUBY_ASSERT(insn_len(insn) == op_index+1);
    }

    return offset;
}
12677 
/*
 * Decode the bytecode written by ibf_dump_code back into an iseq_encoded
 * array of `iseq_size` VALUEs.  Table indexes are resolved back into
 * objects, iseqs, inline-cache slots and call-data entries, GC write
 * barriers are issued for heap operands, and a mark bitmap is built so
 * the GC knows which slots hold VALUEs.
 */
static VALUE *
ibf_load_code(const struct ibf_load *load, rb_iseq_t *iseq, ibf_offset_t bytecode_offset, ibf_offset_t bytecode_size, unsigned int iseq_size)
{
    VALUE iseqv = (VALUE)iseq;
    unsigned int code_index;
    ibf_offset_t reading_pos = bytecode_offset;
    VALUE *code = ALLOC_N(VALUE, iseq_size);

    struct rb_iseq_constant_body *load_body = ISEQ_BODY(iseq);
    struct rb_call_data *cd_entries = load_body->call_data;
    int ic_index = 0;

    /* bitmap of code slots that contain markable VALUEs */
    iseq_bits_t * mark_offset_bits;

    /* small iseqs fit in a single word; avoid a heap allocation */
    iseq_bits_t tmp[1] = {0};

    if (ISEQ_MBITS_BUFLEN(iseq_size) == 1) {
        mark_offset_bits = tmp;
    }
    else {
        mark_offset_bits = ZALLOC_N(iseq_bits_t, ISEQ_MBITS_BUFLEN(iseq_size));
    }
    bool needs_bitmap = false;

    for (code_index=0; code_index<iseq_size;) {
        /* opcode */
        const VALUE insn = code[code_index] = ibf_load_small_value(load, &reading_pos);
        const char *types = insn_op_types(insn);
        int op_index;

        code_index++;

        /* operands */
        for (op_index=0; types[op_index]; op_index++, code_index++) {
            const char operand_type = types[op_index];
            switch (operand_type) {
              case TS_VALUE:
                {
                    VALUE op = ibf_load_small_value(load, &reading_pos);
                    VALUE v = ibf_load_object(load, op);
                    code[code_index] = v;
                    if (!SPECIAL_CONST_P(v)) {
                        /* heap object: needs write barrier and mark bit */
                        RB_OBJ_WRITTEN(iseqv, Qundef, v);
                        ISEQ_MBITS_SET(mark_offset_bits, code_index);
                        needs_bitmap = true;
                    }
                    break;
                }
              case TS_CDHASH:
                {
                    VALUE op = ibf_load_small_value(load, &reading_pos);
                    VALUE v = ibf_load_object(load, op);
                    v = rb_hash_dup(v); // hash dumped as frozen
                    RHASH_TBL_RAW(v)->type = &cdhash_type;
                    rb_hash_rehash(v); // hash function changed
                    freeze_hide_obj(v);

                    // Overwrite the existing hash in the object list. This
                    // is to keep the object alive during load time.
                    // [Bug #17984] [ruby-core:104259]
                    pinned_list_store(load->current_buffer->obj_list, (long)op, v);

                    code[code_index] = v;
                    ISEQ_MBITS_SET(mark_offset_bits, code_index);
                    RB_OBJ_WRITTEN(iseqv, Qundef, v);
                    needs_bitmap = true;
                    break;
                }
              case TS_ISEQ:
                {
                    VALUE op = (VALUE)ibf_load_small_value(load, &reading_pos);
                    VALUE v = (VALUE)ibf_load_iseq(load, (const rb_iseq_t *)op);
                    code[code_index] = v;
                    if (!SPECIAL_CONST_P(v)) {
                        RB_OBJ_WRITTEN(iseqv, Qundef, v);
                        ISEQ_MBITS_SET(mark_offset_bits, code_index);
                        needs_bitmap = true;
                    }
                    break;
                }
              case TS_IC:
                {
                    /* constant cache: rebuild the ID segment list */
                    VALUE op = ibf_load_small_value(load, &reading_pos);
                    VALUE arr = ibf_load_object(load, op);

                    IC ic = &ISEQ_IS_IC_ENTRY(load_body, ic_index++);
                    ic->segments = array_to_idlist(arr);

                    code[code_index] = (VALUE)ic;
                }
                break;
              case TS_ISE:
              case TS_ICVARC:
              case TS_IVC:
                {
                    /* inline-storage slot index back to slot pointer */
                    unsigned int op = (unsigned int)ibf_load_small_value(load, &reading_pos);

                    ISE ic = ISEQ_IS_ENTRY_START(load_body, operand_type) + op;
                    code[code_index] = (VALUE)ic;

                    if (operand_type == TS_IVC) {
                        IVC cache = (IVC)ic;

                        if (insn == BIN(setinstancevariable)) {
                            /* the ivar name operand precedes the cache operand */
                            ID iv_name = (ID)code[code_index - 1];
                            cache->iv_set_name = iv_name;
                        }
                        else {
                            cache->iv_set_name = 0;
                        }

                        vm_ic_attr_index_initialize(cache, INVALID_SHAPE_ID);
                    }

                }
                break;
              case TS_CALLDATA:
                {
                    /* call data entries are consumed in bytecode order */
                    code[code_index] = (VALUE)cd_entries++;
                }
                break;
              case TS_ID:
                {
                    VALUE op = ibf_load_small_value(load, &reading_pos);
                    code[code_index] = ibf_load_id(load, (ID)(VALUE)op);
                }
                break;
              case TS_FUNCPTR:
                rb_raise(rb_eRuntimeError, "TS_FUNCPTR is not supported");
                break;
              case TS_BUILTIN:
                code[code_index] = (VALUE)ibf_load_builtin(load, &reading_pos);
                break;
              default:
                code[code_index] = ibf_load_small_value(load, &reading_pos);
                continue;
            }
        }
        if (insn_len(insn) != op_index+1) {
            rb_raise(rb_eRuntimeError, "operand size mismatch");
        }
    }

    load_body->iseq_encoded = code;
    load_body->iseq_size = code_index;

    /* install the mark bitmap: inline single word, heap list, or none */
    if (ISEQ_MBITS_BUFLEN(load_body->iseq_size) == 1) {
        load_body->mark_bits.single = mark_offset_bits[0];
    }
    else {
        if (needs_bitmap) {
            load_body->mark_bits.list = mark_offset_bits;
        }
        else {
            load_body->mark_bits.list = 0;
            ruby_xfree(mark_offset_bits);
        }
    }

    RUBY_ASSERT(code_index == iseq_size);
    RUBY_ASSERT(reading_pos == bytecode_offset + bytecode_size);
    return code;
}
12841 
12842 static ibf_offset_t
12843 ibf_dump_param_opt_table(struct ibf_dump *dump, const rb_iseq_t *iseq)
12844 {
12845  int opt_num = ISEQ_BODY(iseq)->param.opt_num;
12846 
12847  if (opt_num > 0) {
12848  IBF_W_ALIGN(VALUE);
12849  return ibf_dump_write(dump, ISEQ_BODY(iseq)->param.opt_table, sizeof(VALUE) * (opt_num + 1));
12850  }
12851  else {
12852  return ibf_dump_pos(dump);
12853  }
12854 }
12855 
12856 static VALUE *
12857 ibf_load_param_opt_table(const struct ibf_load *load, ibf_offset_t opt_table_offset, int opt_num)
12858 {
12859  if (opt_num > 0) {
12860  VALUE *table = ALLOC_N(VALUE, opt_num+1);
12861  MEMCPY(table, load->current_buffer->buff + opt_table_offset, VALUE, opt_num+1);
12862  return table;
12863  }
12864  else {
12865  return NULL;
12866  }
12867 }
12868 
/*
 * Dump the keyword-parameter descriptor of `iseq` (if any).  The ID
 * table and default values are converted to dump indexes and written
 * first; then a copy of the struct is written with those offsets in
 * place of the runtime pointers.  Returns the struct's offset, or 0
 * when the iseq has no keyword parameters.
 */
static ibf_offset_t
ibf_dump_param_keyword(struct ibf_dump *dump, const rb_iseq_t *iseq)
{
    const struct rb_iseq_param_keyword *kw = ISEQ_BODY(iseq)->param.keyword;

    if (kw) {
        struct rb_iseq_param_keyword dump_kw = *kw;
        int dv_num = kw->num - kw->required_num; /* optional keyword count */
        ID *ids = kw->num > 0 ? ALLOCA_N(ID, kw->num) : NULL;
        VALUE *dvs = dv_num > 0 ? ALLOCA_N(VALUE, dv_num) : NULL;
        int i;

        for (i=0; i<kw->num; i++) ids[i] = (ID)ibf_dump_id(dump, kw->table[i]);
        for (i=0; i<dv_num; i++) dvs[i] = (VALUE)ibf_dump_object(dump, kw->default_values[i]);

        /* IBF_W yields the written array's offset (cast to a pointer) */
        dump_kw.table = IBF_W(ids, ID, kw->num);
        dump_kw.default_values = IBF_W(dvs, VALUE, dv_num);
        IBF_W_ALIGN(struct rb_iseq_param_keyword);
        return ibf_dump_write(dump, &dump_kw, sizeof(struct rb_iseq_param_keyword) * 1);
    }
    else {
        return 0;
    }
}
12893 
/*
 * Load the keyword-parameter descriptor written by
 * ibf_dump_param_keyword.  Default values are resolved from their object
 * indexes; the name table is left NULL to be filled in once the local
 * table has been loaded.  Returns NULL for offset 0 (no keyword args).
 */
static const struct rb_iseq_param_keyword *
ibf_load_param_keyword(const struct ibf_load *load, ibf_offset_t param_keyword_offset)
{
    if (param_keyword_offset) {
        struct rb_iseq_param_keyword *kw = IBF_R(param_keyword_offset, struct rb_iseq_param_keyword, 1);
        int dv_num = kw->num - kw->required_num;
        VALUE *dvs = dv_num ? IBF_R(kw->default_values, VALUE, dv_num) : NULL;

        int i;
        for (i=0; i<dv_num; i++) {
            dvs[i] = ibf_load_object(load, dvs[i]);
        }

        // Will be set once the local table is loaded.
        kw->table = NULL;

        kw->default_values = dvs;
        return kw;
    }
    else {
        return NULL;
    }
}
12917 
12918 static ibf_offset_t
12919 ibf_dump_insns_info_body(struct ibf_dump *dump, const rb_iseq_t *iseq)
12920 {
12921  ibf_offset_t offset = ibf_dump_pos(dump);
12922  const struct iseq_insn_info_entry *entries = ISEQ_BODY(iseq)->insns_info.body;
12923 
12924  unsigned int i;
12925  for (i = 0; i < ISEQ_BODY(iseq)->insns_info.size; i++) {
12926  ibf_dump_write_small_value(dump, entries[i].line_no);
12927 #ifdef USE_ISEQ_NODE_ID
12928  ibf_dump_write_small_value(dump, entries[i].node_id);
12929 #endif
12930  ibf_dump_write_small_value(dump, entries[i].events);
12931  }
12932 
12933  return offset;
12934 }
12935 
12936 static struct iseq_insn_info_entry *
12937 ibf_load_insns_info_body(const struct ibf_load *load, ibf_offset_t body_offset, unsigned int size)
12938 {
12939  ibf_offset_t reading_pos = body_offset;
12940  struct iseq_insn_info_entry *entries = ALLOC_N(struct iseq_insn_info_entry, size);
12941 
12942  unsigned int i;
12943  for (i = 0; i < size; i++) {
12944  entries[i].line_no = (int)ibf_load_small_value(load, &reading_pos);
12945 #ifdef USE_ISEQ_NODE_ID
12946  entries[i].node_id = (int)ibf_load_small_value(load, &reading_pos);
12947 #endif
12948  entries[i].events = (rb_event_flag_t)ibf_load_small_value(load, &reading_pos);
12949  }
12950 
12951  return entries;
12952 }
12953 
12954 static ibf_offset_t
12955 ibf_dump_insns_info_positions(struct ibf_dump *dump, const unsigned int *positions, unsigned int size)
12956 {
12957  ibf_offset_t offset = ibf_dump_pos(dump);
12958 
12959  unsigned int last = 0;
12960  unsigned int i;
12961  for (i = 0; i < size; i++) {
12962  ibf_dump_write_small_value(dump, positions[i] - last);
12963  last = positions[i];
12964  }
12965 
12966  return offset;
12967 }
12968 
12969 static unsigned int *
12970 ibf_load_insns_info_positions(const struct ibf_load *load, ibf_offset_t positions_offset, unsigned int size)
12971 {
12972  ibf_offset_t reading_pos = positions_offset;
12973  unsigned int *positions = ALLOC_N(unsigned int, size);
12974 
12975  unsigned int last = 0;
12976  unsigned int i;
12977  for (i = 0; i < size; i++) {
12978  positions[i] = last + (unsigned int)ibf_load_small_value(load, &reading_pos);
12979  last = positions[i];
12980  }
12981 
12982  return positions;
12983 }
12984 
12985 static ibf_offset_t
12986 ibf_dump_local_table(struct ibf_dump *dump, const rb_iseq_t *iseq)
12987 {
12988  const struct rb_iseq_constant_body *const body = ISEQ_BODY(iseq);
12989  const int size = body->local_table_size;
12990  ID *table = ALLOCA_N(ID, size);
12991  int i;
12992 
12993  for (i=0; i<size; i++) {
12994  VALUE v = ibf_dump_id(dump, body->local_table[i]);
12995  if (v == 0) {
12996  /* Dump hidden local variables as indexes, so load_from_binary will work with them */
12997  v = ibf_dump_object(dump, ULONG2NUM(body->local_table[i]));
12998  }
12999  table[i] = v;
13000  }
13001 
13002  IBF_W_ALIGN(ID);
13003  return ibf_dump_write(dump, table, sizeof(ID) * size);
13004 }
13005 
13006 static ID *
13007 ibf_load_local_table(const struct ibf_load *load, ibf_offset_t local_table_offset, int size)
13008 {
13009  if (size > 0) {
13010  ID *table = IBF_R(local_table_offset, ID, size);
13011  int i;
13012 
13013  for (i=0; i<size; i++) {
13014  table[i] = ibf_load_id(load, table[i]);
13015  }
13016  return table;
13017  }
13018  else {
13019  return NULL;
13020  }
13021 }
13022 
static ibf_offset_t
ibf_dump_catch_table(struct ibf_dump *dump, const rb_iseq_t *iseq)
{
    /* Serialize the iseq's catch (exception handler) table and return the
     * offset of its entries.  When there is no table, the current position
     * is returned; the loader distinguishes the cases by the separately
     * dumped table size. */
    const struct iseq_catch_table *table = ISEQ_BODY(iseq)->catch_table;

    if (table) {
        int *iseq_indices = ALLOCA_N(int, table->size);
        unsigned int i;

        /* Dump the handler iseqs up front, before `offset` is taken —
         * ibf_dump_iseq may itself append data to the stream. */
        for (i=0; i<table->size; i++) {
            iseq_indices[i] = ibf_dump_iseq(dump, table->entries[i].iseq);
        }

        const ibf_offset_t offset = ibf_dump_pos(dump);

        /* Field order must match the reads in ibf_load_catch_table. */
        for (i=0; i<table->size; i++) {
            ibf_dump_write_small_value(dump, iseq_indices[i]);
            ibf_dump_write_small_value(dump, table->entries[i].type);
            ibf_dump_write_small_value(dump, table->entries[i].start);
            ibf_dump_write_small_value(dump, table->entries[i].end);
            ibf_dump_write_small_value(dump, table->entries[i].cont);
            ibf_dump_write_small_value(dump, table->entries[i].sp);
        }
        return offset;
    }
    else {
        return ibf_dump_pos(dump);
    }
}
13052 
static struct iseq_catch_table *
ibf_load_catch_table(const struct ibf_load *load, ibf_offset_t catch_table_offset, unsigned int size)
{
    /* Rebuild the catch table dumped by ibf_dump_catch_table, or return
     * NULL when the iseq had none.  Read order must mirror the dump. */
    if (size) {
        struct iseq_catch_table *table = ruby_xmalloc(iseq_catch_table_bytes(size));
        table->size = size;

        ibf_offset_t reading_pos = catch_table_offset;

        unsigned int i;
        for (i=0; i<table->size; i++) {
            int iseq_index = (int)ibf_load_small_value(load, &reading_pos);
            table->entries[i].type = (enum rb_catch_type)ibf_load_small_value(load, &reading_pos);
            table->entries[i].start = (unsigned int)ibf_load_small_value(load, &reading_pos);
            table->entries[i].end = (unsigned int)ibf_load_small_value(load, &reading_pos);
            table->entries[i].cont = (unsigned int)ibf_load_small_value(load, &reading_pos);
            table->entries[i].sp = (unsigned int)ibf_load_small_value(load, &reading_pos);

            /* The dumped value is an iseq index smuggled through the
             * pointer argument; ibf_load_iseq resolves it. */
            table->entries[i].iseq = ibf_load_iseq(load, (const rb_iseq_t *)(VALUE)iseq_index);
        }
        return table;
    }
    else {
        return NULL;
    }
}
13079 
static ibf_offset_t
ibf_dump_ci_entries(struct ibf_dump *dump, const rb_iseq_t *iseq)
{
    /* Serialize the call-info side of the iseq's call_data array.
     * Per entry: method-id index, flag, argc, keyword count followed by
     * keyword objects (or 0 when there are none).  A NULL ci is encoded
     * as the (VALUE)-1 sentinel.  Returns the offset of the record. */
    const struct rb_iseq_constant_body *const body = ISEQ_BODY(iseq);
    const unsigned int ci_size = body->ci_size;
    const struct rb_call_data *cds = body->call_data;

    ibf_offset_t offset = ibf_dump_pos(dump);

    unsigned int i;

    for (i = 0; i < ci_size; i++) {
        const struct rb_callinfo *ci = cds[i].ci;
        if (ci != NULL) {
            ibf_dump_write_small_value(dump, ibf_dump_id(dump, vm_ci_mid(ci)));
            ibf_dump_write_small_value(dump, vm_ci_flag(ci));
            ibf_dump_write_small_value(dump, vm_ci_argc(ci));

            const struct rb_callinfo_kwarg *kwarg = vm_ci_kwarg(ci);
            if (kwarg) {
                int len = kwarg->keyword_len;
                ibf_dump_write_small_value(dump, len);
                for (int j=0; j<len; j++) {
                    /* keywords go through the object table; store indexes */
                    VALUE keyword = ibf_dump_object(dump, kwarg->keywords[j]);
                    ibf_dump_write_small_value(dump, keyword);
                }
            }
            else {
                /* zero keyword count marks "no kwargs" for the loader */
                ibf_dump_write_small_value(dump, 0);
            }
        }
        else {
            // TODO: truncate NULL ci from call_data.
            ibf_dump_write_small_value(dump, (VALUE)-1);
        }
    }

    return offset;
}
13119 
13121  ID id;
13122  VALUE name;
13123  VALUE val;
13124 };
13125 
13127  size_t num;
13128  struct outer_variable_pair pairs[1];
13129 };
13130 
13131 static enum rb_id_table_iterator_result
13132 store_outer_variable(ID id, VALUE val, void *dump)
13133 {
13134  struct outer_variable_list *ovlist = dump;
13135  struct outer_variable_pair *pair = &ovlist->pairs[ovlist->num++];
13136  pair->id = id;
13137  pair->name = rb_id2str(id);
13138  pair->val = val;
13139  return ID_TABLE_CONTINUE;
13140 }
13141 
13142 static int
13143 outer_variable_cmp(const void *a, const void *b, void *arg)
13144 {
13145  const struct outer_variable_pair *ap = (const struct outer_variable_pair *)a;
13146  const struct outer_variable_pair *bp = (const struct outer_variable_pair *)b;
13147  return rb_str_cmp(ap->name, bp->name);
13148 }
13149 
13150 static ibf_offset_t
13151 ibf_dump_outer_variables(struct ibf_dump *dump, const rb_iseq_t *iseq)
13152 {
13153  struct rb_id_table * ovs = ISEQ_BODY(iseq)->outer_variables;
13154 
13155  ibf_offset_t offset = ibf_dump_pos(dump);
13156 
13157  size_t size = ovs ? rb_id_table_size(ovs) : 0;
13158  ibf_dump_write_small_value(dump, (VALUE)size);
13159  if (size > 0) {
13160  VALUE buff;
13161  size_t buffsize =
13162  rb_size_mul_add_or_raise(sizeof(struct outer_variable_pair), size,
13163  offsetof(struct outer_variable_list, pairs),
13164  rb_eArgError);
13165  struct outer_variable_list *ovlist = RB_ALLOCV(buff, buffsize);
13166  ovlist->num = 0;
13167  rb_id_table_foreach(ovs, store_outer_variable, ovlist);
13168  ruby_qsort(ovlist->pairs, size, sizeof(struct outer_variable_pair), outer_variable_cmp, NULL);
13169  for (size_t i = 0; i < size; ++i) {
13170  ID id = ovlist->pairs[i].id;
13171  ID val = ovlist->pairs[i].val;
13172  ibf_dump_write_small_value(dump, ibf_dump_id(dump, id));
13173  ibf_dump_write_small_value(dump, val);
13174  }
13175  }
13176 
13177  return offset;
13178 }
13179 
/* note that we dump out rb_call_info but load back rb_call_data */
static void
ibf_load_ci_entries(const struct ibf_load *load,
                    ibf_offset_t ci_entries_offset,
                    unsigned int ci_size,
                    struct rb_call_data **cd_ptr)
{
    /* Rebuild the call-data array from the stream written by
     * ibf_dump_ci_entries.  Read order mirrors the dump: mid index, flag,
     * argc, keyword count (+ keyword objects); (VALUE)-1 marks a NULL ci. */
    ibf_offset_t reading_pos = ci_entries_offset;

    unsigned int i;

    struct rb_call_data *cds = ZALLOC_N(struct rb_call_data, ci_size);
    *cd_ptr = cds;

    for (i = 0; i < ci_size; i++) {
        VALUE mid_index = ibf_load_small_value(load, &reading_pos);
        if (mid_index != (VALUE)-1) {
            ID mid = ibf_load_id(load, mid_index);
            unsigned int flag = (unsigned int)ibf_load_small_value(load, &reading_pos);
            unsigned int argc = (unsigned int)ibf_load_small_value(load, &reading_pos);

            struct rb_callinfo_kwarg *kwarg = NULL;
            int kwlen = (int)ibf_load_small_value(load, &reading_pos);
            if (kwlen > 0) {
                kwarg = rb_xmalloc_mul_add(kwlen, sizeof(VALUE), sizeof(struct rb_callinfo_kwarg));
                kwarg->references = 0;
                kwarg->keyword_len = kwlen;
                for (int j=0; j<kwlen; j++) {
                    VALUE keyword = ibf_load_small_value(load, &reading_pos);
                    kwarg->keywords[j] = ibf_load_object(load, keyword);
                }
            }

            cds[i].ci = vm_ci_new(mid, flag, argc, kwarg);
            /* GC write barrier: the iseq now references the new ci. */
            RB_OBJ_WRITTEN(load->iseq, Qundef, cds[i].ci);
            cds[i].cc = vm_cc_empty();
        }
        else {
            // NULL ci
            cds[i].ci = NULL;
            cds[i].cc = NULL;
        }
    }
}
13224 
static struct rb_id_table *
ibf_load_outer_variables(const struct ibf_load * load, ibf_offset_t outer_variables_offset)
{
    /* Inverse of ibf_dump_outer_variables: read the pair count, then the
     * (id, value) pairs into a fresh id table.  Returns NULL when the
     * iseq had no outer variables. */
    ibf_offset_t reading_pos = outer_variables_offset;

    struct rb_id_table *tbl = NULL;

    size_t table_size = (size_t)ibf_load_small_value(load, &reading_pos);

    if (table_size > 0) {
        tbl = rb_id_table_create(table_size);
    }

    for (size_t i = 0; i < table_size; i++) {
        ID key = ibf_load_id(load, (ID)ibf_load_small_value(load, &reading_pos));
        VALUE value = ibf_load_small_value(load, &reading_pos);
        /* hidden variables were dumped without a name; mint a fresh
         * temporary id for them */
        if (!key) key = rb_make_temporary_id(i);
        rb_id_table_insert(tbl, key, value);
    }

    return tbl;
}
13247 
static ibf_offset_t
ibf_dump_iseq_each(struct ibf_dump *dump, const rb_iseq_t *iseq)
{
    /* Serialize one iseq into the dump stream and return the offset of its
     * constant-body record.  The write order below is the on-disk format:
     * it must mirror the read order in ibf_load_iseq_each exactly. */
    RUBY_ASSERT(dump->current_buffer == &dump->global_buffer);

    unsigned int *positions;

    const struct rb_iseq_constant_body *body = ISEQ_BODY(iseq);

    /* Location strings go through the object table; record their indexes. */
    const VALUE location_pathobj_index = ibf_dump_object(dump, body->location.pathobj); /* TODO: freeze */
    const VALUE location_base_label_index = ibf_dump_object(dump, body->location.base_label);
    const VALUE location_label_index = ibf_dump_object(dump, body->location.label);

#if IBF_ISEQ_ENABLE_LOCAL_BUFFER
    /* Each iseq gets its own byte buffer and object table, spliced into
     * the global stream once this iseq is fully dumped. */
    ibf_offset_t iseq_start = ibf_dump_pos(dump);

    struct ibf_dump_buffer *saved_buffer = dump->current_buffer;
    struct ibf_dump_buffer buffer;
    buffer.str = rb_str_new(0, 0);
    buffer.obj_table = ibf_dump_object_table_new();
    dump->current_buffer = &buffer;
#endif

    /* Variable-length sections first; their offsets go into the
     * fixed-order body record written below. */
    const ibf_offset_t bytecode_offset = ibf_dump_code(dump, iseq);
    const ibf_offset_t bytecode_size = ibf_dump_pos(dump) - bytecode_offset;
    const ibf_offset_t param_opt_table_offset = ibf_dump_param_opt_table(dump, iseq);
    const ibf_offset_t param_keyword_offset = ibf_dump_param_keyword(dump, iseq);
    const ibf_offset_t insns_info_body_offset = ibf_dump_insns_info_body(dump, iseq);

    positions = rb_iseq_insns_info_decode_positions(ISEQ_BODY(iseq));
    const ibf_offset_t insns_info_positions_offset = ibf_dump_insns_info_positions(dump, positions, body->insns_info.size);
    ruby_xfree(positions);

    const ibf_offset_t local_table_offset = ibf_dump_local_table(dump, iseq);
    const unsigned int catch_table_size = body->catch_table ? body->catch_table->size : 0;
    const ibf_offset_t catch_table_offset = ibf_dump_catch_table(dump, iseq);
    const int parent_iseq_index = ibf_dump_iseq(dump, ISEQ_BODY(iseq)->parent_iseq);
    const int local_iseq_index = ibf_dump_iseq(dump, ISEQ_BODY(iseq)->local_iseq);
    const int mandatory_only_iseq_index = ibf_dump_iseq(dump, ISEQ_BODY(iseq)->mandatory_only_iseq);
    const ibf_offset_t ci_entries_offset = ibf_dump_ci_entries(dump, iseq);
    const ibf_offset_t outer_variables_offset = ibf_dump_outer_variables(dump, iseq);

#if IBF_ISEQ_ENABLE_LOCAL_BUFFER
    ibf_offset_t local_obj_list_offset;
    unsigned int local_obj_list_size;

    ibf_dump_object_list(dump, &local_obj_list_offset, &local_obj_list_size);
#endif

    ibf_offset_t body_offset = ibf_dump_pos(dump);

    /* dump the constant body */
    /* One-bit parameter flags packed into a single small value; the bit
     * positions must match the unpacking in ibf_load_iseq_each. */
    unsigned int param_flags =
        (body->param.flags.has_lead << 0) |
        (body->param.flags.has_opt << 1) |
        (body->param.flags.has_rest << 2) |
        (body->param.flags.has_post << 3) |
        (body->param.flags.has_kw << 4) |
        (body->param.flags.has_kwrest << 5) |
        (body->param.flags.has_block << 6) |
        (body->param.flags.ambiguous_param0 << 7) |
        (body->param.flags.accepts_no_kwarg << 8) |
        (body->param.flags.ruby2_keywords << 9) |
        (body->param.flags.anon_rest << 10) |
        (body->param.flags.anon_kwrest << 11) |
        (body->param.flags.use_block << 12) |
        (body->param.flags.forwardable << 13) ;

#if IBF_ISEQ_ENABLE_LOCAL_BUFFER
# define IBF_BODY_OFFSET(x) (x)
#else
/* without local buffers, section offsets are stored relative to the
 * body record so they stay small */
# define IBF_BODY_OFFSET(x) (body_offset - (x))
#endif

    ibf_dump_write_small_value(dump, body->type);
    ibf_dump_write_small_value(dump, body->iseq_size);
    ibf_dump_write_small_value(dump, IBF_BODY_OFFSET(bytecode_offset));
    ibf_dump_write_small_value(dump, bytecode_size);
    ibf_dump_write_small_value(dump, param_flags);
    ibf_dump_write_small_value(dump, body->param.size);
    ibf_dump_write_small_value(dump, body->param.lead_num);
    ibf_dump_write_small_value(dump, body->param.opt_num);
    ibf_dump_write_small_value(dump, body->param.rest_start);
    ibf_dump_write_small_value(dump, body->param.post_start);
    ibf_dump_write_small_value(dump, body->param.post_num);
    ibf_dump_write_small_value(dump, body->param.block_start);
    ibf_dump_write_small_value(dump, IBF_BODY_OFFSET(param_opt_table_offset));
    ibf_dump_write_small_value(dump, param_keyword_offset);
    ibf_dump_write_small_value(dump, location_pathobj_index);
    ibf_dump_write_small_value(dump, location_base_label_index);
    ibf_dump_write_small_value(dump, location_label_index);
    ibf_dump_write_small_value(dump, body->location.first_lineno);
    ibf_dump_write_small_value(dump, body->location.node_id);
    ibf_dump_write_small_value(dump, body->location.code_location.beg_pos.lineno);
    ibf_dump_write_small_value(dump, body->location.code_location.beg_pos.column);
    ibf_dump_write_small_value(dump, body->location.code_location.end_pos.lineno);
    ibf_dump_write_small_value(dump, body->location.code_location.end_pos.column);
    ibf_dump_write_small_value(dump, IBF_BODY_OFFSET(insns_info_body_offset));
    ibf_dump_write_small_value(dump, IBF_BODY_OFFSET(insns_info_positions_offset));
    ibf_dump_write_small_value(dump, body->insns_info.size);
    ibf_dump_write_small_value(dump, IBF_BODY_OFFSET(local_table_offset));
    ibf_dump_write_small_value(dump, catch_table_size);
    ibf_dump_write_small_value(dump, IBF_BODY_OFFSET(catch_table_offset));
    ibf_dump_write_small_value(dump, parent_iseq_index);
    ibf_dump_write_small_value(dump, local_iseq_index);
    ibf_dump_write_small_value(dump, mandatory_only_iseq_index);
    ibf_dump_write_small_value(dump, IBF_BODY_OFFSET(ci_entries_offset));
    ibf_dump_write_small_value(dump, IBF_BODY_OFFSET(outer_variables_offset));
    ibf_dump_write_small_value(dump, body->variable.flip_count);
    ibf_dump_write_small_value(dump, body->local_table_size);
    ibf_dump_write_small_value(dump, body->ivc_size);
    ibf_dump_write_small_value(dump, body->icvarc_size);
    ibf_dump_write_small_value(dump, body->ise_size);
    ibf_dump_write_small_value(dump, body->ic_size);
    ibf_dump_write_small_value(dump, body->ci_size);
    ibf_dump_write_small_value(dump, body->stack_max);
    ibf_dump_write_small_value(dump, body->builtin_attrs);
    ibf_dump_write_small_value(dump, body->prism ? 1 : 0);

#undef IBF_BODY_OFFSET

#if IBF_ISEQ_ENABLE_LOCAL_BUFFER
    /* Splice the per-iseq buffer into the global stream, then write the
     * trailer (start/length/body offset + local object list) whose
     * offset is what we return. */
    ibf_offset_t iseq_length_bytes = ibf_dump_pos(dump);

    dump->current_buffer = saved_buffer;
    ibf_dump_write(dump, RSTRING_PTR(buffer.str), iseq_length_bytes);

    ibf_offset_t offset = ibf_dump_pos(dump);
    ibf_dump_write_small_value(dump, iseq_start);
    ibf_dump_write_small_value(dump, iseq_length_bytes);
    ibf_dump_write_small_value(dump, body_offset);

    ibf_dump_write_small_value(dump, local_obj_list_offset);
    ibf_dump_write_small_value(dump, local_obj_list_size);

    st_free_table(buffer.obj_table); // TODO: this leaks in case of exception

    return offset;
#else
    return body_offset;
#endif
}
13390 
13391 static VALUE
13392 ibf_load_location_str(const struct ibf_load *load, VALUE str_index)
13393 {
13394  VALUE str = ibf_load_object(load, str_index);
13395  if (str != Qnil) {
13396  str = rb_fstring(str);
13397  }
13398  return str;
13399 }
13400 
static void
ibf_load_iseq_each(struct ibf_load *load, rb_iseq_t *iseq, ibf_offset_t offset)
{
    /* Deserialize one iseq body record written by ibf_dump_iseq_each.
     * The reads below must stay in exactly the order of the writes on
     * the dump side. */
    struct rb_iseq_constant_body *load_body = ISEQ_BODY(iseq) = rb_iseq_constant_body_alloc();

    ibf_offset_t reading_pos = offset;

#if IBF_ISEQ_ENABLE_LOCAL_BUFFER
    /* Read the trailer, then switch to the iseq's private buffer and its
     * own object list (mirrors the dump-side local buffer). */
    struct ibf_load_buffer *saved_buffer = load->current_buffer;
    load->current_buffer = &load->global_buffer;

    const ibf_offset_t iseq_start = (ibf_offset_t)ibf_load_small_value(load, &reading_pos);
    const ibf_offset_t iseq_length_bytes = (ibf_offset_t)ibf_load_small_value(load, &reading_pos);
    const ibf_offset_t body_offset = (ibf_offset_t)ibf_load_small_value(load, &reading_pos);

    struct ibf_load_buffer buffer;
    buffer.buff = load->global_buffer.buff + iseq_start;
    buffer.size = iseq_length_bytes;
    buffer.obj_list_offset = (ibf_offset_t)ibf_load_small_value(load, &reading_pos);
    buffer.obj_list_size = (ibf_offset_t)ibf_load_small_value(load, &reading_pos);
    buffer.obj_list = pinned_list_new(buffer.obj_list_size);

    load->current_buffer = &buffer;
    reading_pos = body_offset;
#endif

#if IBF_ISEQ_ENABLE_LOCAL_BUFFER
# define IBF_BODY_OFFSET(x) (x)
#else
/* section offsets were dumped relative to the body record */
# define IBF_BODY_OFFSET(x) (offset - (x))
#endif

    const unsigned int type = (unsigned int)ibf_load_small_value(load, &reading_pos);
    const unsigned int iseq_size = (unsigned int)ibf_load_small_value(load, &reading_pos);
    const ibf_offset_t bytecode_offset = (ibf_offset_t)IBF_BODY_OFFSET(ibf_load_small_value(load, &reading_pos));
    const ibf_offset_t bytecode_size = (ibf_offset_t)ibf_load_small_value(load, &reading_pos);
    const unsigned int param_flags = (unsigned int)ibf_load_small_value(load, &reading_pos);
    const unsigned int param_size = (unsigned int)ibf_load_small_value(load, &reading_pos);
    const int param_lead_num = (int)ibf_load_small_value(load, &reading_pos);
    const int param_opt_num = (int)ibf_load_small_value(load, &reading_pos);
    const int param_rest_start = (int)ibf_load_small_value(load, &reading_pos);
    const int param_post_start = (int)ibf_load_small_value(load, &reading_pos);
    const int param_post_num = (int)ibf_load_small_value(load, &reading_pos);
    const int param_block_start = (int)ibf_load_small_value(load, &reading_pos);
    const ibf_offset_t param_opt_table_offset = (ibf_offset_t)IBF_BODY_OFFSET(ibf_load_small_value(load, &reading_pos));
    const ibf_offset_t param_keyword_offset = (ibf_offset_t)ibf_load_small_value(load, &reading_pos);
    const VALUE location_pathobj_index = ibf_load_small_value(load, &reading_pos);
    const VALUE location_base_label_index = ibf_load_small_value(load, &reading_pos);
    const VALUE location_label_index = ibf_load_small_value(load, &reading_pos);
    const int location_first_lineno = (int)ibf_load_small_value(load, &reading_pos);
    const int location_node_id = (int)ibf_load_small_value(load, &reading_pos);
    const int location_code_location_beg_pos_lineno = (int)ibf_load_small_value(load, &reading_pos);
    const int location_code_location_beg_pos_column = (int)ibf_load_small_value(load, &reading_pos);
    const int location_code_location_end_pos_lineno = (int)ibf_load_small_value(load, &reading_pos);
    const int location_code_location_end_pos_column = (int)ibf_load_small_value(load, &reading_pos);
    const ibf_offset_t insns_info_body_offset = (ibf_offset_t)IBF_BODY_OFFSET(ibf_load_small_value(load, &reading_pos));
    const ibf_offset_t insns_info_positions_offset = (ibf_offset_t)IBF_BODY_OFFSET(ibf_load_small_value(load, &reading_pos));
    const unsigned int insns_info_size = (unsigned int)ibf_load_small_value(load, &reading_pos);
    const ibf_offset_t local_table_offset = (ibf_offset_t)IBF_BODY_OFFSET(ibf_load_small_value(load, &reading_pos));
    const unsigned int catch_table_size = (unsigned int)ibf_load_small_value(load, &reading_pos);
    const ibf_offset_t catch_table_offset = (ibf_offset_t)IBF_BODY_OFFSET(ibf_load_small_value(load, &reading_pos));
    const int parent_iseq_index = (int)ibf_load_small_value(load, &reading_pos);
    const int local_iseq_index = (int)ibf_load_small_value(load, &reading_pos);
    const int mandatory_only_iseq_index = (int)ibf_load_small_value(load, &reading_pos);
    const ibf_offset_t ci_entries_offset = (ibf_offset_t)IBF_BODY_OFFSET(ibf_load_small_value(load, &reading_pos));
    const ibf_offset_t outer_variables_offset = (ibf_offset_t)IBF_BODY_OFFSET(ibf_load_small_value(load, &reading_pos));
    const rb_snum_t variable_flip_count = (rb_snum_t)ibf_load_small_value(load, &reading_pos);
    const unsigned int local_table_size = (unsigned int)ibf_load_small_value(load, &reading_pos);

    const unsigned int ivc_size = (unsigned int)ibf_load_small_value(load, &reading_pos);
    const unsigned int icvarc_size = (unsigned int)ibf_load_small_value(load, &reading_pos);
    const unsigned int ise_size = (unsigned int)ibf_load_small_value(load, &reading_pos);
    const unsigned int ic_size = (unsigned int)ibf_load_small_value(load, &reading_pos);

    const unsigned int ci_size = (unsigned int)ibf_load_small_value(load, &reading_pos);
    const unsigned int stack_max = (unsigned int)ibf_load_small_value(load, &reading_pos);
    const unsigned int builtin_attrs = (unsigned int)ibf_load_small_value(load, &reading_pos);
    const bool prism = (bool)ibf_load_small_value(load, &reading_pos);

    // setup fname and dummy frame
    VALUE path = ibf_load_object(load, location_pathobj_index);
    {
        VALUE realpath = Qnil;

        /* pathobj is either a single String or a [path, realpath] pair */
        if (RB_TYPE_P(path, T_STRING)) {
            realpath = path = rb_fstring(path);
        }
        else if (RB_TYPE_P(path, T_ARRAY)) {
            VALUE pathobj = path;
            if (RARRAY_LEN(pathobj) != 2) {
                rb_raise(rb_eRuntimeError, "path object size mismatch");
            }
            path = rb_fstring(RARRAY_AREF(pathobj, 0));
            realpath = RARRAY_AREF(pathobj, 1);
            if (!NIL_P(realpath)) {
                if (!RB_TYPE_P(realpath, T_STRING)) {
                    rb_raise(rb_eArgError, "unexpected realpath %"PRIxVALUE
                             "(%x), path=%+"PRIsVALUE,
                             realpath, TYPE(realpath), path);
                }
                realpath = rb_fstring(realpath);
            }
        }
        else {
            rb_raise(rb_eRuntimeError, "unexpected path object");
        }
        rb_iseq_pathobj_set(iseq, path, realpath);
    }

    // push dummy frame
    rb_execution_context_t *ec = GET_EC();
    VALUE dummy_frame = rb_vm_push_frame_fname(ec, path);

#undef IBF_BODY_OFFSET

    load_body->type = type;
    load_body->stack_max = stack_max;
    /* unpack the param flag bits (positions match the dump side) */
    load_body->param.flags.has_lead = (param_flags >> 0) & 1;
    load_body->param.flags.has_opt = (param_flags >> 1) & 1;
    load_body->param.flags.has_rest = (param_flags >> 2) & 1;
    load_body->param.flags.has_post = (param_flags >> 3) & 1;
    /* has_kw is deliberately deferred until param.keyword is loaded below */
    load_body->param.flags.has_kw = FALSE;
    load_body->param.flags.has_kwrest = (param_flags >> 5) & 1;
    load_body->param.flags.has_block = (param_flags >> 6) & 1;
    load_body->param.flags.ambiguous_param0 = (param_flags >> 7) & 1;
    load_body->param.flags.accepts_no_kwarg = (param_flags >> 8) & 1;
    load_body->param.flags.ruby2_keywords = (param_flags >> 9) & 1;
    load_body->param.flags.anon_rest = (param_flags >> 10) & 1;
    load_body->param.flags.anon_kwrest = (param_flags >> 11) & 1;
    load_body->param.flags.use_block = (param_flags >> 12) & 1;
    load_body->param.flags.forwardable = (param_flags >> 13) & 1;
    load_body->param.size = param_size;
    load_body->param.lead_num = param_lead_num;
    load_body->param.opt_num = param_opt_num;
    load_body->param.rest_start = param_rest_start;
    load_body->param.post_start = param_post_start;
    load_body->param.post_num = param_post_num;
    load_body->param.block_start = param_block_start;
    load_body->local_table_size = local_table_size;
    load_body->ci_size = ci_size;
    load_body->insns_info.size = insns_info_size;

    ISEQ_COVERAGE_SET(iseq, Qnil);
    ISEQ_ORIGINAL_ISEQ_CLEAR(iseq);
    load_body->variable.flip_count = variable_flip_count;
    load_body->variable.script_lines = Qnil;

    load_body->location.first_lineno = location_first_lineno;
    load_body->location.node_id = location_node_id;
    load_body->location.code_location.beg_pos.lineno = location_code_location_beg_pos_lineno;
    load_body->location.code_location.beg_pos.column = location_code_location_beg_pos_column;
    load_body->location.code_location.end_pos.lineno = location_code_location_end_pos_lineno;
    load_body->location.code_location.end_pos.column = location_code_location_end_pos_column;
    load_body->builtin_attrs = builtin_attrs;
    load_body->prism = prism;

    load_body->ivc_size = ivc_size;
    load_body->icvarc_size = icvarc_size;
    load_body->ise_size = ise_size;
    load_body->ic_size = ic_size;

    if (ISEQ_IS_SIZE(load_body)) {
        load_body->is_entries = ZALLOC_N(union iseq_inline_storage_entry, ISEQ_IS_SIZE(load_body));
    }
    else {
        load_body->is_entries = NULL;
    }
    ibf_load_ci_entries(load, ci_entries_offset, ci_size, &load_body->call_data);
    load_body->outer_variables = ibf_load_outer_variables(load, outer_variables_offset);
    load_body->param.opt_table = ibf_load_param_opt_table(load, param_opt_table_offset, param_opt_num);
    load_body->param.keyword = ibf_load_param_keyword(load, param_keyword_offset);
    load_body->param.flags.has_kw = (param_flags >> 4) & 1;
    load_body->insns_info.body = ibf_load_insns_info_body(load, insns_info_body_offset, insns_info_size);
    load_body->insns_info.positions = ibf_load_insns_info_positions(load, insns_info_positions_offset, insns_info_size);
    load_body->local_table = ibf_load_local_table(load, local_table_offset, local_table_size);
    load_body->catch_table = ibf_load_catch_table(load, catch_table_offset, catch_table_size);
    load_body->parent_iseq = ibf_load_iseq(load, (const rb_iseq_t *)(VALUE)parent_iseq_index);
    load_body->local_iseq = ibf_load_iseq(load, (const rb_iseq_t *)(VALUE)local_iseq_index);
    load_body->mandatory_only_iseq = ibf_load_iseq(load, (const rb_iseq_t *)(VALUE)mandatory_only_iseq_index);

    // This must be done after the local table is loaded.
    if (load_body->param.keyword != NULL) {
        RUBY_ASSERT(load_body->local_table);
        struct rb_iseq_param_keyword *keyword = (struct rb_iseq_param_keyword *) load_body->param.keyword;
        /* the keyword table aliases a slice of the local table */
        keyword->table = &load_body->local_table[keyword->bits_start - keyword->num];
    }

    ibf_load_code(load, iseq, bytecode_offset, bytecode_size, iseq_size);
#if VM_INSN_INFO_TABLE_IMPL == 2
    rb_iseq_insns_info_encode_positions(iseq);
#endif

    rb_iseq_translate_threaded_code(iseq);

#if IBF_ISEQ_ENABLE_LOCAL_BUFFER
    load->current_buffer = &load->global_buffer;
#endif

    RB_OBJ_WRITE(iseq, &load_body->location.base_label, ibf_load_location_str(load, location_base_label_index));
    RB_OBJ_WRITE(iseq, &load_body->location.label, ibf_load_location_str(load, location_label_index));

#if IBF_ISEQ_ENABLE_LOCAL_BUFFER
    load->current_buffer = saved_buffer;
#endif
    verify_call_cache(iseq);

    RB_GC_GUARD(dummy_frame);
    rb_vm_pop_frame_no_int(ec);
}
13610 
13612 {
13613  struct ibf_dump *dump;
13614  VALUE offset_list;
13615 };
13616 
13617 static int
13618 ibf_dump_iseq_list_i(st_data_t key, st_data_t val, st_data_t ptr)
13619 {
13620  const rb_iseq_t *iseq = (const rb_iseq_t *)key;
13621  struct ibf_dump_iseq_list_arg *args = (struct ibf_dump_iseq_list_arg *)ptr;
13622 
13623  ibf_offset_t offset = ibf_dump_iseq_each(args->dump, iseq);
13624  rb_ary_push(args->offset_list, UINT2NUM(offset));
13625 
13626  return ST_CONTINUE;
13627 }
13628 
static void
ibf_dump_iseq_list(struct ibf_dump *dump, struct ibf_header *header)
{
    /* Dump every iseq registered in dump->iseq_table, then write the
     * aligned table of their offsets and record its location and size in
     * the binary header. */
    VALUE offset_list = rb_ary_hidden_new(dump->iseq_table->num_entries);

    struct ibf_dump_iseq_list_arg args;
    args.dump = dump;
    args.offset_list = offset_list;

    st_foreach(dump->iseq_table, ibf_dump_iseq_list_i, (st_data_t)&args);

    /* NOTE(review): num_entries is re-read here rather than cached before
     * the st_foreach — presumably dumping can register more iseqs. */
    st_index_t i;
    st_index_t size = dump->iseq_table->num_entries;
    ibf_offset_t *offsets = ALLOCA_N(ibf_offset_t, size);

    for (i = 0; i < size; i++) {
        offsets[i] = NUM2UINT(RARRAY_AREF(offset_list, i));
    }

    ibf_dump_align(dump, sizeof(ibf_offset_t));
    header->iseq_list_offset = ibf_dump_write(dump, offsets, sizeof(ibf_offset_t) * size);
    header->iseq_list_size = (unsigned int)size;
}
13652 
13653 #define IBF_OBJECT_INTERNAL FL_PROMOTED0
13654 
13655 /*
13656  * Binary format
13657  * - ibf_object_header
13658  * - ibf_object_xxx (xxx is type)
13659  */
13660 
13662  unsigned int type: 5;
13663  unsigned int special_const: 1;
13664  unsigned int frozen: 1;
13665  unsigned int internal: 1;
13666 };
13667 
/* Indexes for the small whitelist of class objects that can appear in a
 * binary dump; see ibf_dump_object_class / ibf_load_object_class. */
enum ibf_object_class_index {
    IBF_OBJECT_CLASS_OBJECT,
    IBF_OBJECT_CLASS_ARRAY,
    IBF_OBJECT_CLASS_STANDARD_ERROR,
    IBF_OBJECT_CLASS_NO_MATCHING_PATTERN_ERROR,
    IBF_OBJECT_CLASS_TYPE_ERROR,
    IBF_OBJECT_CLASS_NO_MATCHING_PATTERN_KEY_ERROR,
};
13676 
13678  long srcstr;
13679  char option;
13680 };
13681 
13683  long len;
13684  long keyval[FLEX_ARY_LEN];
13685 };
13686 
13688  long class_index;
13689  long len;
13690  long beg;
13691  long end;
13692  int excl;
13693 };
13694 
13696  ssize_t slen;
13697  BDIGIT digits[FLEX_ARY_LEN];
13698 };
13699 
/* Subtype tag for IBF "data" objects; currently only encodings. */
enum ibf_object_data_type {
    IBF_OBJECT_DATA_ENCODING,
};
13703 
13705  long a, b;
13706 };
13707 
13709  long str;
13710 };
13711 
/* Round `offset` up to the next multiple of `align`. */
#define IBF_ALIGNED_OFFSET(align, offset) /* offset > 0 */ \
    ((((offset) - 1) / (align) + 1) * (align))
/* Resolve an object-body offset to a bounds-checked pointer of the given
 * type within the current load buffer, aligned for that type. */
#define IBF_OBJBODY(type, offset) (const type *)\
    ibf_load_check_offset(load, IBF_ALIGNED_OFFSET(RUBY_ALIGNOF(type), offset))
13716 
13717 static const void *
13718 ibf_load_check_offset(const struct ibf_load *load, size_t offset)
13719 {
13720  if (offset >= load->current_buffer->size) {
13721  rb_raise(rb_eIndexError, "object offset out of range: %"PRIdSIZE, offset);
13722  }
13723  return load->current_buffer->buff + offset;
13724 }
13725 
NORETURN(static void ibf_dump_object_unsupported(struct ibf_dump *dump, VALUE obj));

/* Dump-side stub for object types the binary format cannot represent;
 * raises NotImplementedError with a description of the offending object. */
static void
ibf_dump_object_unsupported(struct ibf_dump *dump, VALUE obj)
{
    char buff[0x100];
    rb_raw_obj_info(buff, sizeof(buff), obj);
    rb_raise(rb_eNotImpError, "ibf_dump_object_unsupported: %s", buff);
}
13735 
NORETURN(static VALUE ibf_load_object_unsupported(const struct ibf_load *load, const struct ibf_object_header *header, ibf_offset_t offset));

/* Load-side stub for object types that cannot appear in a valid dump. */
static VALUE
ibf_load_object_unsupported(const struct ibf_load *load, const struct ibf_object_header *header, ibf_offset_t offset)
{
    rb_raise(rb_eArgError, "unsupported");
    /* NOTE(review): some compilers may warn about the missing return after
     * the NORETURN rb_raise — confirm whether an unreachable-return marker
     * was dropped here. */
}
13744 
13745 static void
13746 ibf_dump_object_class(struct ibf_dump *dump, VALUE obj)
13747 {
13748  enum ibf_object_class_index cindex;
13749  if (obj == rb_cObject) {
13750  cindex = IBF_OBJECT_CLASS_OBJECT;
13751  }
13752  else if (obj == rb_cArray) {
13753  cindex = IBF_OBJECT_CLASS_ARRAY;
13754  }
13755  else if (obj == rb_eStandardError) {
13756  cindex = IBF_OBJECT_CLASS_STANDARD_ERROR;
13757  }
13758  else if (obj == rb_eNoMatchingPatternError) {
13759  cindex = IBF_OBJECT_CLASS_NO_MATCHING_PATTERN_ERROR;
13760  }
13761  else if (obj == rb_eTypeError) {
13762  cindex = IBF_OBJECT_CLASS_TYPE_ERROR;
13763  }
13764  else if (obj == rb_eNoMatchingPatternKeyError) {
13765  cindex = IBF_OBJECT_CLASS_NO_MATCHING_PATTERN_KEY_ERROR;
13766  }
13767  else {
13768  rb_obj_info_dump(obj);
13769  rb_p(obj);
13770  rb_bug("unsupported class");
13771  }
13772  ibf_dump_write_small_value(dump, (VALUE)cindex);
13773 }
13774 
13775 static VALUE
13776 ibf_load_object_class(const struct ibf_load *load, const struct ibf_object_header *header, ibf_offset_t offset)
13777 {
13778  enum ibf_object_class_index cindex = (enum ibf_object_class_index)ibf_load_small_value(load, &offset);
13779 
13780  switch (cindex) {
13781  case IBF_OBJECT_CLASS_OBJECT:
13782  return rb_cObject;
13783  case IBF_OBJECT_CLASS_ARRAY:
13784  return rb_cArray;
13785  case IBF_OBJECT_CLASS_STANDARD_ERROR:
13786  return rb_eStandardError;
13787  case IBF_OBJECT_CLASS_NO_MATCHING_PATTERN_ERROR:
13789  case IBF_OBJECT_CLASS_TYPE_ERROR:
13790  return rb_eTypeError;
13791  case IBF_OBJECT_CLASS_NO_MATCHING_PATTERN_KEY_ERROR:
13793  }
13794 
13795  rb_raise(rb_eArgError, "ibf_load_object_class: unknown class (%d)", (int)cindex);
13796 }
13797 
13798 
13799 static void
13800 ibf_dump_object_float(struct ibf_dump *dump, VALUE obj)
13801 {
13802  double dbl = RFLOAT_VALUE(obj);
13803  (void)IBF_W(&dbl, double, 1);
13804 }
13805 
13806 static VALUE
13807 ibf_load_object_float(const struct ibf_load *load, const struct ibf_object_header *header, ibf_offset_t offset)
13808 {
13809  const double *dblp = IBF_OBJBODY(double, offset);
13810  return DBL2NUM(*dblp);
13811 }
13812 
13813 static void
13814 ibf_dump_object_string(struct ibf_dump *dump, VALUE obj)
13815 {
13816  long encindex = (long)rb_enc_get_index(obj);
13817  long len = RSTRING_LEN(obj);
13818  const char *ptr = RSTRING_PTR(obj);
13819 
13820  if (encindex > RUBY_ENCINDEX_BUILTIN_MAX) {
13821  rb_encoding *enc = rb_enc_from_index((int)encindex);
13822  const char *enc_name = rb_enc_name(enc);
13823  encindex = RUBY_ENCINDEX_BUILTIN_MAX + ibf_dump_object(dump, rb_str_new2(enc_name));
13824  }
13825 
13826  ibf_dump_write_small_value(dump, encindex);
13827  ibf_dump_write_small_value(dump, len);
13828  IBF_WP(ptr, char, len);
13829 }
13830 
13831 static VALUE
13832 ibf_load_object_string(const struct ibf_load *load, const struct ibf_object_header *header, ibf_offset_t offset)
13833 {
13834  ibf_offset_t reading_pos = offset;
13835 
13836  int encindex = (int)ibf_load_small_value(load, &reading_pos);
13837  const long len = (long)ibf_load_small_value(load, &reading_pos);
13838  const char *ptr = load->current_buffer->buff + reading_pos;
13839 
13840  if (encindex > RUBY_ENCINDEX_BUILTIN_MAX) {
13841  VALUE enc_name_str = ibf_load_object(load, encindex - RUBY_ENCINDEX_BUILTIN_MAX);
13842  encindex = rb_enc_find_index(RSTRING_PTR(enc_name_str));
13843  }
13844 
13845  VALUE str;
13846  if (header->frozen && !header->internal) {
13847  str = rb_enc_literal_str(ptr, len, rb_enc_from_index(encindex));
13848  }
13849  else {
13850  str = rb_enc_str_new(ptr, len, rb_enc_from_index(encindex));
13851 
13852  if (header->internal) rb_obj_hide(str);
13853  if (header->frozen) str = rb_fstring(str);
13854  }
13855  return str;
13856 }
13857 
13858 static void
13859 ibf_dump_object_regexp(struct ibf_dump *dump, VALUE obj)
13860 {
13861  VALUE srcstr = RREGEXP_SRC(obj);
13862  struct ibf_object_regexp regexp;
13863  regexp.option = (char)rb_reg_options(obj);
13864  regexp.srcstr = (long)ibf_dump_object(dump, srcstr);
13865 
13866  ibf_dump_write_byte(dump, (unsigned char)regexp.option);
13867  ibf_dump_write_small_value(dump, regexp.srcstr);
13868 }
13869 
13870 static VALUE
13871 ibf_load_object_regexp(const struct ibf_load *load, const struct ibf_object_header *header, ibf_offset_t offset)
13872 {
13873  struct ibf_object_regexp regexp;
13874  regexp.option = ibf_load_byte(load, &offset);
13875  regexp.srcstr = ibf_load_small_value(load, &offset);
13876 
13877  VALUE srcstr = ibf_load_object(load, regexp.srcstr);
13878  VALUE reg = rb_reg_compile(srcstr, (int)regexp.option, NULL, 0);
13879 
13880  if (header->internal) rb_obj_hide(reg);
13881  if (header->frozen) rb_obj_freeze(reg);
13882 
13883  return reg;
13884 }
13885 
13886 static void
13887 ibf_dump_object_array(struct ibf_dump *dump, VALUE obj)
13888 {
13889  long i, len = RARRAY_LEN(obj);
13890  ibf_dump_write_small_value(dump, len);
13891  for (i=0; i<len; i++) {
13892  long index = (long)ibf_dump_object(dump, RARRAY_AREF(obj, i));
13893  ibf_dump_write_small_value(dump, index);
13894  }
13895 }
13896 
13897 static VALUE
13898 ibf_load_object_array(const struct ibf_load *load, const struct ibf_object_header *header, ibf_offset_t offset)
13899 {
13900  ibf_offset_t reading_pos = offset;
13901 
13902  const long len = (long)ibf_load_small_value(load, &reading_pos);
13903 
13904  VALUE ary = header->internal ? rb_ary_hidden_new(len) : rb_ary_new_capa(len);
13905  int i;
13906 
13907  for (i=0; i<len; i++) {
13908  const VALUE index = ibf_load_small_value(load, &reading_pos);
13909  rb_ary_push(ary, ibf_load_object(load, index));
13910  }
13911 
13912  if (header->frozen) rb_ary_freeze(ary);
13913 
13914  return ary;
13915 }
13916 
13917 static int
13918 ibf_dump_object_hash_i(st_data_t key, st_data_t val, st_data_t ptr)
13919 {
13920  struct ibf_dump *dump = (struct ibf_dump *)ptr;
13921 
13922  VALUE key_index = ibf_dump_object(dump, (VALUE)key);
13923  VALUE val_index = ibf_dump_object(dump, (VALUE)val);
13924 
13925  ibf_dump_write_small_value(dump, key_index);
13926  ibf_dump_write_small_value(dump, val_index);
13927  return ST_CONTINUE;
13928 }
13929 
13930 static void
13931 ibf_dump_object_hash(struct ibf_dump *dump, VALUE obj)
13932 {
13933  long len = RHASH_SIZE(obj);
13934  ibf_dump_write_small_value(dump, (VALUE)len);
13935 
13936  if (len > 0) rb_hash_foreach(obj, ibf_dump_object_hash_i, (VALUE)dump);
13937 }
13938 
13939 static VALUE
13940 ibf_load_object_hash(const struct ibf_load *load, const struct ibf_object_header *header, ibf_offset_t offset)
13941 {
13942  long len = (long)ibf_load_small_value(load, &offset);
13943  VALUE obj = rb_hash_new_with_size(len);
13944  int i;
13945 
13946  for (i = 0; i < len; i++) {
13947  VALUE key_index = ibf_load_small_value(load, &offset);
13948  VALUE val_index = ibf_load_small_value(load, &offset);
13949 
13950  VALUE key = ibf_load_object(load, key_index);
13951  VALUE val = ibf_load_object(load, val_index);
13952  rb_hash_aset(obj, key, val);
13953  }
13954  rb_hash_rehash(obj);
13955 
13956  if (header->internal) rb_obj_hide(obj);
13957  if (header->frozen) rb_obj_freeze(obj);
13958 
13959  return obj;
13960 }
13961 
/*
 * Dump a T_STRUCT object.  Only Range (a core struct) is supported: it
 * is written as an ibf_object_struct_range whose beg/end fields are
 * object-list indices.  Any other struct raises NotImplementedError.
 */
static void
ibf_dump_object_struct(struct ibf_dump *dump, VALUE obj)
{
    if (rb_obj_is_kind_of(obj, rb_cRange)) {
        struct ibf_object_struct_range range;
        VALUE beg, end;
        IBF_ZERO(range); /* zero padding bytes so the dump is deterministic */
        range.len = 3;
        range.class_index = 0;

        rb_range_values(obj, &beg, &end, &range.excl);
        range.beg = (long)ibf_dump_object(dump, beg);
        range.end = (long)ibf_dump_object(dump, end);

        IBF_W_ALIGN(struct ibf_object_struct_range);
        IBF_WV(range);
    }
    else {
        rb_raise(rb_eNotImpError, "ibf_dump_object_struct: unsupported class %"PRIsVALUE,
                 rb_class_name(CLASS_OF(obj)));
    }
}
13984 
13985 static VALUE
13986 ibf_load_object_struct(const struct ibf_load *load, const struct ibf_object_header *header, ibf_offset_t offset)
13987 {
13988  const struct ibf_object_struct_range *range = IBF_OBJBODY(struct ibf_object_struct_range, offset);
13989  VALUE beg = ibf_load_object(load, range->beg);
13990  VALUE end = ibf_load_object(load, range->end);
13991  VALUE obj = rb_range_new(beg, end, range->excl);
13992  if (header->internal) rb_obj_hide(obj);
13993  if (header->frozen) rb_obj_freeze(obj);
13994  return obj;
13995 }
13996 
13997 static void
13998 ibf_dump_object_bignum(struct ibf_dump *dump, VALUE obj)
13999 {
14000  ssize_t len = BIGNUM_LEN(obj);
14001  ssize_t slen = BIGNUM_SIGN(obj) > 0 ? len : len * -1;
14002  BDIGIT *d = BIGNUM_DIGITS(obj);
14003 
14004  (void)IBF_W(&slen, ssize_t, 1);
14005  IBF_WP(d, BDIGIT, len);
14006 }
14007 
14008 static VALUE
14009 ibf_load_object_bignum(const struct ibf_load *load, const struct ibf_object_header *header, ibf_offset_t offset)
14010 {
14011  const struct ibf_object_bignum *bignum = IBF_OBJBODY(struct ibf_object_bignum, offset);
14012  int sign = bignum->slen > 0;
14013  ssize_t len = sign > 0 ? bignum->slen : -1 * bignum->slen;
14014  const int big_unpack_flags = /* c.f. rb_big_unpack() */
14017  VALUE obj = rb_integer_unpack(bignum->digits, len, sizeof(BDIGIT), 0,
14018  big_unpack_flags |
14019  (sign == 0 ? INTEGER_PACK_NEGATIVE : 0));
14020  if (header->internal) rb_obj_hide(obj);
14021  if (header->frozen) rb_obj_freeze(obj);
14022  return obj;
14023 }
14024 
14025 static void
14026 ibf_dump_object_data(struct ibf_dump *dump, VALUE obj)
14027 {
14028  if (rb_data_is_encoding(obj)) {
14029  rb_encoding *enc = rb_to_encoding(obj);
14030  const char *name = rb_enc_name(enc);
14031  long len = strlen(name) + 1;
14032  long data[2];
14033  data[0] = IBF_OBJECT_DATA_ENCODING;
14034  data[1] = len;
14035  (void)IBF_W(data, long, 2);
14036  IBF_WP(name, char, len);
14037  }
14038  else {
14039  ibf_dump_object_unsupported(dump, obj);
14040  }
14041 }
14042 
14043 static VALUE
14044 ibf_load_object_data(const struct ibf_load *load, const struct ibf_object_header *header, ibf_offset_t offset)
14045 {
14046  const long *body = IBF_OBJBODY(long, offset);
14047  const enum ibf_object_data_type type = (enum ibf_object_data_type)body[0];
14048  /* const long len = body[1]; */
14049  const char *data = (const char *)&body[2];
14050 
14051  switch (type) {
14052  case IBF_OBJECT_DATA_ENCODING:
14053  {
14054  VALUE encobj = rb_enc_from_encoding(rb_enc_find(data));
14055  return encobj;
14056  }
14057  }
14058 
14059  return ibf_load_object_unsupported(load, header, offset);
14060 }
14061 
14062 static void
14063 ibf_dump_object_complex_rational(struct ibf_dump *dump, VALUE obj)
14064 {
14065  long data[2];
14066  data[0] = (long)ibf_dump_object(dump, RCOMPLEX(obj)->real);
14067  data[1] = (long)ibf_dump_object(dump, RCOMPLEX(obj)->imag);
14068 
14069  (void)IBF_W(data, long, 2);
14070 }
14071 
14072 static VALUE
14073 ibf_load_object_complex_rational(const struct ibf_load *load, const struct ibf_object_header *header, ibf_offset_t offset)
14074 {
14075  const struct ibf_object_complex_rational *nums = IBF_OBJBODY(struct ibf_object_complex_rational, offset);
14076  VALUE a = ibf_load_object(load, nums->a);
14077  VALUE b = ibf_load_object(load, nums->b);
14078  VALUE obj = header->type == T_COMPLEX ?
14079  rb_complex_new(a, b) : rb_rational_new(a, b);
14080 
14081  if (header->internal) rb_obj_hide(obj);
14082  if (header->frozen) rb_obj_freeze(obj);
14083  return obj;
14084 }
14085 
14086 static void
14087 ibf_dump_object_symbol(struct ibf_dump *dump, VALUE obj)
14088 {
14089  ibf_dump_object_string(dump, rb_sym2str(obj));
14090 }
14091 
14092 static VALUE
14093 ibf_load_object_symbol(const struct ibf_load *load, const struct ibf_object_header *header, ibf_offset_t offset)
14094 {
14095  ibf_offset_t reading_pos = offset;
14096 
14097  int encindex = (int)ibf_load_small_value(load, &reading_pos);
14098  const long len = (long)ibf_load_small_value(load, &reading_pos);
14099  const char *ptr = load->current_buffer->buff + reading_pos;
14100 
14101  if (encindex > RUBY_ENCINDEX_BUILTIN_MAX) {
14102  VALUE enc_name_str = ibf_load_object(load, encindex - RUBY_ENCINDEX_BUILTIN_MAX);
14103  encindex = rb_enc_find_index(RSTRING_PTR(enc_name_str));
14104  }
14105 
14106  ID id = rb_intern3(ptr, len, rb_enc_from_index(encindex));
14107  return ID2SYM(id);
14108 }
14109 
/*
 * Per-type serializer dispatch table, indexed by the object's
 * ruby_value_type (see ibf_dump_object_object).  Types with no IBF
 * representation map to ibf_dump_object_unsupported, which raises.
 */
typedef void (*ibf_dump_object_function)(struct ibf_dump *dump, VALUE obj);
static const ibf_dump_object_function dump_object_functions[RUBY_T_MASK+1] = {
    ibf_dump_object_unsupported, /* T_NONE */
    ibf_dump_object_unsupported, /* T_OBJECT */
    ibf_dump_object_class,       /* T_CLASS */
    ibf_dump_object_unsupported, /* T_MODULE */
    ibf_dump_object_float,       /* T_FLOAT */
    ibf_dump_object_string,      /* T_STRING */
    ibf_dump_object_regexp,      /* T_REGEXP */
    ibf_dump_object_array,       /* T_ARRAY */
    ibf_dump_object_hash,        /* T_HASH */
    ibf_dump_object_struct,      /* T_STRUCT */
    ibf_dump_object_bignum,      /* T_BIGNUM */
    ibf_dump_object_unsupported, /* T_FILE */
    ibf_dump_object_data,        /* T_DATA */
    ibf_dump_object_unsupported, /* T_MATCH */
    ibf_dump_object_complex_rational, /* T_COMPLEX */
    ibf_dump_object_complex_rational, /* T_RATIONAL */
    ibf_dump_object_unsupported, /* 0x10 */
    ibf_dump_object_unsupported, /* 0x11 T_NIL */
    ibf_dump_object_unsupported, /* 0x12 T_TRUE */
    ibf_dump_object_unsupported, /* 0x13 T_FALSE */
    ibf_dump_object_symbol,      /* 0x14 T_SYMBOL */
    ibf_dump_object_unsupported, /* T_FIXNUM */
    ibf_dump_object_unsupported, /* T_UNDEF */
    ibf_dump_object_unsupported, /* 0x17 */
    ibf_dump_object_unsupported, /* 0x18 */
    ibf_dump_object_unsupported, /* 0x19 */
    ibf_dump_object_unsupported, /* T_IMEMO 0x1a */
    ibf_dump_object_unsupported, /* T_NODE 0x1b */
    ibf_dump_object_unsupported, /* T_ICLASS 0x1c */
    ibf_dump_object_unsupported, /* T_ZOMBIE 0x1d */
    ibf_dump_object_unsupported, /* 0x1e */
    ibf_dump_object_unsupported, /* 0x1f */
};
14145 
14146 static void
14147 ibf_dump_object_object_header(struct ibf_dump *dump, const struct ibf_object_header header)
14148 {
14149  unsigned char byte =
14150  (header.type << 0) |
14151  (header.special_const << 5) |
14152  (header.frozen << 6) |
14153  (header.internal << 7);
14154 
14155  IBF_WV(byte);
14156 }
14157 
14158 static struct ibf_object_header
14159 ibf_load_object_object_header(const struct ibf_load *load, ibf_offset_t *offset)
14160 {
14161  unsigned char byte = ibf_load_byte(load, offset);
14162 
14163  struct ibf_object_header header;
14164  header.type = (byte >> 0) & 0x1f;
14165  header.special_const = (byte >> 5) & 0x01;
14166  header.frozen = (byte >> 6) & 0x01;
14167  header.internal = (byte >> 7) & 0x01;
14168 
14169  return header;
14170 }
14171 
/*
 * Dump one object into the current buffer and return the aligned offset
 * where its header byte begins.  Special constants (other than Symbols
 * and Floats, which have dedicated serializers) are written inline as a
 * small value; everything else is dispatched through
 * dump_object_functions[] by type.
 */
static ibf_offset_t
ibf_dump_object_object(struct ibf_dump *dump, VALUE obj)
{
    struct ibf_object_header obj_header;
    ibf_offset_t current_offset;
    IBF_ZERO(obj_header);
    obj_header.type = TYPE(obj);

    IBF_W_ALIGN(ibf_offset_t);
    current_offset = ibf_dump_pos(dump);

    if (SPECIAL_CONST_P(obj) &&
        ! (SYMBOL_P(obj) ||
           RB_FLOAT_TYPE_P(obj))) {
        obj_header.special_const = TRUE;
        obj_header.frozen = TRUE;
        obj_header.internal = TRUE;
        ibf_dump_object_object_header(dump, obj_header);
        ibf_dump_write_small_value(dump, obj);
    }
    else {
        /* hidden objects (class pointer of 0) are flagged internal */
        obj_header.internal = SPECIAL_CONST_P(obj) ? FALSE : (RBASIC_CLASS(obj) == 0) ? TRUE : FALSE;
        obj_header.special_const = FALSE;
        obj_header.frozen = FL_TEST(obj, FL_FREEZE) ? TRUE : FALSE;
        ibf_dump_object_object_header(dump, obj_header);
        (*dump_object_functions[obj_header.type])(dump, obj);
    }

    return current_offset;
}
14202 
/*
 * Per-type loader dispatch table; must stay index-aligned with
 * dump_object_functions above.
 */
typedef VALUE (*ibf_load_object_function)(const struct ibf_load *load, const struct ibf_object_header *header, ibf_offset_t offset);
static const ibf_load_object_function load_object_functions[RUBY_T_MASK+1] = {
    ibf_load_object_unsupported, /* T_NONE */
    ibf_load_object_unsupported, /* T_OBJECT */
    ibf_load_object_class,       /* T_CLASS */
    ibf_load_object_unsupported, /* T_MODULE */
    ibf_load_object_float,       /* T_FLOAT */
    ibf_load_object_string,      /* T_STRING */
    ibf_load_object_regexp,      /* T_REGEXP */
    ibf_load_object_array,       /* T_ARRAY */
    ibf_load_object_hash,        /* T_HASH */
    ibf_load_object_struct,      /* T_STRUCT */
    ibf_load_object_bignum,      /* T_BIGNUM */
    ibf_load_object_unsupported, /* T_FILE */
    ibf_load_object_data,        /* T_DATA */
    ibf_load_object_unsupported, /* T_MATCH */
    ibf_load_object_complex_rational, /* T_COMPLEX */
    ibf_load_object_complex_rational, /* T_RATIONAL */
    ibf_load_object_unsupported, /* 0x10 */
    ibf_load_object_unsupported, /* T_NIL */
    ibf_load_object_unsupported, /* T_TRUE */
    ibf_load_object_unsupported, /* T_FALSE */
    ibf_load_object_symbol,      /* 0x14 T_SYMBOL */
    ibf_load_object_unsupported, /* T_FIXNUM */
    ibf_load_object_unsupported, /* T_UNDEF */
    ibf_load_object_unsupported, /* 0x17 */
    ibf_load_object_unsupported, /* 0x18 */
    ibf_load_object_unsupported, /* 0x19 */
    ibf_load_object_unsupported, /* T_IMEMO 0x1a */
    ibf_load_object_unsupported, /* T_NODE 0x1b */
    ibf_load_object_unsupported, /* T_ICLASS 0x1c */
    ibf_load_object_unsupported, /* T_ZOMBIE 0x1d */
    ibf_load_object_unsupported, /* 0x1e */
    ibf_load_object_unsupported, /* 0x1f */
};
14238 
/*
 * Load the object at `object_index` from the current buffer's object
 * list, caching the result in obj_list so each object is materialized
 * at most once.  Index 0 is reserved and yields nil.
 */
static VALUE
ibf_load_object(const struct ibf_load *load, VALUE object_index)
{
    if (object_index == 0) {
        return Qnil;
    }
    else {
        /* check the pinned cache first */
        VALUE obj = pinned_list_fetch(load->current_buffer->obj_list, (long)object_index);
        if (!obj) {
            ibf_offset_t *offsets = (ibf_offset_t *)(load->current_buffer->obj_list_offset + load->current_buffer->buff);
            ibf_offset_t offset = offsets[object_index];
            const struct ibf_object_header header = ibf_load_object_object_header(load, &offset);

#if IBF_ISEQ_DEBUG
            fprintf(stderr, "ibf_load_object: list=%#x offsets=%p offset=%#x\n",
                    load->current_buffer->obj_list_offset, (void *)offsets, offset);
            fprintf(stderr, "ibf_load_object: type=%#x special=%d frozen=%d internal=%d\n",
                    header.type, header.special_const, header.frozen, header.internal);
#endif
            /* reject offsets pointing outside the buffer (corrupt input) */
            if (offset >= load->current_buffer->size) {
                rb_raise(rb_eIndexError, "object offset out of range: %u", offset);
            }

            if (header.special_const) {
                /* special constants were stored inline as a small value */
                ibf_offset_t reading_pos = offset;

                obj = ibf_load_small_value(load, &reading_pos);
            }
            else {
                obj = (*load_object_functions[header.type])(load, &header, offset);
            }

            pinned_list_store(load->current_buffer->obj_list, (long)object_index, obj);
        }
#if IBF_ISEQ_DEBUG
        fprintf(stderr, "ibf_load_object: index=%#"PRIxVALUE" obj=%#"PRIxVALUE"\n",
                object_index, obj);
#endif
        return obj;
    }
}
14280 
14282 {
14283  struct ibf_dump *dump;
14284  VALUE offset_list;
14285 };
14286 
14287 static int
14288 ibf_dump_object_list_i(st_data_t key, st_data_t val, st_data_t ptr)
14289 {
14290  VALUE obj = (VALUE)key;
14291  struct ibf_dump_object_list_arg *args = (struct ibf_dump_object_list_arg *)ptr;
14292 
14293  ibf_offset_t offset = ibf_dump_object_object(args->dump, obj);
14294  rb_ary_push(args->offset_list, UINT2NUM(offset));
14295 
14296  return ST_CONTINUE;
14297 }
14298 
/*
 * Dump every object registered in the current buffer's obj_table, then
 * write the aligned table of their offsets.  On return *obj_list_offset
 * is the position of that table and *obj_list_size its entry count.
 */
static void
ibf_dump_object_list(struct ibf_dump *dump, ibf_offset_t *obj_list_offset, unsigned int *obj_list_size)
{
    st_table *obj_table = dump->current_buffer->obj_table;
    VALUE offset_list = rb_ary_hidden_new(obj_table->num_entries);

    struct ibf_dump_object_list_arg args;
    args.dump = dump;
    args.offset_list = offset_list;

    /* first pass: dump the objects themselves, collecting offsets */
    st_foreach(obj_table, ibf_dump_object_list_i, (st_data_t)&args);

    /* second pass: write the offset index */
    IBF_W_ALIGN(ibf_offset_t);
    *obj_list_offset = ibf_dump_pos(dump);

    st_index_t size = obj_table->num_entries;
    st_index_t i;

    for (i=0; i<size; i++) {
        ibf_offset_t offset = NUM2UINT(RARRAY_AREF(offset_list, i));
        IBF_WV(offset);
    }

    *obj_list_size = (unsigned int)size;
}
14324 
14325 static void
14326 ibf_dump_mark(void *ptr)
14327 {
14328  struct ibf_dump *dump = (struct ibf_dump *)ptr;
14329  rb_gc_mark(dump->global_buffer.str);
14330 
14331  rb_mark_set(dump->global_buffer.obj_table);
14332  rb_mark_set(dump->iseq_table);
14333 }
14334 
14335 static void
14336 ibf_dump_free(void *ptr)
14337 {
14338  struct ibf_dump *dump = (struct ibf_dump *)ptr;
14339  if (dump->global_buffer.obj_table) {
14340  st_free_table(dump->global_buffer.obj_table);
14341  dump->global_buffer.obj_table = 0;
14342  }
14343  if (dump->iseq_table) {
14344  st_free_table(dump->iseq_table);
14345  dump->iseq_table = 0;
14346  }
14347 }
14348 
14349 static size_t
14350 ibf_dump_memsize(const void *ptr)
14351 {
14352  struct ibf_dump *dump = (struct ibf_dump *)ptr;
14353  size_t size = 0;
14354  if (dump->iseq_table) size += st_memsize(dump->iseq_table);
14355  if (dump->global_buffer.obj_table) size += st_memsize(dump->global_buffer.obj_table);
14356  return size;
14357 }
14358 
/* TypedData bindings for the dumper wrapper object. */
static const rb_data_type_t ibf_dump_type = {
    "ibf_dump",
    {ibf_dump_mark, ibf_dump_free, ibf_dump_memsize,},
    0, 0, RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_EMBEDDABLE
};
14364 
/*
 * Initialize a freshly allocated dumper.  Tables are NULLed first so a
 * GC triggered mid-setup never sees uninitialized pointers.
 */
static void
ibf_dump_setup(struct ibf_dump *dump, VALUE dumper_obj)
{
    dump->global_buffer.obj_table = NULL; // GC may run before a value is assigned
    dump->iseq_table = NULL;

    RB_OBJ_WRITE(dumper_obj, &dump->global_buffer.str, rb_str_new(0, 0));
    dump->global_buffer.obj_table = ibf_dump_object_table_new();
    dump->iseq_table = st_init_numtable(); /* need free */

    /* all writes initially target the global buffer */
    dump->current_buffer = &dump->global_buffer;
}
14377 
/*
 * Serialize a top-level iseq to the IBF binary format and return the
 * result as a String.  `opt`, when truthy, is an arbitrary String
 * appended as user data (retrievable via rb_iseq_ibf_load_extra_data).
 * Raises RuntimeError for non-toplevel iseqs or coverage-enabled ones.
 */
VALUE
rb_iseq_ibf_dump(const rb_iseq_t *iseq, VALUE opt)
{
    struct ibf_dump *dump;
    struct ibf_header header = {{0}};
    VALUE dump_obj;
    VALUE str;

    if (ISEQ_BODY(iseq)->parent_iseq != NULL ||
        ISEQ_BODY(iseq)->local_iseq != iseq) {
        rb_raise(rb_eRuntimeError, "should be top of iseq");
    }
    if (RTEST(ISEQ_COVERAGE(iseq))) {
        rb_raise(rb_eRuntimeError, "should not compile with coverage");
    }

    dump_obj = TypedData_Make_Struct(0, struct ibf_dump, &ibf_dump_type, dump);
    ibf_dump_setup(dump, dump_obj);

    /* reserve space for the header; real values are overwritten below */
    ibf_dump_write(dump, &header, sizeof(header));
    ibf_dump_iseq(dump, iseq);

    header.magic[0] = 'Y'; /* YARB */
    header.magic[1] = 'A';
    header.magic[2] = 'R';
    header.magic[3] = 'B';
    header.major_version = IBF_MAJOR_VERSION;
    header.minor_version = IBF_MINOR_VERSION;
    header.endian = IBF_ENDIAN_MARK;
    header.wordsize = (uint8_t)SIZEOF_VALUE;
    ibf_dump_iseq_list(dump, &header);
    ibf_dump_object_list(dump, &header.global_object_list_offset, &header.global_object_list_size);
    header.size = ibf_dump_pos(dump);

    if (RTEST(opt)) {
        VALUE opt_str = opt;
        const char *ptr = StringValuePtr(opt_str);
        header.extra_size = RSTRING_LENINT(opt_str);
        ibf_dump_write(dump, ptr, header.extra_size);
    }
    else {
        header.extra_size = 0;
    }

    /* patch the now-complete header over the placeholder at offset 0 */
    ibf_dump_overwrite(dump, &header, sizeof(header), 0);

    str = dump->global_buffer.str;
    RB_GC_GUARD(dump_obj);
    return str;
}
14428 
14429 static const ibf_offset_t *
14430 ibf_iseq_list(const struct ibf_load *load)
14431 {
14432  return (const ibf_offset_t *)(load->global_buffer.buff + load->header->iseq_list_offset);
14433 }
14434 
/*
 * Fully materialize a lazily-loaded iseq in place: load its body from
 * the buffer, clear its compile data, and drop the NOT_LOADED_YET flag.
 * load->iseq is saved/restored so nested loads keep a correct "current
 * iseq" for cross-references.
 */
void
rb_ibf_load_iseq_complete(rb_iseq_t *iseq)
{
    struct ibf_load *load = RTYPEDDATA_DATA(iseq->aux.loader.obj);
    rb_iseq_t *prev_src_iseq = load->iseq;
    ibf_offset_t offset = ibf_iseq_list(load)[iseq->aux.loader.index];
    load->iseq = iseq;
#if IBF_ISEQ_DEBUG
    fprintf(stderr, "rb_ibf_load_iseq_complete: index=%#x offset=%#x size=%#x\n",
            iseq->aux.loader.index, offset,
            load->header->size);
#endif
    ibf_load_iseq_each(load, iseq, offset);
    ISEQ_COMPILE_DATA_CLEAR(iseq);
    FL_UNSET((VALUE)iseq, ISEQ_NOT_LOADED_YET);
    rb_iseq_init_trace(iseq);
    load->iseq = prev_src_iseq;
}
14453 
#if USE_LAZY_LOAD
/* Called on first use of a lazily-loaded iseq: finish loading its body. */
const rb_iseq_t *
rb_iseq_complete(const rb_iseq_t *iseq)
{
    rb_ibf_load_iseq_complete((rb_iseq_t *)iseq);
    return iseq;
}
#endif
14462 
/*
 * Resolve an iseq reference from the dump.  `index_iseq` is actually an
 * integer index smuggled through a pointer; -1 means "no iseq" (NULL).
 * Otherwise return the cached iseq for that index, or allocate a stub
 * marked ISEQ_NOT_LOADED_YET and — unless lazy loading is in effect —
 * load it immediately.
 */
static rb_iseq_t *
ibf_load_iseq(const struct ibf_load *load, const rb_iseq_t *index_iseq)
{
    int iseq_index = (int)(VALUE)index_iseq;

#if IBF_ISEQ_DEBUG
    fprintf(stderr, "ibf_load_iseq: index_iseq=%p iseq_list=%p\n",
            (void *)index_iseq, (void *)load->iseq_list);
#endif
    if (iseq_index == -1) {
        return NULL;
    }
    else {
        VALUE iseqv = pinned_list_fetch(load->iseq_list, iseq_index);

#if IBF_ISEQ_DEBUG
        fprintf(stderr, "ibf_load_iseq: iseqv=%p\n", (void *)iseqv);
#endif
        if (iseqv) {
            /* already materialized (or stubbed) */
            return (rb_iseq_t *)iseqv;
        }
        else {
            rb_iseq_t *iseq = iseq_imemo_alloc();
#if IBF_ISEQ_DEBUG
            fprintf(stderr, "ibf_load_iseq: new iseq=%p\n", (void *)iseq);
#endif
            FL_SET((VALUE)iseq, ISEQ_NOT_LOADED_YET);
            iseq->aux.loader.obj = load->loader_obj;
            iseq->aux.loader.index = iseq_index;
#if IBF_ISEQ_DEBUG
            fprintf(stderr, "ibf_load_iseq: iseq=%p loader_obj=%p index=%d\n",
                    (void *)iseq, (void *)load->loader_obj, iseq_index);
#endif
            pinned_list_store(load->iseq_list, iseq_index, (VALUE)iseq);

            /* eager load when lazy loading is off, or while defining builtins */
            if (!USE_LAZY_LOAD || GET_VM()->builtin_function_table) {
#if IBF_ISEQ_DEBUG
                fprintf(stderr, "ibf_load_iseq: loading iseq=%p\n", (void *)iseq);
#endif
                rb_ibf_load_iseq_complete(iseq);
            }

#if IBF_ISEQ_DEBUG
            fprintf(stderr, "ibf_load_iseq: iseq=%p loaded %p\n",
                    (void *)iseq, (void *)load->iseq);
#endif
            return iseq;
        }
    }
}
14513 
/*
 * Initialize a loader over a raw byte buffer.  The buffer must outlive
 * the loader; `loader_obj` is the TypedData wrapper used for write
 * barriers.  Validates magic, version, endianness, word size, and the
 * alignment of the embedded offset tables; raises on any mismatch.
 */
static void
ibf_load_setup_bytes(struct ibf_load *load, VALUE loader_obj, const char *bytes, size_t size)
{
    struct ibf_header *header = (struct ibf_header *)bytes;
    load->loader_obj = loader_obj;
    load->global_buffer.buff = bytes;
    load->header = header;
    load->global_buffer.size = header->size;
    load->global_buffer.obj_list_offset = header->global_object_list_offset;
    load->global_buffer.obj_list_size = header->global_object_list_size;
    RB_OBJ_WRITE(loader_obj, &load->iseq_list, pinned_list_new(header->iseq_list_size));
    RB_OBJ_WRITE(loader_obj, &load->global_buffer.obj_list, pinned_list_new(load->global_buffer.obj_list_size));
    load->iseq = NULL;

    load->current_buffer = &load->global_buffer;

    if (size < header->size) {
        rb_raise(rb_eRuntimeError, "broken binary format");
    }
    if (strncmp(header->magic, "YARB", 4) != 0) {
        rb_raise(rb_eRuntimeError, "unknown binary format");
    }
    if (header->major_version != IBF_MAJOR_VERSION ||
        header->minor_version != IBF_MINOR_VERSION) {
        rb_raise(rb_eRuntimeError, "unmatched version file (%u.%u for %u.%u)",
                 header->major_version, header->minor_version, IBF_MAJOR_VERSION, IBF_MINOR_VERSION);
    }
    if (header->endian != IBF_ENDIAN_MARK) {
        rb_raise(rb_eRuntimeError, "unmatched endian: %c", header->endian);
    }
    if (header->wordsize != SIZEOF_VALUE) {
        rb_raise(rb_eRuntimeError, "unmatched word size: %d", header->wordsize);
    }
    if (header->iseq_list_offset % RUBY_ALIGNOF(ibf_offset_t)) {
        rb_raise(rb_eArgError, "unaligned iseq list offset: %u",
                 header->iseq_list_offset);
    }
    if (load->global_buffer.obj_list_offset % RUBY_ALIGNOF(ibf_offset_t)) {
        rb_raise(rb_eArgError, "unaligned object list offset: %u",
                 load->global_buffer.obj_list_offset);
    }
}
14556 
/*
 * Initialize a loader from a Ruby String.  Under lazy loading the
 * string is copied first so later mutation of the caller's string
 * cannot corrupt iseqs that are loaded on demand.
 */
static void
ibf_load_setup(struct ibf_load *load, VALUE loader_obj, VALUE str)
{
    StringValue(str);

    if (RSTRING_LENINT(str) < (int)sizeof(struct ibf_header)) {
        rb_raise(rb_eRuntimeError, "broken binary format");
    }

    if (USE_LAZY_LOAD) {
        str = rb_str_new(RSTRING_PTR(str), RSTRING_LEN(str));
    }

    ibf_load_setup_bytes(load, loader_obj, StringValuePtr(str), RSTRING_LEN(str));
    /* keep the backing string alive for the lifetime of the loader */
    RB_OBJ_WRITE(loader_obj, &load->str, str);
}
14573 
14574 static void
14575 ibf_loader_mark(void *ptr)
14576 {
14577  struct ibf_load *load = (struct ibf_load *)ptr;
14578  rb_gc_mark(load->str);
14579  rb_gc_mark(load->iseq_list);
14580  rb_gc_mark(load->global_buffer.obj_list);
14581 }
14582 
/* GC free callback: the loader struct itself is heap-allocated. */
static void
ibf_loader_free(void *ptr)
{
    ruby_xfree(ptr);
}
14589 
14590 static size_t
14591 ibf_loader_memsize(const void *ptr)
14592 {
14593  return sizeof(struct ibf_load);
14594 }
14595 
/* TypedData bindings for the loader wrapper object. */
static const rb_data_type_t ibf_load_type = {
    "ibf_loader",
    {ibf_loader_mark, ibf_loader_free, ibf_loader_memsize,},
    0, 0, RUBY_TYPED_WB_PROTECTED | RUBY_TYPED_FREE_IMMEDIATELY
};
14601 
14602 const rb_iseq_t *
14603 rb_iseq_ibf_load(VALUE str)
14604 {
14605  struct ibf_load *load;
14606  rb_iseq_t *iseq;
14607  VALUE loader_obj = TypedData_Make_Struct(0, struct ibf_load, &ibf_load_type, load);
14608 
14609  ibf_load_setup(load, loader_obj, str);
14610  iseq = ibf_load_iseq(load, 0);
14611 
14612  RB_GC_GUARD(loader_obj);
14613  return iseq;
14614 }
14615 
14616 const rb_iseq_t *
14617 rb_iseq_ibf_load_bytes(const char *bytes, size_t size)
14618 {
14619  struct ibf_load *load;
14620  rb_iseq_t *iseq;
14621  VALUE loader_obj = TypedData_Make_Struct(0, struct ibf_load, &ibf_load_type, load);
14622 
14623  ibf_load_setup_bytes(load, loader_obj, bytes, size);
14624  iseq = ibf_load_iseq(load, 0);
14625 
14626  RB_GC_GUARD(loader_obj);
14627  return iseq;
14628 }
14629 
14630 VALUE
14631 rb_iseq_ibf_load_extra_data(VALUE str)
14632 {
14633  struct ibf_load *load;
14634  VALUE loader_obj = TypedData_Make_Struct(0, struct ibf_load, &ibf_load_type, load);
14635  VALUE extra_str;
14636 
14637  ibf_load_setup(load, loader_obj, str);
14638  extra_str = rb_str_new(load->global_buffer.buff + load->header->size, load->header->extra_size);
14639  RB_GC_GUARD(loader_obj);
14640  return extra_str;
14641 }
14642 
14643 #include "prism_compile.c"
#define RUBY_ASSERT(...)
Asserts that the given expression is truthy if and only if RUBY_DEBUG is truthy.
Definition: assert.h:219
#define LONG_LONG
Definition: long_long.h:38
#define RUBY_ALIGNOF
Wraps (or simulates) alignof.
Definition: stdalign.h:28
#define RUBY_EVENT_END
Encountered an end of a class clause.
Definition: event.h:40
#define RUBY_EVENT_C_CALL
A method, written in C, is called.
Definition: event.h:43
#define RUBY_EVENT_B_RETURN
Encountered a next statement.
Definition: event.h:56
#define RUBY_EVENT_CLASS
Encountered a new class.
Definition: event.h:39
#define RUBY_EVENT_NONE
No events.
Definition: event.h:37
#define RUBY_EVENT_LINE
Encountered a new line.
Definition: event.h:38
#define RUBY_EVENT_RETURN
Encountered a return statement.
Definition: event.h:42
#define RUBY_EVENT_C_RETURN
Return from a method, written in C.
Definition: event.h:44
#define RUBY_EVENT_B_CALL
Encountered an yield statement.
Definition: event.h:55
uint32_t rb_event_flag_t
Represents event(s).
Definition: event.h:108
#define RUBY_EVENT_CALL
A method, written in Ruby, is called.
Definition: event.h:41
#define RUBY_EVENT_RESCUE
Encountered a rescue statement.
Definition: event.h:61
#define RBIMPL_ATTR_FORMAT(x, y, z)
Wraps (or simulates) __attribute__((format))
Definition: format.h:27
#define rb_str_new2
Old name of rb_str_new_cstr.
Definition: string.h:1675
#define T_COMPLEX
Old name of RUBY_T_COMPLEX.
Definition: value_type.h:59
#define TYPE(_)
Old name of rb_type.
Definition: value_type.h:108
#define NUM2ULONG
Old name of RB_NUM2ULONG.
Definition: long.h:52
#define NUM2LL
Old name of RB_NUM2LL.
Definition: long_long.h:34
#define REALLOC_N
Old name of RB_REALLOC_N.
Definition: memory.h:398
#define ALLOCV
Old name of RB_ALLOCV.
Definition: memory.h:399
#define RFLOAT_VALUE
Old name of rb_float_value.
Definition: double.h:28
#define T_STRING
Old name of RUBY_T_STRING.
Definition: value_type.h:78
#define xfree
Old name of ruby_xfree.
Definition: xmalloc.h:58
#define Qundef
Old name of RUBY_Qundef.
#define INT2FIX
Old name of RB_INT2FIX.
Definition: long.h:48
#define T_NIL
Old name of RUBY_T_NIL.
Definition: value_type.h:72
#define UNREACHABLE
Old name of RBIMPL_UNREACHABLE.
Definition: assume.h:28
#define T_FLOAT
Old name of RUBY_T_FLOAT.
Definition: value_type.h:64
#define ID2SYM
Old name of RB_ID2SYM.
Definition: symbol.h:44
#define T_BIGNUM
Old name of RUBY_T_BIGNUM.
Definition: value_type.h:57
#define SPECIAL_CONST_P
Old name of RB_SPECIAL_CONST_P.
#define OBJ_FREEZE
Old name of RB_OBJ_FREEZE.
Definition: fl_type.h:135
#define ULONG2NUM
Old name of RB_ULONG2NUM.
Definition: long.h:60
#define UNREACHABLE_RETURN
Old name of RBIMPL_UNREACHABLE_RETURN.
Definition: assume.h:29
#define SYM2ID
Old name of RB_SYM2ID.
Definition: symbol.h:45
#define FIX2UINT
Old name of RB_FIX2UINT.
Definition: int.h:42
#define ZALLOC
Old name of RB_ZALLOC.
Definition: memory.h:397
#define CLASS_OF
Old name of rb_class_of.
Definition: globals.h:203
#define FIXABLE
Old name of RB_FIXABLE.
Definition: fixnum.h:25
#define xmalloc
Old name of ruby_xmalloc.
Definition: xmalloc.h:53
#define LONG2FIX
Old name of RB_INT2FIX.
Definition: long.h:49
#define FIX2INT
Old name of RB_FIX2INT.
Definition: int.h:41
#define NUM2UINT
Old name of RB_NUM2UINT.
Definition: int.h:45
#define ZALLOC_N
Old name of RB_ZALLOC_N.
Definition: memory.h:396
#define ASSUME
Old name of RBIMPL_ASSUME.
Definition: assume.h:27
#define T_RATIONAL
Old name of RUBY_T_RATIONAL.
Definition: value_type.h:76
#define T_HASH
Old name of RUBY_T_HASH.
Definition: value_type.h:65
#define ALLOC_N
Old name of RB_ALLOC_N.
Definition: memory.h:394
#define FL_SET
Old name of RB_FL_SET.
Definition: fl_type.h:129
#define Qtrue
Old name of RUBY_Qtrue.
#define NUM2INT
Old name of RB_NUM2INT.
Definition: int.h:44
#define Qnil
Old name of RUBY_Qnil.
#define Qfalse
Old name of RUBY_Qfalse.
#define FIX2LONG
Old name of RB_FIX2LONG.
Definition: long.h:46
#define T_ARRAY
Old name of RUBY_T_ARRAY.
Definition: value_type.h:56
#define NIL_P
Old name of RB_NIL_P.
#define T_SYMBOL
Old name of RUBY_T_SYMBOL.
Definition: value_type.h:80
#define DBL2NUM
Old name of rb_float_new.
Definition: double.h:29
#define BUILTIN_TYPE
Old name of RB_BUILTIN_TYPE.
Definition: value_type.h:85
#define FL_TEST
Old name of RB_FL_TEST.
Definition: fl_type.h:131
#define FL_FREEZE
Old name of RUBY_FL_FREEZE.
Definition: fl_type.h:67
#define NUM2LONG
Old name of RB_NUM2LONG.
Definition: long.h:51
#define FL_UNSET
Old name of RB_FL_UNSET.
Definition: fl_type.h:133
#define UINT2NUM
Old name of RB_UINT2NUM.
Definition: int.h:46
#define FIXNUM_P
Old name of RB_FIXNUM_P.
#define CONST_ID
Old name of RUBY_CONST_ID.
Definition: symbol.h:47
#define ALLOCV_END
Old name of RB_ALLOCV_END.
Definition: memory.h:401
#define SYMBOL_P
Old name of RB_SYMBOL_P.
Definition: value_type.h:88
#define T_REGEXP
Old name of RUBY_T_REGEXP.
Definition: value_type.h:77
#define ruby_debug
This variable controls whether the interpreter is in debug mode.
Definition: error.h:486
void rb_raise(VALUE exc, const char *fmt,...)
Exception entry point.
Definition: error.c:3635
VALUE rb_eNotImpError
NotImplementedError exception.
Definition: error.c:1418
void rb_bug(const char *fmt,...)
Interpreter panic switch.
Definition: error.c:1089
VALUE rb_eStandardError
StandardError exception.
Definition: error.c:1405
void rb_set_errinfo(VALUE err)
Sets the current exception ($!) to the given value.
Definition: eval.c:1926
VALUE rb_eTypeError
TypeError exception.
Definition: error.c:1408
VALUE rb_eNoMatchingPatternError
NoMatchingPatternError exception.
Definition: error.c:1421
void rb_exc_fatal(VALUE mesg)
Raises a fatal error in the current thread.
Definition: eval.c:689
VALUE rb_eRuntimeError
RuntimeError exception.
Definition: error.c:1406
void rb_warn(const char *fmt,...)
Identical to rb_warning(), except it reports unless $VERBOSE is nil.
Definition: error.c:466
VALUE rb_eNoMatchingPatternKeyError
NoMatchingPatternKeyError exception.
Definition: error.c:1422
VALUE rb_eArgError
ArgumentError exception.
Definition: error.c:1409
VALUE rb_eIndexError
IndexError exception.
Definition: error.c:1410
VALUE rb_eSyntaxError
SyntaxError exception.
Definition: error.c:1425
@ RB_WARN_CATEGORY_STRICT_UNUSED_BLOCK
Warning is for checking unused block strictly.
Definition: error.h:57
VALUE rb_obj_reveal(VALUE obj, VALUE klass)
Make a hidden object visible again.
Definition: object.c:113
VALUE rb_cArray
Array class.
Definition: array.c:40
VALUE rb_obj_hide(VALUE obj)
Make the object invisible from Ruby code.
Definition: object.c:104
VALUE rb_cHash
Hash class.
Definition: hash.c:113
VALUE rb_inspect(VALUE obj)
Generates a human-readable textual representation of the given object.
Definition: object.c:680
VALUE rb_cRange
Range class.
Definition: range.c:31
VALUE rb_obj_is_kind_of(VALUE obj, VALUE klass)
Queries if the given object is an instance (of possibly descendants) of the given class.
Definition: object.c:865
VALUE rb_obj_freeze(VALUE obj)
Just calls rb_obj_freeze_inline() inside.
Definition: object.c:1260
#define RB_OBJ_WRITTEN(old, oldv, young)
Identical to RB_OBJ_WRITE(), except it doesn't write any values, but only a WB declaration.
Definition: gc.h:615
#define RB_OBJ_WRITE(old, slot, young)
Declaration of a "back" pointer.
Definition: gc.h:603
int rb_enc_get_index(VALUE obj)
Queries the index of the encoding of the passed object, if any.
Definition: encoding.c:920
rb_encoding * rb_enc_find(const char *name)
Identical to rb_find_encoding(), except it takes a C's string instead of Ruby's.
Definition: encoding.c:859
rb_encoding * rb_to_encoding(VALUE obj)
Identical to rb_find_encoding(), except it raises an exception instead of returning NULL.
Definition: encoding.c:323
VALUE rb_enc_from_encoding(rb_encoding *enc)
Queries the Ruby-level counterpart instance of rb_cEncoding that corresponds to the passed encoding.
Definition: encoding.c:182
rb_encoding * rb_enc_from_index(int idx)
Identical to rb_find_encoding(), except it takes an encoding index instead of a Ruby object.
Definition: encoding.c:402
static const char * rb_enc_name(rb_encoding *enc)
Queries the (canonical) name of the passed encoding.
Definition: encoding.h:417
int rb_enc_find_index(const char *name)
Queries the index of the encoding.
Definition: encoding.c:824
VALUE rb_enc_str_new(const char *ptr, long len, rb_encoding *enc)
Identical to rb_str_new(), except it additionally takes an encoding.
Definition: string.c:1068
ID rb_intern3(const char *name, long len, rb_encoding *enc)
Identical to rb_intern2(), except it additionally takes an encoding.
Definition: symbol.c:752
void rb_gc_mark(VALUE obj)
Marks an object.
Definition: gc.c:2109
void rb_memerror(void)
Triggers out-of-memory error.
Definition: gc.c:4188
void rb_mark_set(struct st_table *tbl)
Identical to rb_mark_hash(), except it marks only keys of the table and leave their associated values...
Definition: gc.c:2205
VALUE rb_ary_reverse(VALUE ary)
Destructively reverses the passed array in-place.
Definition: array.c:3119
VALUE rb_ary_dup(VALUE ary)
Duplicates an array.
Definition: array.c:2777
VALUE rb_ary_unshift(VALUE ary, VALUE elem)
Destructively prepends the passed item at the beginning of the passed array.
Definition: array.c:1719
VALUE rb_ary_cat(VALUE ary, const VALUE *train, long len)
Destructively appends multiple elements at the end of the array.
Definition: array.c:1397
VALUE rb_ary_new(void)
Allocates a new, empty array.
Definition: array.c:747
VALUE rb_ary_new_capa(long capa)
Identical to rb_ary_new(), except it additionally specifies how many rooms of objects it should alloc...
Definition: array.c:741
VALUE rb_ary_hidden_new(long capa)
Allocates a hidden (no class) empty array.
Definition: array.c:859
VALUE rb_ary_clear(VALUE ary)
Destructively removes everything form an array.
Definition: array.c:4735
VALUE rb_ary_push(VALUE ary, VALUE elem)
Special case of rb_ary_cat() that it adds only one element.
Definition: array.c:1384
VALUE rb_ary_freeze(VALUE obj)
Freeze an array, preventing further modifications.
Definition: array.c:648
VALUE rb_ary_new_from_args(long n,...)
Constructs an array from the passed objects.
Definition: array.c:753
VALUE rb_ary_entry(VALUE ary, long off)
Queries an element of an array.
Definition: array.c:1737
VALUE rb_ary_join(VALUE ary, VALUE sep)
Recursively stringises the elements of the passed array, flattens that result, then joins the sequenc...
Definition: array.c:2891
void rb_ary_store(VALUE ary, long key, VALUE val)
Destructively stores the passed value to the passed array's passed index.
Definition: array.c:1207
#define INTEGER_PACK_NATIVE_BYTE_ORDER
Means either INTEGER_PACK_MSBYTE_FIRST or INTEGER_PACK_LSBYTE_FIRST, depending on the host processor'...
Definition: bignum.h:546
VALUE rb_integer_unpack(const void *words, size_t numwords, size_t wordsize, size_t nails, int flags)
Import an integer from a buffer.
Definition: bignum.c:3674
VALUE rb_big_cmp(VALUE lhs, VALUE rhs)
Compares the passed two bignums.
Definition: bignum.c:5449
VALUE rb_dbl2big(double d)
Converts a C's double into a bignum.
Definition: bignum.c:5285
#define INTEGER_PACK_NEGATIVE
Interprets the input as a signed negative number (unpack only).
Definition: bignum.h:564
#define INTEGER_PACK_LSWORD_FIRST
Stores/interprets the least significant word as the first word.
Definition: bignum.h:528
VALUE rb_complex_new(VALUE real, VALUE imag)
Constructs a Complex, by first multiplying the imaginary part with 1i then adds it to the real part.
Definition: complex.c:1755
void rb_hash_bulk_insert(long argc, const VALUE *argv, VALUE hash)
Inserts a list of key-value pairs into a hash table at once.
Definition: hash.c:4766
void rb_hash_foreach(VALUE hash, int(*func)(VALUE key, VALUE val, VALUE arg), VALUE arg)
Iterates over a hash.
VALUE rb_hash_freeze(VALUE obj)
Just another name of rb_obj_freeze.
Definition: hash.c:108
VALUE rb_hash_aref(VALUE hash, VALUE key)
Queries the given key in the given hash table.
Definition: hash.c:2073
VALUE rb_hash_aset(VALUE hash, VALUE key, VALUE val)
Inserts or replaces ("upsert"s) the objects into the given hash table.
Definition: hash.c:2893
VALUE rb_hash_lookup(VALUE hash, VALUE key)
Identical to rb_hash_aref(), except it always returns RUBY_Qnil for misshits.
Definition: hash.c:2099
VALUE rb_hash_dup(VALUE hash)
Duplicates a hash.
Definition: hash.c:1563
VALUE rb_hash_clear(VALUE hash)
Swipes everything out of the passed hash table.
Definition: hash.c:2820
VALUE rb_hash_new(void)
Creates a new, empty hash object.
Definition: hash.c:1475
int rb_is_const_id(ID id)
Classifies the given ID, then sees if it is a constant.
Definition: symbol.c:1063
ID rb_id_attrset(ID id)
Calculates an ID of attribute writer.
Definition: symbol.c:121
int rb_is_attrset_id(ID id)
Classifies the given ID, then sees if it is an attribute writer.
Definition: symbol.c:1087
int rb_range_values(VALUE range, VALUE *begp, VALUE *endp, int *exclp)
Deconstructs a range into its components.
Definition: range.c:1754
VALUE rb_range_new(VALUE beg, VALUE end, int excl)
Creates a new Range.
Definition: range.c:68
VALUE rb_rational_new(VALUE num, VALUE den)
Constructs a Rational, with reduction.
Definition: rational.c:1974
int rb_reg_options(VALUE re)
Queries the options of the passed regular expression.
Definition: re.c:4198
VALUE rb_str_append(VALUE dst, VALUE src)
Identical to rb_str_buf_append(), except it converts the right hand side before concatenating.
Definition: string.c:3677
VALUE rb_str_tmp_new(long len)
Allocates a "temporary" string.
Definition: string.c:1671
int rb_str_hash_cmp(VALUE str1, VALUE str2)
Compares two strings.
Definition: string.c:4046
VALUE rb_str_cat2(VALUE, const char *)
Just another name of rb_str_cat_cstr.
st_index_t rb_str_hash(VALUE str)
Calculates a hash value of a string.
Definition: string.c:4032
VALUE rb_str_cat(VALUE dst, const char *src, long srclen)
Destructively appends the passed contents to the string.
Definition: string.c:3445
int rb_str_cmp(VALUE lhs, VALUE rhs)
Compares two strings, as in strcmp(3).
Definition: string.c:4102
VALUE rb_str_concat(VALUE dst, VALUE src)
Identical to rb_str_append(), except it also accepts an integer as a codepoint.
Definition: string.c:3919
VALUE rb_str_freeze(VALUE str)
This is the implementation of String#freeze.
Definition: string.c:3178
VALUE rb_str_new(const char *ptr, long len)
Allocates an instance of rb_cString.
Definition: string.c:1050
VALUE rb_str_new_cstr(const char *ptr)
Identical to rb_str_new(), except it assumes the passed pointer is a pointer to a C string.
Definition: string.c:1074
VALUE rb_class_name(VALUE obj)
Queries the name of the given object's class.
Definition: variable.c:412
static ID rb_intern_const(const char *str)
This is a "tiny optimisation" over rb_intern().
Definition: symbol.h:276
VALUE rb_id2sym(ID id)
Allocates an instance of rb_cSymbol that has the given id.
Definition: symbol.c:951
const char * rb_id2name(ID id)
Retrieves the name mapped to the given id.
Definition: symbol.c:992
ID rb_intern(const char *name)
Finds or creates a symbol of the given name.
Definition: symbol.c:823
VALUE rb_sym2str(VALUE id)
Identical to rb_id2str(), except it takes an instance of rb_cSymbol rather than an ID.
Definition: symbol.c:970
ID rb_sym2id(VALUE obj)
Converts an instance of rb_cSymbol into an ID.
Definition: symbol.c:917
ID rb_intern_str(VALUE str)
Identical to rb_intern(), except it takes an instance of rb_cString.
Definition: symbol.c:829
VALUE rb_id2str(ID id)
Identical to rb_id2name(), except it returns a Ruby's String instead of C's.
Definition: symbol.c:986
RBIMPL_ATTR_NORETURN() void rb_eof_error(void)
Utility function to raise rb_eEOFError.
char * ptr
Pointer to the underlying memory region, of at least capa bytes.
Definition: io.h:2
int len
Length of the buffer.
Definition: io.h:8
VALUE rb_ractor_make_shareable(VALUE obj)
Destructively transforms the passed object so that multiple Ractors can share it.
Definition: ractor.c:3078
#define DECIMAL_SIZE_OF(expr)
An approximation of decimal representation size.
Definition: util.h:48
void ruby_qsort(void *, const size_t, const size_t, int(*)(const void *, const void *, void *), void *)
Reentrant implementation of quick sort.
VALUE rb_sprintf(const char *fmt,...)
Ruby's extended sprintf(3).
Definition: sprintf.c:1217
VALUE rb_str_catf(VALUE dst, const char *fmt,...)
Identical to rb_sprintf(), except it renders the output to the specified object rather than creating ...
Definition: sprintf.c:1240
#define rb_long2int
Just another name of rb_long2int_inline.
Definition: long.h:62
#define MEMCPY(p1, p2, type, n)
Handy macro to call memcpy.
Definition: memory.h:367
#define ALLOCA_N(type, n)
Definition: memory.h:287
#define MEMZERO(p, type, n)
Handy macro to erase a region of memory.
Definition: memory.h:355
#define RB_GC_GUARD(v)
Prevents premature destruction of local objects.
Definition: memory.h:162
#define RB_ALLOCV(v, n)
Identical to RB_ALLOCV_N(), except that it allocates a number of bytes and returns a void* .
Definition: memory.h:299
VALUE type(ANYARGS)
ANYARGS-ed function type.
Definition: cxxanyargs.hpp:56
int st_foreach(st_table *q, int_type *w, st_data_t e)
Iteration over the given table.
Definition: cxxanyargs.hpp:432
#define RARRAY_LEN
Just another name of rb_array_len.
Definition: rarray.h:51
static int RARRAY_LENINT(VALUE ary)
Identical to rb_array_len(), except it differs for the return type.
Definition: rarray.h:281
static void RARRAY_ASET(VALUE ary, long i, VALUE v)
Assigns an object in an array.
Definition: rarray.h:386
#define RARRAY_AREF(a, i)
Definition: rarray.h:403
#define RARRAY_CONST_PTR
Just another name of rb_array_const_ptr.
Definition: rarray.h:52
static VALUE RBASIC_CLASS(VALUE obj)
Queries the class of an object.
Definition: rbasic.h:150
#define RUBY_DEFAULT_FREE
This is a value you can set to RData::dfree.
Definition: rdata.h:78
void(* RUBY_DATA_FUNC)(void *)
This is the type of callbacks registered to RData.
Definition: rdata.h:104
#define RHASH_SIZE(h)
Queries the size of the hash.
Definition: rhash.h:69
static VALUE RREGEXP_SRC(VALUE rexp)
Convenient getter function.
Definition: rregexp.h:103
#define StringValue(v)
Ensures that the parameter object is a String.
Definition: rstring.h:66
#define StringValuePtr(v)
Identical to StringValue, except it returns a char*.
Definition: rstring.h:76
static char * RSTRING_PTR(VALUE str)
Queries the contents pointer of the string.
Definition: rstring.h:416
static int RSTRING_LENINT(VALUE str)
Identical to RSTRING_LEN(), except it differs for the return type.
Definition: rstring.h:468
static long RSTRING_LEN(VALUE str)
Queries the length of the string.
Definition: rstring.h:367
#define StringValueCStr(v)
Identical to StringValuePtr, except it additionally checks for the contents for viability as a C stri...
Definition: rstring.h:89
#define RTYPEDDATA_DATA(v)
Convenient getter macro.
Definition: rtypeddata.h:102
VALUE rb_data_typed_object_zalloc(VALUE klass, size_t size, const rb_data_type_t *type)
Identical to rb_data_typed_object_wrap(), except it allocates a new data region internally instead of...
Definition: gc.c:996
#define TypedData_Get_Struct(obj, type, data_type, sval)
Obtains a C struct from inside of a wrapper Ruby object.
Definition: rtypeddata.h:515
#define TypedData_Wrap_Struct(klass, data_type, sval)
Converts sval, a pointer to your struct, into a Ruby object.
Definition: rtypeddata.h:449
#define TypedData_Make_Struct(klass, type, data_type, sval)
Identical to TypedData_Wrap_Struct, except it allocates a new data region internally instead of takin...
Definition: rtypeddata.h:497
void rb_p(VALUE obj)
Inspects an object.
Definition: io.c:9000
#define RTEST
This is an old name of RB_TEST.
#define _(args)
This was a transition path from K&R to ANSI.
Definition: stdarg.h:35
Definition: proc.c:29
Internal header for Complex.
Definition: complex.h:13
Internal header for Rational.
Definition: rational.h:16
Definition: iseq.h:269
const ID * segments
A null-terminated list of ids, used to represent a constant's path idNULL is used to represent the ::...
Definition: vm_core.h:285
Definition: vm_core.h:288
Definition: iseq.h:240
This is the struct that holds necessary info for a struct.
Definition: rtypeddata.h:200
const char * wrap_struct_name
Name of structs of this kind.
Definition: rtypeddata.h:207
struct rb_iseq_constant_body::@154 param
parameter information
Definition: st.h:79
Definition: vm_core.h:297
uintptr_t ID
Type that represents a Ruby identifier such as a variable name.
Definition: value.h:52
#define SIZEOF_VALUE
Identical to sizeof(VALUE), except it is a macro that can also be used inside of preprocessor directi...
Definition: value.h:69
uintptr_t VALUE
Type that represents a Ruby object.
Definition: value.h:40
static bool RB_FLOAT_TYPE_P(VALUE obj)
Queries if the object is an instance of rb_cFloat.
Definition: value_type.h:264
static bool rb_integer_type_p(VALUE obj)
Queries if the object is an instance of rb_cInteger.
Definition: value_type.h:204
static bool RB_TYPE_P(VALUE obj, enum ruby_value_type t)
Queries if the given object is of given type.
Definition: value_type.h:376
@ RUBY_T_MASK
Bitmask of ruby_value_type.
Definition: value_type.h:145
void * ruby_xmalloc2(size_t nelems, size_t elemsiz)
Identical to ruby_xmalloc(), except it allocates nelems * elemsiz bytes.
Definition: gc.c:4233
void * ruby_xmalloc(size_t size)
Allocates a storage instance.
Definition: gc.c:4215
void ruby_xfree(void *ptr)
Deallocates a storage instance.
Definition: gc.c:4299