/* Ruby 4.0.0dev (2025-12-03 revision 3aa674ad9923a4a362a29840fad8421a6be7131f) */
/* prism_compile.c */
1#include "prism.h"
2#include "ruby/version.h"
3
/* The compiler's pairing of a source line number with a node identifier, used
 * in place of NODE-based locations. */
typedef struct {
 /* The line number of the node. */
 int32_t line;

 /* The unique identifier of the node. */
 uint32_t node_id;
} pm_node_location_t;

17/******************************************************************************/
18/* These macros operate on pm_node_location_t structs as opposed to NODE*s. */
19/******************************************************************************/
20
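/* These PUSH_* macros parallel the ADD_* macros used elsewhere in the
 * compiler, but take a pm_node_location_t. For example,
 * PUSH_INSN1(seq, location, putobject, obj) appends a single-operand
 * putobject instruction tagged with the location's line and node_id. */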
21#define PUSH_ADJUST(seq, location, label) \
22 ADD_ELEM((seq), (LINK_ELEMENT *) new_adjust_body(iseq, (label), (int) (location).line))
23
24#define PUSH_ADJUST_RESTORE(seq, label) \
25 ADD_ELEM((seq), (LINK_ELEMENT *) new_adjust_body(iseq, (label), -1))
26
27#define PUSH_INSN(seq, location, insn) \
28 ADD_ELEM((seq), (LINK_ELEMENT *) new_insn_body(iseq, (int) (location).line, (int) (location).node_id, BIN(insn), 0))
29
30#define PUSH_INSN1(seq, location, insn, op1) \
31 ADD_ELEM((seq), (LINK_ELEMENT *) new_insn_body(iseq, (int) (location).line, (int) (location).node_id, BIN(insn), 1, (VALUE)(op1)))
32
33#define PUSH_INSN2(seq, location, insn, op1, op2) \
34 ADD_ELEM((seq), (LINK_ELEMENT *) new_insn_body(iseq, (int) (location).line, (int) (location).node_id, BIN(insn), 2, (VALUE)(op1), (VALUE)(op2)))
35
36#define PUSH_INSN3(seq, location, insn, op1, op2, op3) \
37 ADD_ELEM((seq), (LINK_ELEMENT *) new_insn_body(iseq, (int) (location).line, (int) (location).node_id, BIN(insn), 3, (VALUE)(op1), (VALUE)(op2), (VALUE)(op3)))
38
39#define PUSH_INSNL(seq, location, insn, label) \
40 (PUSH_INSN1(seq, location, insn, label), LABEL_REF(label))
41
42#define PUSH_LABEL(seq, label) \
43 ADD_ELEM((seq), (LINK_ELEMENT *) (label))
44
45#define PUSH_SEND_R(seq, location, id, argc, block, flag, keywords) \
46 ADD_ELEM((seq), (LINK_ELEMENT *) new_insn_send(iseq, (int) (location).line, (int) (location).node_id, (id), (VALUE)(argc), (block), (VALUE)(flag), (keywords)))
47
48#define PUSH_SEND(seq, location, id, argc) \
49 PUSH_SEND_R((seq), location, (id), (argc), NULL, (VALUE)INT2FIX(0), NULL)
50
51#define PUSH_SEND_WITH_FLAG(seq, location, id, argc, flag) \
52 PUSH_SEND_R((seq), location, (id), (argc), NULL, (VALUE)(flag), NULL)
53
54#define PUSH_SEND_WITH_BLOCK(seq, location, id, argc, block) \
55 PUSH_SEND_R((seq), location, (id), (argc), (block), (VALUE)INT2FIX(0), NULL)
56
57#define PUSH_CALL(seq, location, id, argc) \
58 PUSH_SEND_R((seq), location, (id), (argc), NULL, (VALUE)INT2FIX(VM_CALL_FCALL), NULL)
59
60#define PUSH_CALL_WITH_BLOCK(seq, location, id, argc, block) \
61 PUSH_SEND_R((seq), location, (id), (argc), (block), (VALUE)INT2FIX(VM_CALL_FCALL), NULL)
62
63#define PUSH_TRACE(seq, event) \
64 ADD_ELEM((seq), (LINK_ELEMENT *) new_trace_body(iseq, (event), 0))
65
66#define PUSH_CATCH_ENTRY(type, ls, le, iseqv, lc) \
67 ADD_CATCH_ENTRY((type), (ls), (le), (iseqv), (lc))
68
69#define PUSH_SEQ(seq1, seq2) \
70 APPEND_LIST((seq1), (seq2))
71
72#define PUSH_SYNTHETIC_PUTNIL(seq, iseq) \
73 do { \
74 int lineno = ISEQ_COMPILE_DATA(iseq)->last_line; \
75 if (lineno == 0) lineno = FIX2INT(rb_iseq_first_lineno(iseq)); \
76 ADD_SYNTHETIC_INSN(seq, lineno, -1, putnil); \
77 } while (0)
78
79/******************************************************************************/
80/* These functions compile getlocal/setlocal instructions but operate on */
81/* prism locations instead of NODEs. */
82/******************************************************************************/
83
84static void
85pm_iseq_add_getlocal(rb_iseq_t *iseq, LINK_ANCHOR *const seq, int line, int node_id, int idx, int level)
86{
87 if (iseq_local_block_param_p(iseq, idx, level)) {
88 ADD_ELEM(seq, (LINK_ELEMENT *) new_insn_body(iseq, line, node_id, BIN(getblockparam), 2, INT2FIX((idx) + VM_ENV_DATA_SIZE - 1), INT2FIX(level)));
89 }
90 else {
91 ADD_ELEM(seq, (LINK_ELEMENT *) new_insn_body(iseq, line, node_id, BIN(getlocal), 2, INT2FIX((idx) + VM_ENV_DATA_SIZE - 1), INT2FIX(level)));
92 }
93 if (level > 0) access_outer_variables(iseq, level, iseq_lvar_id(iseq, idx, level), Qfalse);
94}
95
96static void
97pm_iseq_add_setlocal(rb_iseq_t *iseq, LINK_ANCHOR *const seq, int line, int node_id, int idx, int level)
98{
99 if (iseq_local_block_param_p(iseq, idx, level)) {
100 ADD_ELEM(seq, (LINK_ELEMENT *) new_insn_body(iseq, line, node_id, BIN(setblockparam), 2, INT2FIX((idx) + VM_ENV_DATA_SIZE - 1), INT2FIX(level)));
101 }
102 else {
103 ADD_ELEM(seq, (LINK_ELEMENT *) new_insn_body(iseq, line, node_id, BIN(setlocal), 2, INT2FIX((idx) + VM_ENV_DATA_SIZE - 1), INT2FIX(level)));
104 }
105 update_lvar_state(iseq, level, idx);
106 if (level > 0) access_outer_variables(iseq, level, iseq_lvar_id(iseq, idx, level), Qtrue);
107}
108
109#define PUSH_GETLOCAL(seq, location, idx, level) \
110 pm_iseq_add_getlocal(iseq, (seq), (int) (location).line, (int) (location).node_id, (idx), (level))
111
112#define PUSH_SETLOCAL(seq, location, idx, level) \
113 pm_iseq_add_setlocal(iseq, (seq), (int) (location).line, (int) (location).node_id, (idx), (level))
114
115/******************************************************************************/
116/* These are helper macros for the compiler. */
117/******************************************************************************/
118
119#define OLD_ISEQ NEW_ISEQ
120#undef NEW_ISEQ
121
122#define NEW_ISEQ(node, name, type, line_no) \
123 pm_new_child_iseq(iseq, (node), rb_fstring(name), 0, (type), (line_no))
124
125#define OLD_CHILD_ISEQ NEW_CHILD_ISEQ
126#undef NEW_CHILD_ISEQ
127
128#define NEW_CHILD_ISEQ(node, name, type, line_no) \
129 pm_new_child_iseq(iseq, (node), rb_fstring(name), iseq, (type), (line_no))
130
131#define PM_COMPILE(node) \
132 pm_compile_node(iseq, (node), ret, popped, scope_node)
133
134#define PM_COMPILE_INTO_ANCHOR(_ret, node) \
135 pm_compile_node(iseq, (node), _ret, popped, scope_node)
136
137#define PM_COMPILE_POPPED(node) \
138 pm_compile_node(iseq, (node), ret, true, scope_node)
139
140#define PM_COMPILE_NOT_POPPED(node) \
141 pm_compile_node(iseq, (node), ret, false, scope_node)
142
143#define PM_NODE_START_LOCATION(parser, node) \
144 ((pm_node_location_t) { .line = pm_newline_list_line(&(parser)->newline_list, ((const pm_node_t *) (node))->location.start, (parser)->start_line), .node_id = ((const pm_node_t *) (node))->node_id })
145
146#define PM_NODE_END_LOCATION(parser, node) \
147 ((pm_node_location_t) { .line = pm_newline_list_line(&(parser)->newline_list, ((const pm_node_t *) (node))->location.end, (parser)->start_line), .node_id = ((const pm_node_t *) (node))->node_id })
148
149#define PM_LOCATION_START_LOCATION(parser, location, id) \
150 ((pm_node_location_t) { .line = pm_newline_list_line(&(parser)->newline_list, (location)->start, (parser)->start_line), .node_id = id })
151
152#define PM_NODE_START_LINE_COLUMN(parser, node) \
153 pm_newline_list_line_column(&(parser)->newline_list, ((const pm_node_t *) (node))->location.start, (parser)->start_line)
154
155#define PM_NODE_END_LINE_COLUMN(parser, node) \
156 pm_newline_list_line_column(&(parser)->newline_list, ((const pm_node_t *) (node))->location.end, (parser)->start_line)
157
158#define PM_LOCATION_START_LINE_COLUMN(parser, location) \
159 pm_newline_list_line_column(&(parser)->newline_list, (location)->start, (parser)->start_line)
160
161static int
162pm_node_line_number(const pm_parser_t *parser, const pm_node_t *node)
163{
164 return (int) pm_newline_list_line(&parser->newline_list, node->location.start, parser->start_line);
165}
166
167static int
168pm_location_line_number(const pm_parser_t *parser, const pm_location_t *location) {
169 return (int) pm_newline_list_line(&parser->newline_list, location->start, parser->start_line);
170}
171
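/* Convert the value of a prism integer into a Ruby Integer. Small values are
 * held in a single word; larger values are stored as an array of 32-bit
 * words, which are rendered here as a hexadecimal string and converted with
 * to_i(16). Negative values are negated at the end, and bignum results are
 * marked shareable. */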
175static VALUE
176parse_integer_value(const pm_integer_t *integer)
177{
178 VALUE result;
179
180 if (integer->values == NULL) {
181 result = UINT2NUM(integer->value);
182 }
183 else {
184 VALUE string = rb_str_new(NULL, integer->length * 8);
185 unsigned char *bytes = (unsigned char *) RSTRING_PTR(string);
186
187 size_t offset = integer->length * 8;
188 for (size_t value_index = 0; value_index < integer->length; value_index++) {
189 uint32_t value = integer->values[value_index];
190
191 for (int index = 0; index < 8; index++) {
192 int byte = (value >> (4 * index)) & 0xf;
193 bytes[--offset] = byte < 10 ? byte + '0' : byte - 10 + 'a';
194 }
195 }
196
197 result = rb_funcall(string, rb_intern("to_i"), 1, UINT2NUM(16));
198 }
199
200 if (integer->negative) {
201 result = rb_funcall(result, rb_intern("-@"), 0);
202 }
203
204 if (!SPECIAL_CONST_P(result)) {
205 RB_OBJ_SET_SHAREABLE(result); // bignum
206 }
207
208 return result;
209}
210
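/* Convert an IntegerNode into a Ruby Integer. */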
214static inline VALUE
215parse_integer(const pm_integer_node_t *node)
216{
217 return parse_integer_value(&node->value);
218}
219
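/* Convert a FloatNode into a Ruby Float, marking non-flonum results as
 * shareable. */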
223static VALUE
224parse_float(const pm_float_node_t *node)
225{
226 VALUE val = DBL2NUM(node->value);
227 if (!FLONUM_P(val)) {
228 RB_OBJ_SET_SHAREABLE(val);
229 }
230 return val;
231}
232
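/* Convert a RationalNode into a Ruby Rational built from its numerator and
 * denominator, made Ractor-shareable. */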
239static VALUE
240parse_rational(const pm_rational_node_t *node)
241{
242 VALUE numerator = parse_integer_value(&node->numerator);
243 VALUE denominator = parse_integer_value(&node->denominator);
244
245 return rb_ractor_make_shareable(rb_rational_new(numerator, denominator));
246}
247
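/* Convert an ImaginaryNode into a Ruby Complex with a zero real part and an
 * imaginary part parsed from the underlying numeric node. */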
254static VALUE
255parse_imaginary(const pm_imaginary_node_t *node)
256{
257 VALUE imaginary_part;
258 switch (PM_NODE_TYPE(node->numeric)) {
259 case PM_FLOAT_NODE: {
260 imaginary_part = parse_float((const pm_float_node_t *) node->numeric);
261 break;
262 }
263 case PM_INTEGER_NODE: {
264 imaginary_part = parse_integer((const pm_integer_node_t *) node->numeric);
265 break;
266 }
267 case PM_RATIONAL_NODE: {
268 imaginary_part = parse_rational((const pm_rational_node_t *) node->numeric);
269 break;
270 }
271 default:
272 rb_bug("Unexpected numeric type on imaginary number %s\n", pm_node_type_to_str(PM_NODE_TYPE(node->numeric)));
273 }
274
275 return RB_OBJ_SET_SHAREABLE(rb_complex_raw(INT2FIX(0), imaginary_part));
276}
277
278static inline VALUE
279parse_string(const pm_scope_node_t *scope_node, const pm_string_t *string)
280{
281 return rb_enc_str_new((const char *) pm_string_source(string), pm_string_length(string), scope_node->encoding);
282}
283
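/* Create a Ruby string from a prism string, honoring any forced binary or
 * UTF-8 encoding flags on the node and falling back to the given default
 * encoding otherwise. */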
289static inline VALUE
290parse_string_encoded(const pm_node_t *node, const pm_string_t *string, rb_encoding *default_encoding)
291{
292 rb_encoding *encoding;
293
294 if (node->flags & PM_ENCODING_FLAGS_FORCED_BINARY_ENCODING) {
295 encoding = rb_ascii8bit_encoding();
296 }
297 else if (node->flags & PM_ENCODING_FLAGS_FORCED_UTF8_ENCODING) {
298 encoding = rb_utf8_encoding();
299 }
300 else {
301 encoding = default_encoding;
302 }
303
304 return rb_enc_str_new((const char *) pm_string_source(string), pm_string_length(string), encoding);
305}
306
307static inline VALUE
308parse_static_literal_string(rb_iseq_t *iseq, const pm_scope_node_t *scope_node, const pm_node_t *node, const pm_string_t *string)
309{
310 rb_encoding *encoding;
311
312 if (node->flags & PM_STRING_FLAGS_FORCED_BINARY_ENCODING) {
313 encoding = rb_ascii8bit_encoding();
314 }
315 else if (node->flags & PM_STRING_FLAGS_FORCED_UTF8_ENCODING) {
316 encoding = rb_utf8_encoding();
317 }
318 else {
319 encoding = scope_node->encoding;
320 }
321
322 VALUE value = rb_enc_literal_str((const char *) pm_string_source(string), pm_string_length(string), encoding);
324
325 if (ISEQ_COMPILE_DATA(iseq)->option->debug_frozen_string_literal || RTEST(ruby_debug)) {
326 int line_number = pm_node_line_number(scope_node->parser, node);
327 value = rb_ractor_make_shareable(rb_str_with_debug_created_info(value, rb_iseq_path(iseq), line_number));
328 }
329
330 return value;
331}
332
333static inline ID
334parse_string_symbol(const pm_scope_node_t *scope_node, const pm_symbol_node_t *symbol)
335{
336 rb_encoding *encoding;
337 if (symbol->base.flags & PM_SYMBOL_FLAGS_FORCED_UTF8_ENCODING) {
338 encoding = rb_utf8_encoding();
339 }
340 else if (symbol->base.flags & PM_SYMBOL_FLAGS_FORCED_BINARY_ENCODING) {
341 encoding = rb_ascii8bit_encoding();
342 }
343 else if (symbol->base.flags & PM_SYMBOL_FLAGS_FORCED_US_ASCII_ENCODING) {
344 encoding = rb_usascii_encoding();
345 }
346 else {
347 encoding = scope_node->encoding;
348 }
349
350 return rb_intern3((const char *) pm_string_source(&symbol->unescaped), pm_string_length(&symbol->unescaped), encoding);
351}
352
353static int
354pm_optimizable_range_item_p(const pm_node_t *node)
355{
356 return (!node || PM_NODE_TYPE_P(node, PM_INTEGER_NODE) || PM_NODE_TYPE_P(node, PM_NIL_NODE));
357}
358
360static VALUE
361parse_regexp_error(rb_iseq_t *iseq, int32_t line_number, const char *fmt, ...)
362{
363 va_list args;
364 va_start(args, fmt);
365 VALUE error = rb_syntax_error_append(Qnil, rb_iseq_path(iseq), line_number, -1, NULL, "%" PRIsVALUE, args);
366 va_end(args);
367 rb_exc_raise(error);
368}
369
370static VALUE
371parse_regexp_string_part(rb_iseq_t *iseq, const pm_scope_node_t *scope_node, const pm_node_t *node, const pm_string_t *unescaped, rb_encoding *implicit_regexp_encoding, rb_encoding *explicit_regexp_encoding)
372{
373 // If we were passed an explicit regexp encoding, then we need to double
374 // check that it's okay here for this fragment of the string.
375 rb_encoding *encoding;
376
377 if (explicit_regexp_encoding != NULL) {
378 encoding = explicit_regexp_encoding;
379 }
380 else if (node->flags & PM_STRING_FLAGS_FORCED_BINARY_ENCODING) {
381 encoding = rb_ascii8bit_encoding();
382 }
383 else if (node->flags & PM_STRING_FLAGS_FORCED_UTF8_ENCODING) {
384 encoding = rb_utf8_encoding();
385 }
386 else {
387 encoding = implicit_regexp_encoding;
388 }
389
390 VALUE string = rb_enc_str_new((const char *) pm_string_source(unescaped), pm_string_length(unescaped), encoding);
391 VALUE error = rb_reg_check_preprocess(string);
392
393 if (error != Qnil) parse_regexp_error(iseq, pm_node_line_number(scope_node->parser, node), "%" PRIsVALUE, rb_obj_as_string(error));
394 return string;
395}
396
397static VALUE
398pm_static_literal_concat(rb_iseq_t *iseq, const pm_node_list_t *nodes, const pm_scope_node_t *scope_node, rb_encoding *implicit_regexp_encoding, rb_encoding *explicit_regexp_encoding, bool top)
399{
400 VALUE current = Qnil;
401
402 for (size_t index = 0; index < nodes->size; index++) {
403 const pm_node_t *part = nodes->nodes[index];
404 VALUE string;
405
406 switch (PM_NODE_TYPE(part)) {
407 case PM_STRING_NODE:
408 if (implicit_regexp_encoding != NULL) {
409 if (top) {
410 string = parse_regexp_string_part(iseq, scope_node, part, &((const pm_string_node_t *) part)->unescaped, implicit_regexp_encoding, explicit_regexp_encoding);
411 }
412 else {
413 string = parse_string_encoded(part, &((const pm_string_node_t *) part)->unescaped, scope_node->encoding);
414 VALUE error = rb_reg_check_preprocess(string);
415 if (error != Qnil) parse_regexp_error(iseq, pm_node_line_number(scope_node->parser, part), "%" PRIsVALUE, rb_obj_as_string(error));
416 }
417 }
418 else {
419 string = parse_string_encoded(part, &((const pm_string_node_t *) part)->unescaped, scope_node->encoding);
420 }
421 break;
422 case PM_INTERPOLATED_STRING_NODE:
423 string = pm_static_literal_concat(iseq, &((const pm_interpolated_string_node_t *) part)->parts, scope_node, implicit_regexp_encoding, explicit_regexp_encoding, false);
424 break;
 case PM_EMBEDDED_STATEMENTS_NODE: {
 const pm_embedded_statements_node_t *cast = (const pm_embedded_statements_node_t *) part;
 string = pm_static_literal_concat(iseq, &cast->statements->body, scope_node, implicit_regexp_encoding, explicit_regexp_encoding, false);
428 break;
429 }
430 default:
431 RUBY_ASSERT(false && "unexpected node type in pm_static_literal_concat");
432 return Qnil;
433 }
434
435 if (current != Qnil) {
436 current = rb_str_concat(current, string);
437 }
438 else {
439 current = string;
440 }
441 }
442
443 return top ? rb_fstring(current) : current;
444}
445
446#define RE_OPTION_ENCODING_SHIFT 8
447#define RE_OPTION_ENCODING(encoding) (((encoding) & 0xFF) << RE_OPTION_ENCODING_SHIFT)
448#define ARG_ENCODING_NONE 32
449#define ARG_ENCODING_FIXED 16
450#define ENC_ASCII8BIT 1
451#define ENC_EUC_JP 2
452#define ENC_Windows_31J 3
453#define ENC_UTF8 4
454
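/* Translate the regular expression flags set by prism into the option bits
 * expected by the regexp compiler: encoding options plus the ignorecase,
 * multiline, and extended Oniguruma options. */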
459static int
460parse_regexp_flags(const pm_node_t *node)
461{
462 int flags = 0;
463
 // Check "no encoding" first so that the encoding flags don't get
 // clobbered. ARG_ENCODING_NONE is defined locally above, so it can be
 // set directly here.
467 if (PM_NODE_FLAG_P(node, PM_REGULAR_EXPRESSION_FLAGS_ASCII_8BIT)) {
468 flags |= ARG_ENCODING_NONE;
469 }
470
471 if (PM_NODE_FLAG_P(node, PM_REGULAR_EXPRESSION_FLAGS_EUC_JP)) {
472 flags |= (ARG_ENCODING_FIXED | RE_OPTION_ENCODING(ENC_EUC_JP));
473 }
474
475 if (PM_NODE_FLAG_P(node, PM_REGULAR_EXPRESSION_FLAGS_WINDOWS_31J)) {
476 flags |= (ARG_ENCODING_FIXED | RE_OPTION_ENCODING(ENC_Windows_31J));
477 }
478
479 if (PM_NODE_FLAG_P(node, PM_REGULAR_EXPRESSION_FLAGS_UTF_8)) {
480 flags |= (ARG_ENCODING_FIXED | RE_OPTION_ENCODING(ENC_UTF8));
481 }
482
483 if (PM_NODE_FLAG_P(node, PM_REGULAR_EXPRESSION_FLAGS_IGNORE_CASE)) {
484 flags |= ONIG_OPTION_IGNORECASE;
485 }
486
487 if (PM_NODE_FLAG_P(node, PM_REGULAR_EXPRESSION_FLAGS_MULTI_LINE)) {
488 flags |= ONIG_OPTION_MULTILINE;
489 }
490
491 if (PM_NODE_FLAG_P(node, PM_REGULAR_EXPRESSION_FLAGS_EXTENDED)) {
492 flags |= ONIG_OPTION_EXTEND;
493 }
494
495 return flags;
496}
497
498#undef RE_OPTION_ENCODING_SHIFT
499#undef RE_OPTION_ENCODING
500#undef ARG_ENCODING_FIXED
501#undef ARG_ENCODING_NONE
502#undef ENC_ASCII8BIT
503#undef ENC_EUC_JP
504#undef ENC_Windows_31J
505#undef ENC_UTF8
506
507static rb_encoding *
508parse_regexp_encoding(const pm_scope_node_t *scope_node, const pm_node_t *node)
509{
510 if (PM_NODE_FLAG_P(node, PM_REGULAR_EXPRESSION_FLAGS_FORCED_BINARY_ENCODING) || PM_NODE_FLAG_P(node, PM_REGULAR_EXPRESSION_FLAGS_ASCII_8BIT)) {
511 return rb_ascii8bit_encoding();
512 }
513 else if (PM_NODE_FLAG_P(node, PM_REGULAR_EXPRESSION_FLAGS_UTF_8)) {
514 return rb_utf8_encoding();
515 }
516 else if (PM_NODE_FLAG_P(node, PM_REGULAR_EXPRESSION_FLAGS_EUC_JP)) {
517 return rb_enc_get_from_index(ENCINDEX_EUC_JP);
518 }
519 else if (PM_NODE_FLAG_P(node, PM_REGULAR_EXPRESSION_FLAGS_WINDOWS_31J)) {
520 return rb_enc_get_from_index(ENCINDEX_Windows_31J);
521 }
522 else {
523 return NULL;
524 }
525}
526
527static VALUE
528parse_regexp(rb_iseq_t *iseq, const pm_scope_node_t *scope_node, const pm_node_t *node, VALUE string)
529{
530 VALUE errinfo = rb_errinfo();
531
532 int32_t line_number = pm_node_line_number(scope_node->parser, node);
533 VALUE regexp = rb_reg_compile(string, parse_regexp_flags(node), (const char *) pm_string_source(&scope_node->parser->filepath), line_number);
534
535 if (NIL_P(regexp)) {
536 VALUE message = rb_attr_get(rb_errinfo(), idMesg);
537 rb_set_errinfo(errinfo);
538
539 parse_regexp_error(iseq, line_number, "%" PRIsVALUE, message);
540 return Qnil;
541 }
542
543 return RB_OBJ_SET_SHAREABLE(rb_obj_freeze(regexp));
544}
545
546static inline VALUE
547parse_regexp_literal(rb_iseq_t *iseq, const pm_scope_node_t *scope_node, const pm_node_t *node, const pm_string_t *unescaped)
548{
549 rb_encoding *regexp_encoding = parse_regexp_encoding(scope_node, node);
550 if (regexp_encoding == NULL) regexp_encoding = scope_node->encoding;
551
552 VALUE string = rb_enc_str_new((const char *) pm_string_source(unescaped), pm_string_length(unescaped), regexp_encoding);
553 RB_OBJ_SET_SHAREABLE(string);
554 return parse_regexp(iseq, scope_node, node, string);
555}
556
557static inline VALUE
558parse_regexp_concat(rb_iseq_t *iseq, const pm_scope_node_t *scope_node, const pm_node_t *node, const pm_node_list_t *parts)
559{
560 rb_encoding *explicit_regexp_encoding = parse_regexp_encoding(scope_node, node);
561 rb_encoding *implicit_regexp_encoding = explicit_regexp_encoding != NULL ? explicit_regexp_encoding : scope_node->encoding;
562
563 VALUE string = pm_static_literal_concat(iseq, parts, scope_node, implicit_regexp_encoding, explicit_regexp_encoding, false);
564 return parse_regexp(iseq, scope_node, node, string);
565}
566
567static void pm_compile_node(rb_iseq_t *iseq, const pm_node_t *node, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node);
568
569static int
570pm_interpolated_node_compile(rb_iseq_t *iseq, const pm_node_list_t *parts, const pm_node_location_t *node_location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node, rb_encoding *implicit_regexp_encoding, rb_encoding *explicit_regexp_encoding, bool mutable_result, bool frozen_result)
571{
572 int stack_size = 0;
573 size_t parts_size = parts->size;
574 bool interpolated = false;
575
576 if (parts_size > 0) {
577 VALUE current_string = Qnil;
578 pm_node_location_t current_location = *node_location;
579
580 for (size_t index = 0; index < parts_size; index++) {
581 const pm_node_t *part = parts->nodes[index];
582
583 if (PM_NODE_TYPE_P(part, PM_STRING_NODE)) {
584 const pm_string_node_t *string_node = (const pm_string_node_t *) part;
585 VALUE string_value;
586
587 if (implicit_regexp_encoding == NULL) {
588 string_value = parse_string_encoded(part, &string_node->unescaped, scope_node->encoding);
589 }
590 else {
591 string_value = parse_regexp_string_part(iseq, scope_node, (const pm_node_t *) string_node, &string_node->unescaped, implicit_regexp_encoding, explicit_regexp_encoding);
592 }
593
594 if (RTEST(current_string)) {
595 current_string = rb_str_concat(current_string, string_value);
596 }
597 else {
598 current_string = string_value;
599 if (index != 0) current_location = PM_NODE_END_LOCATION(scope_node->parser, part);
600 }
601 }
602 else {
603 interpolated = true;
604
605 if (
606 PM_NODE_TYPE_P(part, PM_EMBEDDED_STATEMENTS_NODE) &&
607 ((const pm_embedded_statements_node_t *) part)->statements != NULL &&
608 ((const pm_embedded_statements_node_t *) part)->statements->body.size == 1 &&
609 PM_NODE_TYPE_P(((const pm_embedded_statements_node_t *) part)->statements->body.nodes[0], PM_STRING_NODE)
610 ) {
611 const pm_string_node_t *string_node = (const pm_string_node_t *) ((const pm_embedded_statements_node_t *) part)->statements->body.nodes[0];
612 VALUE string_value;
613
614 if (implicit_regexp_encoding == NULL) {
615 string_value = parse_string_encoded(part, &string_node->unescaped, scope_node->encoding);
616 }
617 else {
618 string_value = parse_regexp_string_part(iseq, scope_node, (const pm_node_t *) string_node, &string_node->unescaped, implicit_regexp_encoding, explicit_regexp_encoding);
619 }
620
621 if (RTEST(current_string)) {
622 current_string = rb_str_concat(current_string, string_value);
623 }
624 else {
625 current_string = string_value;
626 current_location = PM_NODE_START_LOCATION(scope_node->parser, part);
627 }
628 }
629 else {
630 if (!RTEST(current_string)) {
631 rb_encoding *encoding;
632
633 if (implicit_regexp_encoding != NULL) {
634 if (explicit_regexp_encoding != NULL) {
635 encoding = explicit_regexp_encoding;
636 }
637 else if (scope_node->parser->encoding == PM_ENCODING_US_ASCII_ENTRY) {
638 encoding = rb_ascii8bit_encoding();
639 }
640 else {
641 encoding = implicit_regexp_encoding;
642 }
643 }
644 else {
645 encoding = scope_node->encoding;
646 }
647
648 if (parts_size == 1) {
649 current_string = rb_enc_str_new(NULL, 0, encoding);
650 }
651 }
652
653 if (RTEST(current_string)) {
654 VALUE operand = rb_fstring(current_string);
655 PUSH_INSN1(ret, current_location, putobject, operand);
656 stack_size++;
657 }
658
659 PM_COMPILE_NOT_POPPED(part);
660
661 const pm_node_location_t current_location = PM_NODE_START_LOCATION(scope_node->parser, part);
662 PUSH_INSN(ret, current_location, dup);
663
664 {
665 const struct rb_callinfo *callinfo = new_callinfo(iseq, idTo_s, 0, VM_CALL_FCALL | VM_CALL_ARGS_SIMPLE, NULL, FALSE);
666 PUSH_INSN1(ret, current_location, objtostring, callinfo);
667 }
668
669 PUSH_INSN(ret, current_location, anytostring);
670
671 current_string = Qnil;
672 stack_size++;
673 }
674 }
675 }
676
677 if (RTEST(current_string)) {
678 current_string = rb_fstring(current_string);
679
680 if (stack_size == 0) {
681 if (frozen_result) {
682 PUSH_INSN1(ret, current_location, putobject, current_string);
683 } else if (mutable_result || interpolated) {
684 PUSH_INSN1(ret, current_location, putstring, current_string);
685 } else {
686 PUSH_INSN1(ret, current_location, putchilledstring, current_string);
687 }
688 } else {
689 PUSH_INSN1(ret, current_location, putobject, current_string);
690 }
691
692 current_string = Qnil;
693 stack_size++;
694 }
695 }
696 else {
697 PUSH_INSN(ret, *node_location, putnil);
698 }
699
700 return stack_size;
701}
702
703static void
704pm_compile_regexp_dynamic(rb_iseq_t *iseq, const pm_node_t *node, const pm_node_list_t *parts, const pm_node_location_t *node_location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
705{
706 rb_encoding *explicit_regexp_encoding = parse_regexp_encoding(scope_node, node);
707 rb_encoding *implicit_regexp_encoding = explicit_regexp_encoding != NULL ? explicit_regexp_encoding : scope_node->encoding;
708
709 int length = pm_interpolated_node_compile(iseq, parts, node_location, ret, popped, scope_node, implicit_regexp_encoding, explicit_regexp_encoding, false, false);
710 PUSH_INSN2(ret, *node_location, toregexp, INT2FIX(parse_regexp_flags(node) & 0xFF), INT2FIX(length));
711}
712
713static VALUE
714pm_source_file_value(const pm_source_file_node_t *node, const pm_scope_node_t *scope_node)
715{
716 const pm_string_t *filepath = &node->filepath;
717 size_t length = pm_string_length(filepath);
718
719 if (length > 0) {
720 rb_encoding *filepath_encoding = scope_node->filepath_encoding != NULL ? scope_node->filepath_encoding : rb_utf8_encoding();
721 return rb_enc_interned_str((const char *) pm_string_source(filepath), length, filepath_encoding);
722 }
723 else {
724 return rb_fstring_lit("<compiled>");
725 }
726}
727
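/* Return a frozen (fstring) copy of the given string, attaching debug
 * creation info (path and line) when frozen string literal debugging is
 * enabled. */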
732static VALUE
733pm_static_literal_string(rb_iseq_t *iseq, VALUE string, int line_number)
734{
735 if (ISEQ_COMPILE_DATA(iseq)->option->debug_frozen_string_literal || RTEST(ruby_debug)) {
736 VALUE str = rb_str_with_debug_created_info(string, rb_iseq_path(iseq), line_number);
737 RB_OBJ_SET_SHAREABLE(str);
738 return str;
739 }
740 else {
741 return rb_fstring(string);
742 }
743}
744
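/* Compile a static literal node into its Ruby value. Every node passed in is
 * expected to be flagged as a static literal, so compound literals (arrays
 * and hashes) are built recursively from constant parts and then frozen and
 * marked shareable. */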
750static VALUE
751pm_static_literal_value(rb_iseq_t *iseq, const pm_node_t *node, const pm_scope_node_t *scope_node)
752{
753 // Every node that comes into this function should already be marked as
754 // static literal. If it's not, then we have a bug somewhere.
755 RUBY_ASSERT(PM_NODE_FLAG_P(node, PM_NODE_FLAG_STATIC_LITERAL));
756
757 switch (PM_NODE_TYPE(node)) {
758 case PM_ARRAY_NODE: {
759 const pm_array_node_t *cast = (const pm_array_node_t *) node;
760 const pm_node_list_t *elements = &cast->elements;
761
762 VALUE value = rb_ary_hidden_new(elements->size);
763 for (size_t index = 0; index < elements->size; index++) {
764 rb_ary_push(value, pm_static_literal_value(iseq, elements->nodes[index], scope_node));
765 }
766
767 RB_OBJ_SET_FROZEN_SHAREABLE(value);
768 return value;
769 }
770 case PM_FALSE_NODE:
771 return Qfalse;
772 case PM_FLOAT_NODE:
773 return parse_float((const pm_float_node_t *) node);
774 case PM_HASH_NODE: {
775 const pm_hash_node_t *cast = (const pm_hash_node_t *) node;
776 const pm_node_list_t *elements = &cast->elements;
777
778 VALUE array = rb_ary_hidden_new(elements->size * 2);
779 for (size_t index = 0; index < elements->size; index++) {
780 RUBY_ASSERT(PM_NODE_TYPE_P(elements->nodes[index], PM_ASSOC_NODE));
781 const pm_assoc_node_t *cast = (const pm_assoc_node_t *) elements->nodes[index];
782 VALUE pair[2] = { pm_static_literal_value(iseq, cast->key, scope_node), pm_static_literal_value(iseq, cast->value, scope_node) };
783 rb_ary_cat(array, pair, 2);
784 }
785
786 VALUE value = rb_hash_new_with_size(elements->size);
787 rb_hash_bulk_insert(RARRAY_LEN(array), RARRAY_CONST_PTR(array), value);
788
789 value = rb_obj_hide(value);
790 RB_OBJ_SET_FROZEN_SHAREABLE(value);
791 return value;
792 }
793 case PM_IMAGINARY_NODE:
794 return parse_imaginary((const pm_imaginary_node_t *) node);
795 case PM_INTEGER_NODE:
796 return parse_integer((const pm_integer_node_t *) node);
 case PM_INTERPOLATED_MATCH_LAST_LINE_NODE: {
 const pm_interpolated_match_last_line_node_t *cast = (const pm_interpolated_match_last_line_node_t *) node;
 return parse_regexp_concat(iseq, scope_node, (const pm_node_t *) cast, &cast->parts);
800 }
 case PM_INTERPOLATED_REGULAR_EXPRESSION_NODE: {
 const pm_interpolated_regular_expression_node_t *cast = (const pm_interpolated_regular_expression_node_t *) node;
 return parse_regexp_concat(iseq, scope_node, (const pm_node_t *) cast, &cast->parts);
804 }
805 case PM_INTERPOLATED_STRING_NODE: {
806 VALUE string = pm_static_literal_concat(iseq, &((const pm_interpolated_string_node_t *) node)->parts, scope_node, NULL, NULL, false);
807 int line_number = pm_node_line_number(scope_node->parser, node);
808 return pm_static_literal_string(iseq, string, line_number);
809 }
 case PM_INTERPOLATED_SYMBOL_NODE: {
 const pm_interpolated_symbol_node_t *cast = (const pm_interpolated_symbol_node_t *) node;
 VALUE string = pm_static_literal_concat(iseq, &cast->parts, scope_node, NULL, NULL, true);
813
814 return ID2SYM(rb_intern_str(string));
815 }
816 case PM_MATCH_LAST_LINE_NODE: {
817 const pm_match_last_line_node_t *cast = (const pm_match_last_line_node_t *) node;
818 return parse_regexp_literal(iseq, scope_node, (const pm_node_t *) cast, &cast->unescaped);
819 }
820 case PM_NIL_NODE:
821 return Qnil;
822 case PM_RATIONAL_NODE:
823 return parse_rational((const pm_rational_node_t *) node);
 case PM_REGULAR_EXPRESSION_NODE: {
 const pm_regular_expression_node_t *cast = (const pm_regular_expression_node_t *) node;
 return parse_regexp_literal(iseq, scope_node, (const pm_node_t *) cast, &cast->unescaped);
827 }
828 case PM_SOURCE_ENCODING_NODE:
829 return rb_enc_from_encoding(scope_node->encoding);
830 case PM_SOURCE_FILE_NODE: {
831 const pm_source_file_node_t *cast = (const pm_source_file_node_t *) node;
832 return pm_source_file_value(cast, scope_node);
833 }
834 case PM_SOURCE_LINE_NODE:
835 return INT2FIX(pm_node_line_number(scope_node->parser, node));
836 case PM_STRING_NODE: {
837 const pm_string_node_t *cast = (const pm_string_node_t *) node;
838 return parse_static_literal_string(iseq, scope_node, node, &cast->unescaped);
839 }
840 case PM_SYMBOL_NODE:
841 return ID2SYM(parse_string_symbol(scope_node, (const pm_symbol_node_t *) node));
842 case PM_TRUE_NODE:
843 return Qtrue;
844 default:
845 rb_bug("Don't have a literal value for node type %s", pm_node_type_to_str(PM_NODE_TYPE(node)));
846 return Qfalse;
847 }
848}
849
static rb_code_location_t
pm_code_location(const pm_scope_node_t *scope_node, const pm_node_t *node)
855{
856 const pm_line_column_t start_location = PM_NODE_START_LINE_COLUMN(scope_node->parser, node);
857 const pm_line_column_t end_location = PM_NODE_END_LINE_COLUMN(scope_node->parser, node);
858
859 return (rb_code_location_t) {
860 .beg_pos = { .lineno = start_location.line, .column = start_location.column },
861 .end_pos = { .lineno = end_location.line, .column = end_location.column }
862 };
863}
864
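/* Branch coverage is only recorded when coverage is enabled and branch
 * coverage is enabled for this iseq. */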
870#define PM_BRANCH_COVERAGE_P(iseq) (ISEQ_COVERAGE(iseq) && ISEQ_BRANCH_COVERAGE(iseq))
871
872static void
873pm_compile_branch_condition(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const pm_node_t *cond,
874 LABEL *then_label, LABEL *else_label, bool popped, pm_scope_node_t *scope_node);
875
876static void
877pm_compile_logical(rb_iseq_t *iseq, LINK_ANCHOR *const ret, pm_node_t *cond, LABEL *then_label, LABEL *else_label, bool popped, pm_scope_node_t *scope_node)
878{
879 const pm_node_location_t location = PM_NODE_START_LOCATION(scope_node->parser, cond);
880
881 DECL_ANCHOR(seq);
882
883 LABEL *label = NEW_LABEL(location.line);
884 if (!then_label) then_label = label;
885 else if (!else_label) else_label = label;
886
887 pm_compile_branch_condition(iseq, seq, cond, then_label, else_label, popped, scope_node);
888
889 if (LIST_INSN_SIZE_ONE(seq)) {
890 INSN *insn = (INSN *) ELEM_FIRST_INSN(FIRST_ELEMENT(seq));
891 if (insn->insn_id == BIN(jump) && (LABEL *)(insn->operands[0]) == label) return;
892 }
893
894 if (!label->refcnt) {
895 if (popped) PUSH_INSN(ret, location, putnil);
896 }
897 else {
898 PUSH_LABEL(seq, label);
899 }
900
901 PUSH_SEQ(ret, seq);
902 return;
903}
904
905static void
906pm_compile_flip_flop_bound(rb_iseq_t *iseq, const pm_node_t *node, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
907{
908 const pm_node_location_t location = PM_NODE_START_LOCATION(scope_node->parser, node);
909
910 if (PM_NODE_TYPE_P(node, PM_INTEGER_NODE)) {
911 PM_COMPILE_NOT_POPPED(node);
912
913 VALUE operand = ID2SYM(rb_intern("$."));
914 PUSH_INSN1(ret, location, getglobal, operand);
915
916 PUSH_SEND(ret, location, idEq, INT2FIX(1));
917 if (popped) PUSH_INSN(ret, location, pop);
918 }
919 else {
920 PM_COMPILE(node);
921 }
922}
923
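// Compile a flip-flop condition such as `if foo..bar`. A hidden special
// variable (VM_SVAR_FLIPFLOP_START plus a per-iseq counter) tracks whether
// the flip-flop is currently "on" between evaluations.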
924static void
925pm_compile_flip_flop(const pm_flip_flop_node_t *flip_flop_node, LABEL *else_label, LABEL *then_label, rb_iseq_t *iseq, const int lineno, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
926{
927 const pm_node_location_t location = { .line = lineno, .node_id = -1 };
928 LABEL *lend = NEW_LABEL(location.line);
929
930 int again = !(flip_flop_node->base.flags & PM_RANGE_FLAGS_EXCLUDE_END);
931
932 rb_num_t count = ISEQ_FLIP_CNT_INCREMENT(ISEQ_BODY(iseq)->local_iseq) + VM_SVAR_FLIPFLOP_START;
933 VALUE key = INT2FIX(count);
934
935 PUSH_INSN2(ret, location, getspecial, key, INT2FIX(0));
936 PUSH_INSNL(ret, location, branchif, lend);
937
938 if (flip_flop_node->left) {
939 pm_compile_flip_flop_bound(iseq, flip_flop_node->left, ret, popped, scope_node);
940 }
941 else {
942 PUSH_INSN(ret, location, putnil);
943 }
944
945 PUSH_INSNL(ret, location, branchunless, else_label);
946 PUSH_INSN1(ret, location, putobject, Qtrue);
947 PUSH_INSN1(ret, location, setspecial, key);
948 if (!again) {
949 PUSH_INSNL(ret, location, jump, then_label);
950 }
951
952 PUSH_LABEL(ret, lend);
953 if (flip_flop_node->right) {
954 pm_compile_flip_flop_bound(iseq, flip_flop_node->right, ret, popped, scope_node);
955 }
956 else {
957 PUSH_INSN(ret, location, putnil);
958 }
959
960 PUSH_INSNL(ret, location, branchunless, then_label);
961 PUSH_INSN1(ret, location, putobject, Qfalse);
962 PUSH_INSN1(ret, location, setspecial, key);
963 PUSH_INSNL(ret, location, jump, then_label);
964}
965
966static void pm_compile_defined_expr(rb_iseq_t *iseq, const pm_node_t *node, const pm_node_location_t *node_location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node, bool in_condition);
967
968static void
969pm_compile_branch_condition(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const pm_node_t *cond, LABEL *then_label, LABEL *else_label, bool popped, pm_scope_node_t *scope_node)
970{
971 const pm_node_location_t location = PM_NODE_START_LOCATION(scope_node->parser, cond);
972
973again:
974 switch (PM_NODE_TYPE(cond)) {
975 case PM_AND_NODE: {
976 const pm_and_node_t *cast = (const pm_and_node_t *) cond;
977 pm_compile_logical(iseq, ret, cast->left, NULL, else_label, popped, scope_node);
978
979 cond = cast->right;
980 goto again;
981 }
982 case PM_OR_NODE: {
983 const pm_or_node_t *cast = (const pm_or_node_t *) cond;
984 pm_compile_logical(iseq, ret, cast->left, then_label, NULL, popped, scope_node);
985
986 cond = cast->right;
987 goto again;
988 }
989 case PM_FALSE_NODE:
990 case PM_NIL_NODE:
991 PUSH_INSNL(ret, location, jump, else_label);
992 return;
993 case PM_FLOAT_NODE:
994 case PM_IMAGINARY_NODE:
995 case PM_INTEGER_NODE:
996 case PM_LAMBDA_NODE:
997 case PM_RATIONAL_NODE:
998 case PM_REGULAR_EXPRESSION_NODE:
999 case PM_STRING_NODE:
1000 case PM_SYMBOL_NODE:
1001 case PM_TRUE_NODE:
1002 PUSH_INSNL(ret, location, jump, then_label);
1003 return;
1004 case PM_FLIP_FLOP_NODE:
1005 pm_compile_flip_flop((const pm_flip_flop_node_t *) cond, else_label, then_label, iseq, location.line, ret, popped, scope_node);
1006 return;
1007 case PM_DEFINED_NODE: {
1008 const pm_defined_node_t *cast = (const pm_defined_node_t *) cond;
1009 pm_compile_defined_expr(iseq, cast->value, &location, ret, popped, scope_node, true);
1010 break;
1011 }
1012 default: {
1013 DECL_ANCHOR(cond_seq);
1014 pm_compile_node(iseq, cond, cond_seq, false, scope_node);
1015
1016 if (LIST_INSN_SIZE_ONE(cond_seq)) {
1017 INSN *insn = (INSN *) ELEM_FIRST_INSN(FIRST_ELEMENT(cond_seq));
1018
1019 if (insn->insn_id == BIN(putobject)) {
1020 if (RTEST(insn->operands[0])) {
1021 PUSH_INSNL(ret, location, jump, then_label);
1022 // maybe unreachable
1023 return;
1024 }
1025 else {
1026 PUSH_INSNL(ret, location, jump, else_label);
1027 return;
1028 }
1029 }
1030 }
1031
1032 PUSH_SEQ(ret, cond_seq);
1033 break;
1034 }
1035 }
1036
1037 PUSH_INSNL(ret, location, branchunless, else_label);
1038 PUSH_INSNL(ret, location, jump, then_label);
1039}
1040
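// Compile an if or unless node: emit the branch condition, then the "then"
// and "else" sequences, recording branch coverage for each side when it is
// enabled.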
1044static void
1045pm_compile_conditional(rb_iseq_t *iseq, const pm_node_location_t *node_location, pm_node_type_t type, const pm_node_t *node, const pm_statements_node_t *statements, const pm_node_t *subsequent, const pm_node_t *predicate, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
1046{
1047 const pm_node_location_t location = *node_location;
1048 LABEL *then_label = NEW_LABEL(location.line);
1049 LABEL *else_label = NEW_LABEL(location.line);
1050 LABEL *end_label = NULL;
1051
1052 DECL_ANCHOR(cond_seq);
1053 pm_compile_branch_condition(iseq, cond_seq, predicate, then_label, else_label, false, scope_node);
1054 PUSH_SEQ(ret, cond_seq);
1055
1056 rb_code_location_t conditional_location = { 0 };
1057 VALUE branches = Qfalse;
1058
1059 if (then_label->refcnt && else_label->refcnt && PM_BRANCH_COVERAGE_P(iseq)) {
1060 conditional_location = pm_code_location(scope_node, node);
1061 branches = decl_branch_base(iseq, PTR2NUM(node), &conditional_location, type == PM_IF_NODE ? "if" : "unless");
1062 }
1063
1064 if (then_label->refcnt) {
1065 PUSH_LABEL(ret, then_label);
1066
1067 DECL_ANCHOR(then_seq);
1068
1069 if (statements != NULL) {
1070 pm_compile_node(iseq, (const pm_node_t *) statements, then_seq, popped, scope_node);
1071 }
1072 else if (!popped) {
1073 PUSH_SYNTHETIC_PUTNIL(then_seq, iseq);
1074 }
1075
1076 if (else_label->refcnt) {
1077 // Establish branch coverage for the then block.
1078 if (PM_BRANCH_COVERAGE_P(iseq)) {
1079 rb_code_location_t branch_location;
1080
1081 if (statements != NULL) {
1082 branch_location = pm_code_location(scope_node, (const pm_node_t *) statements);
1083 } else if (type == PM_IF_NODE) {
1084 pm_line_column_t predicate_end = PM_NODE_END_LINE_COLUMN(scope_node->parser, predicate);
1085 branch_location = (rb_code_location_t) {
1086 .beg_pos = { .lineno = predicate_end.line, .column = predicate_end.column },
1087 .end_pos = { .lineno = predicate_end.line, .column = predicate_end.column }
1088 };
1089 } else {
1090 branch_location = conditional_location;
1091 }
1092
1093 add_trace_branch_coverage(iseq, ret, &branch_location, branch_location.beg_pos.column, 0, type == PM_IF_NODE ? "then" : "else", branches);
1094 }
1095
1096 end_label = NEW_LABEL(location.line);
1097 PUSH_INSNL(then_seq, location, jump, end_label);
1098 if (!popped) PUSH_INSN(then_seq, location, pop);
1099 }
1100
1101 PUSH_SEQ(ret, then_seq);
1102 }
1103
1104 if (else_label->refcnt) {
1105 PUSH_LABEL(ret, else_label);
1106
1107 DECL_ANCHOR(else_seq);
1108
1109 if (subsequent != NULL) {
1110 pm_compile_node(iseq, subsequent, else_seq, popped, scope_node);
1111 }
1112 else if (!popped) {
1113 PUSH_SYNTHETIC_PUTNIL(else_seq, iseq);
1114 }
1115
1116 // Establish branch coverage for the else block.
1117 if (then_label->refcnt && PM_BRANCH_COVERAGE_P(iseq)) {
1118 rb_code_location_t branch_location;
1119
1120 if (subsequent == NULL) {
1121 branch_location = conditional_location;
1122 } else if (PM_NODE_TYPE_P(subsequent, PM_ELSE_NODE)) {
1123 const pm_else_node_t *else_node = (const pm_else_node_t *) subsequent;
1124 branch_location = pm_code_location(scope_node, else_node->statements != NULL ? ((const pm_node_t *) else_node->statements) : (const pm_node_t *) else_node);
1125 } else {
1126 branch_location = pm_code_location(scope_node, (const pm_node_t *) subsequent);
1127 }
1128
1129 add_trace_branch_coverage(iseq, ret, &branch_location, branch_location.beg_pos.column, 1, type == PM_IF_NODE ? "else" : "then", branches);
1130 }
1131
1132 PUSH_SEQ(ret, else_seq);
1133 }
1134
1135 if (end_label) {
1136 PUSH_LABEL(ret, end_label);
1137 }
1138
1139 return;
1140}
1141
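// Compile a while or until loop (e.g. `while foo; end` or
// `begin; end while foo`), setting up the redo/next/break labels and the
// corresponding catch table entries.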
1145static void
1146pm_compile_loop(rb_iseq_t *iseq, const pm_node_location_t *node_location, pm_node_flags_t flags, enum pm_node_type type, const pm_node_t *node, const pm_statements_node_t *statements, const pm_node_t *predicate, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
1147{
1148 const pm_node_location_t location = *node_location;
1149
1150 LABEL *prev_start_label = ISEQ_COMPILE_DATA(iseq)->start_label;
1151 LABEL *prev_end_label = ISEQ_COMPILE_DATA(iseq)->end_label;
1152 LABEL *prev_redo_label = ISEQ_COMPILE_DATA(iseq)->redo_label;
1153
1154 LABEL *next_label = ISEQ_COMPILE_DATA(iseq)->start_label = NEW_LABEL(location.line); /* next */
1155 LABEL *redo_label = ISEQ_COMPILE_DATA(iseq)->redo_label = NEW_LABEL(location.line); /* redo */
1156 LABEL *break_label = ISEQ_COMPILE_DATA(iseq)->end_label = NEW_LABEL(location.line); /* break */
1157 LABEL *end_label = NEW_LABEL(location.line);
1158 LABEL *adjust_label = NEW_LABEL(location.line);
1159
1160 LABEL *next_catch_label = NEW_LABEL(location.line);
1161 LABEL *tmp_label = NULL;
1162
1163 // We're pushing onto the ensure stack because breaks need to break out of
1164 // this loop and not break into the ensure statements within the same
 // lexical scope.
 struct iseq_compile_data_ensure_node_stack enl;
 push_ensure_entry(iseq, &enl, NULL, NULL);
1168
1169 // begin; end while true
1170 if (flags & PM_LOOP_FLAGS_BEGIN_MODIFIER) {
1171 tmp_label = NEW_LABEL(location.line);
1172 PUSH_INSNL(ret, location, jump, tmp_label);
1173 }
1174 else {
1175 // while true; end
1176 PUSH_INSNL(ret, location, jump, next_label);
1177 }
1178
1179 PUSH_LABEL(ret, adjust_label);
1180 PUSH_INSN(ret, location, putnil);
1181 PUSH_LABEL(ret, next_catch_label);
1182 PUSH_INSN(ret, location, pop);
1183 PUSH_INSNL(ret, location, jump, next_label);
1184 if (tmp_label) PUSH_LABEL(ret, tmp_label);
1185
1186 PUSH_LABEL(ret, redo_label);
1187
1188 // Establish branch coverage for the loop.
1189 if (PM_BRANCH_COVERAGE_P(iseq)) {
1190 rb_code_location_t loop_location = pm_code_location(scope_node, node);
1191 VALUE branches = decl_branch_base(iseq, PTR2NUM(node), &loop_location, type == PM_WHILE_NODE ? "while" : "until");
1192
1193 rb_code_location_t branch_location = statements != NULL ? pm_code_location(scope_node, (const pm_node_t *) statements) : loop_location;
1194 add_trace_branch_coverage(iseq, ret, &branch_location, branch_location.beg_pos.column, 0, "body", branches);
1195 }
1196
1197 if (statements != NULL) PM_COMPILE_POPPED((const pm_node_t *) statements);
1198 PUSH_LABEL(ret, next_label);
1199
1200 if (type == PM_WHILE_NODE) {
1201 pm_compile_branch_condition(iseq, ret, predicate, redo_label, end_label, popped, scope_node);
1202 }
1203 else if (type == PM_UNTIL_NODE) {
1204 pm_compile_branch_condition(iseq, ret, predicate, end_label, redo_label, popped, scope_node);
1205 }
1206
1207 PUSH_LABEL(ret, end_label);
1208 PUSH_ADJUST_RESTORE(ret, adjust_label);
1209 PUSH_INSN(ret, location, putnil);
1210
1211 PUSH_LABEL(ret, break_label);
1212 if (popped) PUSH_INSN(ret, location, pop);
1213
1214 PUSH_CATCH_ENTRY(CATCH_TYPE_BREAK, redo_label, break_label, NULL, break_label);
1215 PUSH_CATCH_ENTRY(CATCH_TYPE_NEXT, redo_label, break_label, NULL, next_catch_label);
1216 PUSH_CATCH_ENTRY(CATCH_TYPE_REDO, redo_label, break_label, NULL, ISEQ_COMPILE_DATA(iseq)->redo_label);
1217
1218 ISEQ_COMPILE_DATA(iseq)->start_label = prev_start_label;
1219 ISEQ_COMPILE_DATA(iseq)->end_label = prev_end_label;
1220 ISEQ_COMPILE_DATA(iseq)->redo_label = prev_redo_label;
1221 ISEQ_COMPILE_DATA(iseq)->ensure_node_stack = ISEQ_COMPILE_DATA(iseq)->ensure_node_stack->prev;
1222
1223 return;
1224}
1225
// This recurses through scopes and finds the local index at any scope level.
// It starts the search at the given depth and walks outward through enclosing
// scopes, recording in the returned index how many levels were traversed.
1229static pm_local_index_t
1230pm_lookup_local_index(rb_iseq_t *iseq, const pm_scope_node_t *scope_node, pm_constant_id_t constant_id, int start_depth)
1231{
1232 pm_local_index_t lindex = { 0 };
1233 st_data_t local_index;
1234
1235 int level;
1236 for (level = 0; level < start_depth; level++) {
1237 scope_node = scope_node->previous;
1238 }
1239
1240 while (!st_lookup(scope_node->index_lookup_table, constant_id, &local_index)) {
1241 level++;
1242
1243 if (scope_node->previous) {
1244 scope_node = scope_node->previous;
1245 }
1246 else {
1247 // We have recursed up all scope nodes
1248 // and have not found the local yet
1249 rb_bug("Local with constant_id %u does not exist", (unsigned int) constant_id);
1250 }
1251 }
1252
1253 lindex.level = level;
1254 lindex.index = scope_node->local_table_for_iseq_size - (int) local_index;
1255 return lindex;
1256}
1257
// This returns the CRuby ID which maps to the pm_constant_id_t.
//
// Constant ids in prism are indexes into prism's constant pool. The
// scope_node carries a mapping from these constant_id indexes to the CRuby
// IDs that they represent, and this helper provides easy access to those IDs.
1264static ID
1265pm_constant_id_lookup(const pm_scope_node_t *scope_node, pm_constant_id_t constant_id)
1266{
1267 if (constant_id < 1 || constant_id > scope_node->parser->constant_pool.size) {
1268 rb_bug("constant_id out of range: %u", (unsigned int)constant_id);
1269 }
1270 return scope_node->constants[constant_id - 1];
1271}
1272
1273static rb_iseq_t *
1274pm_new_child_iseq(rb_iseq_t *iseq, pm_scope_node_t *node, VALUE name, const rb_iseq_t *parent, enum rb_iseq_type type, int line_no)
1275{
1276 debugs("[new_child_iseq]> ---------------------------------------\n");
1277 int isolated_depth = ISEQ_COMPILE_DATA(iseq)->isolated_depth;
1278 int error_state;
1279 rb_iseq_t *ret_iseq = pm_iseq_new_with_opt(node, name,
1280 rb_iseq_path(iseq), rb_iseq_realpath(iseq),
1281 line_no, parent,
1282 isolated_depth ? isolated_depth + 1 : 0,
1283 type, ISEQ_COMPILE_DATA(iseq)->option, &error_state);
1284
1285 if (error_state) {
1286 pm_scope_node_destroy(node);
1287 RUBY_ASSERT(ret_iseq == NULL);
1288 rb_jump_tag(error_state);
1289 }
1290 debugs("[new_child_iseq]< ---------------------------------------\n");
1291 return ret_iseq;
1292}
1293
1294static int
1295pm_compile_class_path(rb_iseq_t *iseq, const pm_node_t *node, const pm_node_location_t *node_location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
1296{
1297 if (PM_NODE_TYPE_P(node, PM_CONSTANT_PATH_NODE)) {
1298 const pm_node_t *parent = ((const pm_constant_path_node_t *) node)->parent;
1299
1300 if (parent) {
1301 /* Bar::Foo */
1302 PM_COMPILE(parent);
1303 return VM_DEFINECLASS_FLAG_SCOPED;
1304 }
1305 else {
1306 /* toplevel class ::Foo */
1307 PUSH_INSN1(ret, *node_location, putobject, rb_cObject);
1308 return VM_DEFINECLASS_FLAG_SCOPED;
1309 }
1310 }
1311 else {
1312 /* class at cbase Foo */
1313 PUSH_INSN1(ret, *node_location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_CONST_BASE));
1314 return 0;
1315 }
1316}
1317
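// Compile `&&=` and `||=` writes through a method call, e.g.
// `foo.bar &&= value` or `foo&.bar ||= value`, including the safe navigation
// variant: read with read_name, branch on the result, and write with
// write_name only when needed.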
1322static void
1323pm_compile_call_and_or_write_node(rb_iseq_t *iseq, bool and_node, const pm_node_t *receiver, const pm_node_t *value, pm_constant_id_t write_name, pm_constant_id_t read_name, bool safe_nav, const pm_node_location_t *node_location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
1324{
1325 const pm_node_location_t location = *node_location;
1326 LABEL *lfin = NEW_LABEL(location.line);
1327 LABEL *lcfin = NEW_LABEL(location.line);
1328 LABEL *lskip = NULL;
1329
1330 int flag = PM_NODE_TYPE_P(receiver, PM_SELF_NODE) ? VM_CALL_FCALL : 0;
1331 ID id_read_name = pm_constant_id_lookup(scope_node, read_name);
1332
1333 PM_COMPILE_NOT_POPPED(receiver);
1334 if (safe_nav) {
1335 lskip = NEW_LABEL(location.line);
1336 PUSH_INSN(ret, location, dup);
1337 PUSH_INSNL(ret, location, branchnil, lskip);
1338 }
1339
1340 PUSH_INSN(ret, location, dup);
1341 PUSH_SEND_WITH_FLAG(ret, location, id_read_name, INT2FIX(0), INT2FIX(flag));
1342 if (!popped) PUSH_INSN(ret, location, dup);
1343
1344 if (and_node) {
1345 PUSH_INSNL(ret, location, branchunless, lcfin);
1346 }
1347 else {
1348 PUSH_INSNL(ret, location, branchif, lcfin);
1349 }
1350
1351 if (!popped) PUSH_INSN(ret, location, pop);
1352 PM_COMPILE_NOT_POPPED(value);
1353
1354 if (!popped) {
1355 PUSH_INSN(ret, location, swap);
1356 PUSH_INSN1(ret, location, topn, INT2FIX(1));
1357 }
1358
1359 ID id_write_name = pm_constant_id_lookup(scope_node, write_name);
1360 PUSH_SEND_WITH_FLAG(ret, location, id_write_name, INT2FIX(1), INT2FIX(flag));
1361 PUSH_INSNL(ret, location, jump, lfin);
1362
1363 PUSH_LABEL(ret, lcfin);
1364 if (!popped) PUSH_INSN(ret, location, swap);
1365
1366 PUSH_LABEL(ret, lfin);
1367
1368 if (lskip && popped) PUSH_LABEL(ret, lskip);
1369 PUSH_INSN(ret, location, pop);
1370 if (lskip && !popped) PUSH_LABEL(ret, lskip);
1371}
1372
1373static void pm_compile_shareable_constant_value(rb_iseq_t *iseq, const pm_node_t *node, const pm_node_flags_t shareability, VALUE path, LINK_ANCHOR *const ret, pm_scope_node_t *scope_node, bool top);
1374
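// Compile the elements of a hash literal or keyword argument list onto the
// stack, grouping neighboring plain assoc nodes into newhash/hash-merge
// chunks and merging splatted hashes via core#hash_merge_kwd.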
1380static void
1381pm_compile_hash_elements(rb_iseq_t *iseq, const pm_node_t *node, const pm_node_list_t *elements, const pm_node_flags_t shareability, VALUE path, bool argument, LINK_ANCHOR *const ret, pm_scope_node_t *scope_node)
1382{
1383 const pm_node_location_t location = PM_NODE_START_LOCATION(scope_node->parser, node);
1384
1385 // If this element is not popped, then we need to create the hash on the
1386 // stack. Neighboring plain assoc nodes should be grouped together (either
1387 // by newhash or hash merge). Double splat nodes should be merged using the
1388 // merge_kwd method call.
1389 const int max_stack_length = 0x100;
1390 const unsigned int min_tmp_hash_length = 0x800;
1391
1392 int stack_length = 0;
1393 bool first_chunk = true;
1394
1395 // This is an optimization wherein we keep track of whether or not the
1396 // previous element was a static literal. If it was, then we do not attempt
1397 // to check if we have a subhash that can be optimized. If it was not, then
1398 // we do check.
1399 bool static_literal = false;
1400
1401 DECL_ANCHOR(anchor);
1402
1403 // Convert pushed elements to a hash, and merge if needed.
1404#define FLUSH_CHUNK \
1405 if (stack_length) { \
1406 if (first_chunk) { \
1407 PUSH_SEQ(ret, anchor); \
1408 PUSH_INSN1(ret, location, newhash, INT2FIX(stack_length)); \
1409 first_chunk = false; \
1410 } \
1411 else { \
1412 PUSH_INSN1(ret, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE)); \
1413 PUSH_INSN(ret, location, swap); \
1414 PUSH_SEQ(ret, anchor); \
1415 PUSH_SEND(ret, location, id_core_hash_merge_ptr, INT2FIX(stack_length + 1)); \
1416 } \
1417 INIT_ANCHOR(anchor); \
1418 stack_length = 0; \
1419 }
1420
1421 for (size_t index = 0; index < elements->size; index++) {
1422 const pm_node_t *element = elements->nodes[index];
1423
1424 switch (PM_NODE_TYPE(element)) {
1425 case PM_ASSOC_NODE: {
1426 // Pre-allocation check (this branch can be omitted).
1427 if (
1428 (shareability == 0) &&
1429 PM_NODE_FLAG_P(element, PM_NODE_FLAG_STATIC_LITERAL) && (
1430 (!static_literal && ((index + min_tmp_hash_length) < elements->size)) ||
1431 (first_chunk && stack_length == 0)
1432 )
1433 ) {
1434 // Count the elements that are statically-known.
1435 size_t count = 1;
1436 while (index + count < elements->size && PM_NODE_FLAG_P(elements->nodes[index + count], PM_NODE_FLAG_STATIC_LITERAL)) count++;
1437
1438 if ((first_chunk && stack_length == 0) || count >= min_tmp_hash_length) {
1439 // The subsequence of elements in this hash is long enough
1440 // to merit its own hash.
1441 VALUE ary = rb_ary_hidden_new(count);
1442
1443 // Create a hidden hash.
1444 for (size_t tmp_end = index + count; index < tmp_end; index++) {
1445 const pm_assoc_node_t *assoc = (const pm_assoc_node_t *) elements->nodes[index];
1446
1447 VALUE elem[2] = {
1448 pm_static_literal_value(iseq, assoc->key, scope_node),
1449 pm_static_literal_value(iseq, assoc->value, scope_node)
1450 };
1451
1452 rb_ary_cat(ary, elem, 2);
1453 }
1454 index --;
1455
1456 VALUE hash = rb_hash_new_with_size(RARRAY_LEN(ary) / 2);
1457 rb_hash_bulk_insert(RARRAY_LEN(ary), RARRAY_CONST_PTR(ary), hash);
1458 hash = rb_obj_hide(hash);
1459 RB_OBJ_SET_FROZEN_SHAREABLE(hash);
1460
1461 // Emit optimized code.
1462 FLUSH_CHUNK;
1463 if (first_chunk) {
1464 PUSH_INSN1(ret, location, duphash, hash);
1465 first_chunk = false;
1466 }
1467 else {
1468 PUSH_INSN1(ret, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
1469 PUSH_INSN(ret, location, swap);
1470 PUSH_INSN1(ret, location, putobject, hash);
1471 PUSH_SEND(ret, location, id_core_hash_merge_kwd, INT2FIX(2));
1472 }
1473
1474 break;
1475 }
1476 else {
1477 static_literal = true;
1478 }
1479 }
1480 else {
1481 static_literal = false;
1482 }
1483
1484 // If this is a plain assoc node, then we can compile it directly
1485 // and then add the total number of values on the stack.
1486 if (shareability == 0) {
1487 pm_compile_node(iseq, element, anchor, false, scope_node);
1488 }
1489 else {
1490 const pm_assoc_node_t *assoc = (const pm_assoc_node_t *) element;
1491 pm_compile_shareable_constant_value(iseq, assoc->key, shareability, path, ret, scope_node, false);
1492 pm_compile_shareable_constant_value(iseq, assoc->value, shareability, path, ret, scope_node, false);
1493 }
1494
1495 if ((stack_length += 2) >= max_stack_length) FLUSH_CHUNK;
1496 break;
1497 }
1498 case PM_ASSOC_SPLAT_NODE: {
1499 FLUSH_CHUNK;
1500
1501 const pm_assoc_splat_node_t *assoc_splat = (const pm_assoc_splat_node_t *) element;
1502 bool empty_hash = assoc_splat->value != NULL && (
1503 (PM_NODE_TYPE_P(assoc_splat->value, PM_HASH_NODE) && ((const pm_hash_node_t *) assoc_splat->value)->elements.size == 0) ||
1504 PM_NODE_TYPE_P(assoc_splat->value, PM_NIL_NODE)
1505 );
1506
1507 bool first_element = first_chunk && stack_length == 0;
1508 bool last_element = index == elements->size - 1;
1509 bool only_element = first_element && last_element;
1510
1511 if (empty_hash) {
1512 if (only_element && argument) {
 // **{} appears as the only keyword argument in the method call,
1514 // so it won't be modified.
1515 //
1516 // This is only done for method calls and not for literal
1517 // hashes, because literal hashes should always result in a
1518 // new hash.
1519 PUSH_INSN(ret, location, putnil);
1520 }
1521 else if (first_element) {
1522 // **{} appears as the first keyword argument, so it may be
1523 // modified. We need to create a fresh hash object.
1524 PUSH_INSN1(ret, location, newhash, INT2FIX(0));
1525 }
1526 // Any empty keyword splats that are not the first can be
1527 // ignored since merging an empty hash into the existing hash is
1528 // the same as not merging it.
1529 }
1530 else {
1531 if (only_element && argument) {
 // ** is the only keyword argument in the method call. Use it
 // directly. This will not be flagged as mutable. This is
1534 // only done for method calls and not for literal hashes,
1535 // because literal hashes should always result in a new
1536 // hash.
1537 if (shareability == 0) {
1538 PM_COMPILE_NOT_POPPED(element);
1539 }
1540 else {
1541 pm_compile_shareable_constant_value(iseq, element, shareability, path, ret, scope_node, false);
1542 }
1543 }
1544 else {
1545 // There is more than one keyword argument, or this is not a
1546 // method call. In that case, we need to add an empty hash
1547 // (if first keyword), or merge the hash to the accumulated
1548 // hash (if not the first keyword).
1549 PUSH_INSN1(ret, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
1550
1551 if (first_element) {
1552 PUSH_INSN1(ret, location, newhash, INT2FIX(0));
1553 }
1554 else {
1555 PUSH_INSN(ret, location, swap);
1556 }
1557
1558 if (shareability == 0) {
1559 PM_COMPILE_NOT_POPPED(element);
1560 }
1561 else {
1562 pm_compile_shareable_constant_value(iseq, element, shareability, path, ret, scope_node, false);
1563 }
1564
1565 PUSH_SEND(ret, location, id_core_hash_merge_kwd, INT2FIX(2));
1566 }
1567 }
1568
1569 first_chunk = false;
1570 static_literal = false;
1571 break;
1572 }
1573 default:
1574 RUBY_ASSERT("Invalid node type for hash" && false);
1575 break;
1576 }
1577 }
1578
1579 FLUSH_CHUNK;
1580#undef FLUSH_CHUNK
1581}
1582
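// Flags threaded through the dup_rest parameter of pm_setup_args_core below;
// DUP_SINGLE_KW_SPLAT, for example, forces a lone keyword splat argument to
// be copied into a fresh hash before the call.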
1583#define SPLATARRAY_FALSE 0
1584#define SPLATARRAY_TRUE 1
1585#define DUP_SINGLE_KW_SPLAT 2
1586
// These are the internal details of argument compilation. Users should call pm_setup_args() instead.
1588static int
1589pm_setup_args_core(const pm_arguments_node_t *arguments_node, const pm_node_t *block, int *flags, const bool has_regular_blockarg, struct rb_callinfo_kwarg **kw_arg, int *dup_rest, rb_iseq_t *iseq, LINK_ANCHOR *const ret, pm_scope_node_t *scope_node, const pm_node_location_t *node_location)
1590{
1591 const pm_node_location_t location = *node_location;
1592
1593 int orig_argc = 0;
1594 bool has_splat = false;
1595 bool has_keyword_splat = false;
1596
1597 if (arguments_node == NULL) {
1598 if (*flags & VM_CALL_FCALL) {
1599 *flags |= VM_CALL_VCALL;
1600 }
1601 }
1602 else {
1603 const pm_node_list_t *arguments = &arguments_node->arguments;
1604 has_keyword_splat = PM_NODE_FLAG_P(arguments_node, PM_ARGUMENTS_NODE_FLAGS_CONTAINS_KEYWORD_SPLAT);
1605
1606 // We count the number of non-keyword elements that appear after the splat node so that
1607 // we can eventually pass that count as an argument to newarray.
1608 int post_splat_counter = 0;
1609 const pm_node_t *argument;
1610
1611 PM_NODE_LIST_FOREACH(arguments, index, argument) {
1612 switch (PM_NODE_TYPE(argument)) {
1613 // A keyword hash node contains all keyword arguments as AssocNodes and AssocSplatNodes
1614 case PM_KEYWORD_HASH_NODE: {
1615 const pm_keyword_hash_node_t *keyword_arg = (const pm_keyword_hash_node_t *) argument;
1616 const pm_node_list_t *elements = &keyword_arg->elements;
1617
1618 if (has_keyword_splat || has_splat) {
1619 *flags |= VM_CALL_KW_SPLAT;
1620 has_keyword_splat = true;
1621
1622 if (elements->size > 1 || !(elements->size == 1 && PM_NODE_TYPE_P(elements->nodes[0], PM_ASSOC_SPLAT_NODE))) {
1623 // A new hash will be created for the keyword arguments
1624 // in this case, so mark the method as passing mutable
1625 // keyword splat.
1626 *flags |= VM_CALL_KW_SPLAT_MUT;
1627 pm_compile_hash_elements(iseq, argument, elements, 0, Qundef, true, ret, scope_node);
1628 }
1629 else if (*dup_rest & DUP_SINGLE_KW_SPLAT) {
1630 *flags |= VM_CALL_KW_SPLAT_MUT;
1631 PUSH_INSN1(ret, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
1632 PUSH_INSN1(ret, location, newhash, INT2FIX(0));
1633 pm_compile_hash_elements(iseq, argument, elements, 0, Qundef, true, ret, scope_node);
1634 PUSH_SEND(ret, location, id_core_hash_merge_kwd, INT2FIX(2));
1635 }
1636 else {
1637 pm_compile_hash_elements(iseq, argument, elements, 0, Qundef, true, ret, scope_node);
1638 }
1639 }
1640 else {
1641 // We need to first figure out if all elements of the
1642 // KeywordHashNode are AssocNodes with symbol keys.
1643 if (PM_NODE_FLAG_P(keyword_arg, PM_KEYWORD_HASH_NODE_FLAGS_SYMBOL_KEYS)) {
1644 // If they are all symbol keys then we can pass them as
1645 // keyword arguments. The first thing we need to do is
1646 // deduplicate. We'll do this using the combination of a
1647 // Ruby hash and a Ruby array.
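// (Hedged illustration: in a call like `foo(a: 1, a: 2)` only the last
// occurrence of :a ends up in the keyword list; the earlier value is still
// compiled, but in popped position, so it only runs for its side effects.)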
1648 VALUE stored_indices = rb_hash_new();
1649 VALUE keyword_indices = rb_ary_new_capa(elements->size);
1650
1651 size_t size = 0;
1652 for (size_t element_index = 0; element_index < elements->size; element_index++) {
1653 const pm_assoc_node_t *assoc = (const pm_assoc_node_t *) elements->nodes[element_index];
1654
1655 // Retrieve the stored index from the hash for this
1656 // keyword.
1657 VALUE keyword = pm_static_literal_value(iseq, assoc->key, scope_node);
1658 VALUE stored_index = rb_hash_aref(stored_indices, keyword);
1659
1660 // If this keyword was already seen in the hash,
1661 // then mark the array at that index as false and
1662 // decrement the keyword size.
1663 if (!NIL_P(stored_index)) {
1664 rb_ary_store(keyword_indices, NUM2LONG(stored_index), Qfalse);
1665 size--;
1666 }
1667
1668 // Store (and possibly overwrite) the index for this
1669 // keyword in the hash, mark the array at that index
1670 // as true, and increment the keyword size.
1671 rb_hash_aset(stored_indices, keyword, ULONG2NUM(element_index));
1672 rb_ary_store(keyword_indices, (long) element_index, Qtrue);
1673 size++;
1674 }
1675
1676 *kw_arg = rb_xmalloc_mul_add(size, sizeof(VALUE), sizeof(struct rb_callinfo_kwarg));
1677 *flags |= VM_CALL_KWARG;
1678
1679 VALUE *keywords = (*kw_arg)->keywords;
1680 (*kw_arg)->references = 0;
1681 (*kw_arg)->keyword_len = (int) size;
1682
1683 size_t keyword_index = 0;
1684 for (size_t element_index = 0; element_index < elements->size; element_index++) {
1685 const pm_assoc_node_t *assoc = (const pm_assoc_node_t *) elements->nodes[element_index];
1686 bool popped = true;
1687
1688 if (rb_ary_entry(keyword_indices, (long) element_index) == Qtrue) {
1689 keywords[keyword_index++] = pm_static_literal_value(iseq, assoc->key, scope_node);
1690 popped = false;
1691 }
1692
1693 PM_COMPILE(assoc->value);
1694 }
1695
1696 RUBY_ASSERT(keyword_index == size);
1697 }
1698 else {
1699 // If they aren't all symbol keys then we need to
1700 // construct a new hash and pass that as an argument.
1701 orig_argc++;
1702 *flags |= VM_CALL_KW_SPLAT;
1703
1704 size_t size = elements->size;
1705 if (size > 1) {
1706 // A new hash will be created for the keyword
1707 // arguments in this case, so mark the method as
1708 // passing mutable keyword splat.
1709 *flags |= VM_CALL_KW_SPLAT_MUT;
1710 }
1711
1712 for (size_t element_index = 0; element_index < size; element_index++) {
1713 const pm_assoc_node_t *assoc = (const pm_assoc_node_t *) elements->nodes[element_index];
1714 PM_COMPILE_NOT_POPPED(assoc->key);
1715 PM_COMPILE_NOT_POPPED(assoc->value);
1716 }
1717
1718 PUSH_INSN1(ret, location, newhash, INT2FIX(size * 2));
1719 }
1720 }
1721 break;
1722 }
1723 case PM_SPLAT_NODE: {
1724 *flags |= VM_CALL_ARGS_SPLAT;
1725 const pm_splat_node_t *splat_node = (const pm_splat_node_t *) argument;
1726
1727 if (splat_node->expression) {
1728 PM_COMPILE_NOT_POPPED(splat_node->expression);
1729 }
1730 else {
1731 pm_local_index_t index = pm_lookup_local_index(iseq, scope_node, PM_CONSTANT_MULT, 0);
1732 PUSH_GETLOCAL(ret, location, index.index, index.level);
1733 }
1734
1735 bool first_splat = !has_splat;
1736
1737 if (first_splat) {
1738 // If this is the first splat array seen and it's not the
1739 // last parameter, we want splatarray to dup it.
1740 //
1741 // foo(a, *b, c)
1742 // ^^
1743 if (index + 1 < arguments->size || has_regular_blockarg) {
1744 PUSH_INSN1(ret, location, splatarray, (*dup_rest & SPLATARRAY_TRUE) ? Qtrue : Qfalse);
1745 if (*dup_rest & SPLATARRAY_TRUE) *dup_rest &= ~SPLATARRAY_TRUE;
1746 }
1747 // If this is the first splat array seen and it's the last
1748 // parameter, we don't want splatarray to dup it.
1749 //
1750 // foo(a, *b)
1751 // ^^
1752 else {
1753 PUSH_INSN1(ret, location, splatarray, Qfalse);
1754 }
1755 }
1756 else {
1757 // If this is not the first splat array seen, there is already an
1758 // array on the stack, so we don't need another splatarray
1759 // instruction; we concatenate this value onto it instead.
1760 //
1761 // foo(a, *b, *c)
1762 // ^^
1763 PUSH_INSN(ret, location, concattoarray);
1764 }
1765
1766 has_splat = true;
1767 post_splat_counter = 0;
1768
1769 break;
1770 }
1771 case PM_FORWARDING_ARGUMENTS_NODE: { // not counted in argc return value
1772 iseq_set_use_block(ISEQ_BODY(iseq)->local_iseq);
1773
1774 if (ISEQ_BODY(ISEQ_BODY(iseq)->local_iseq)->param.flags.forwardable) {
1775 *flags |= VM_CALL_FORWARDING;
1776
1777 pm_local_index_t mult_local = pm_lookup_local_index(iseq, scope_node, PM_CONSTANT_DOT3, 0);
1778 PUSH_GETLOCAL(ret, location, mult_local.index, mult_local.level);
1779
1780 break;
1781 }
1782
1783 if (has_splat) {
1784 // If we already have a splat, we're concatenating onto the existing array.
1785 orig_argc += 1;
1786 } else {
1787 orig_argc += 2;
1788 }
1789
1790 *flags |= VM_CALL_ARGS_SPLAT | VM_CALL_ARGS_BLOCKARG | VM_CALL_KW_SPLAT;
1791
1792 // Forwarding arguments nodes are treated as foo(*, **, &),
1793 // so foo(...) is equivalent to foo(*, **, &) and, as such, the local
1794 // table for this method is known in advance.
1795 //
1796 // Push the *
1797 pm_local_index_t mult_local = pm_lookup_local_index(iseq, scope_node, PM_CONSTANT_MULT, 0);
1798 PUSH_GETLOCAL(ret, location, mult_local.index, mult_local.level);
1799
1800 if (has_splat) {
1801 // If we already have a splat, we need to concatenate arrays
1802 PUSH_INSN(ret, location, concattoarray);
1803 } else {
1804 PUSH_INSN1(ret, location, splatarray, Qfalse);
1805 }
1806
1807 // Push the **
1808 pm_local_index_t pow_local = pm_lookup_local_index(iseq, scope_node, PM_CONSTANT_POW, 0);
1809 PUSH_GETLOCAL(ret, location, pow_local.index, pow_local.level);
1810
1811 // Push the &
1812 pm_local_index_t and_local = pm_lookup_local_index(iseq, scope_node, PM_CONSTANT_AND, 0);
1813 PUSH_INSN2(ret, location, getblockparamproxy, INT2FIX(and_local.index + VM_ENV_DATA_SIZE - 1), INT2FIX(and_local.level));
1814
1815 break;
1816 }
1817 default: {
1818 post_splat_counter++;
1819 PM_COMPILE_NOT_POPPED(argument);
1820
1821 // If we've already seen a splat, we need to process everything
1822 // that comes after it specially.
1823 if (has_splat) {
1824 // Stack items are turned into an array and concatenated in
1825 // the following cases:
1826 //
1827 // If the next node is a splat:
1828 //
1829 // foo(*a, b, *c)
1830 //
1831 // If the next node is a kwarg or kwarg splat:
1832 //
1833 // foo(*a, b, c: :d)
1834 // foo(*a, b, **c)
1835 //
1836 // If the next node is NULL (we have hit the end):
1837 //
1838 // foo(*a, b)
1839 if (index == arguments->size - 1) {
1840 RUBY_ASSERT(post_splat_counter > 0);
1841 PUSH_INSN1(ret, location, pushtoarray, INT2FIX(post_splat_counter));
1842 }
1843 else {
1844 pm_node_t *next_arg = arguments->nodes[index + 1];
1845
1846 switch (PM_NODE_TYPE(next_arg)) {
1847 // A keyword hash node contains all keyword arguments as AssocNodes and AssocSplatNodes
1848 case PM_KEYWORD_HASH_NODE: {
1849 PUSH_INSN1(ret, location, newarray, INT2FIX(post_splat_counter));
1850 PUSH_INSN(ret, location, concatarray);
1851 break;
1852 }
1853 case PM_SPLAT_NODE: {
1854 PUSH_INSN1(ret, location, newarray, INT2FIX(post_splat_counter));
1855 PUSH_INSN(ret, location, concatarray);
1856 break;
1857 }
1858 default:
1859 break;
1860 }
1861 }
1862 }
1863 else {
1864 orig_argc++;
1865 }
1866 }
1867 }
1868 }
1869 }
1870
1871 if (has_splat) orig_argc++;
1872 if (has_keyword_splat) orig_argc++;
1873 return orig_argc;
1874}
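// For illustration only (a sketch derived from the cases above, not an
// exhaustive specification), some Ruby call shapes and the flags this helper
// is expected to set:
//
//     foo(1, 2)        # two positional arguments, no splat flags
//     foo(*a)          # VM_CALL_ARGS_SPLAT
//     foo(*a, b)       # VM_CALL_ARGS_SPLAT; b is pushed onto the splatted array
//     foo(**kw)        # VM_CALL_KW_SPLAT
//     foo(a: 1, b: 2)  # VM_CALL_KWARG with a deduplicated keyword list
//     foo(...)         # VM_CALL_FORWARDING, or the splat/blockarg/kw-splat
//                      # combination when the enclosing method is not forwardable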
1875
1880static inline bool
1881pm_setup_args_dup_rest_p(const pm_node_t *node)
1882{
1883 switch (PM_NODE_TYPE(node)) {
1884 case PM_BACK_REFERENCE_READ_NODE:
1885 case PM_CLASS_VARIABLE_READ_NODE:
1886 case PM_CONSTANT_READ_NODE:
1887 case PM_FALSE_NODE:
1888 case PM_FLOAT_NODE:
1889 case PM_GLOBAL_VARIABLE_READ_NODE:
1890 case PM_IMAGINARY_NODE:
1891 case PM_INSTANCE_VARIABLE_READ_NODE:
1892 case PM_INTEGER_NODE:
1893 case PM_LAMBDA_NODE:
1894 case PM_LOCAL_VARIABLE_READ_NODE:
1895 case PM_NIL_NODE:
1896 case PM_NUMBERED_REFERENCE_READ_NODE:
1897 case PM_RATIONAL_NODE:
1898 case PM_REGULAR_EXPRESSION_NODE:
1899 case PM_SELF_NODE:
1900 case PM_STRING_NODE:
1901 case PM_SYMBOL_NODE:
1902 case PM_TRUE_NODE:
1903 return false;
1904 case PM_CONSTANT_PATH_NODE: {
1905 const pm_constant_path_node_t *cast = (const pm_constant_path_node_t *) node;
1906 if (cast->parent != NULL) {
1907 return pm_setup_args_dup_rest_p(cast->parent);
1908 }
1909 return false;
1910 }
1911 case PM_IMPLICIT_NODE:
1912 return pm_setup_args_dup_rest_p(((const pm_implicit_node_t *) node)->value);
1913 case PM_ARRAY_NODE: {
1914 const pm_array_node_t *cast = (const pm_array_node_t *) node;
1915 for (size_t index = 0; index < cast->elements.size; index++) {
1916 if (pm_setup_args_dup_rest_p(cast->elements.nodes[index])) {
1917 return true;
1918 }
1919 }
1920 return false;
1921 }
1922 default:
1923 return true;
1924 }
1925}
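// A couple of hedged examples of how this predicate is used: in
//
//     foo(*rest, k: 1, v: :sym)
//
// every keyword key and value is a literal or simple read that cannot run
// arbitrary code, so the splatted `rest` array does not need to be duplicated.
// In
//
//     foo(*rest, k: bar)
//
// `bar` is a method call that could conceivably mutate `rest`, so the
// predicate returns true and the caller falls back to duplicating the array.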
1926
1930static int
1931pm_setup_args(const pm_arguments_node_t *arguments_node, const pm_node_t *block, int *flags, struct rb_callinfo_kwarg **kw_arg, rb_iseq_t *iseq, LINK_ANCHOR *const ret, pm_scope_node_t *scope_node, const pm_node_location_t *node_location)
1932{
1933 int dup_rest = SPLATARRAY_TRUE;
1934
1935 const pm_node_list_t *arguments;
1936 size_t arguments_size;
1937
1938 // Calls like foo(1, *f, **hash) that use splat and kwsplat could be
1939 // eligible for eliding duping the rest array (dup_rest=false).
1940 if (
1941 arguments_node != NULL &&
1942 (arguments = &arguments_node->arguments, arguments_size = arguments->size) >= 2 &&
1943 PM_NODE_FLAG_P(arguments_node, PM_ARGUMENTS_NODE_FLAGS_CONTAINS_SPLAT) &&
1944 !PM_NODE_FLAG_P(arguments_node, PM_ARGUMENTS_NODE_FLAGS_CONTAINS_MULTIPLE_SPLATS) &&
1945 PM_NODE_TYPE_P(arguments->nodes[arguments_size - 1], PM_KEYWORD_HASH_NODE)
1946 ) {
1947 // Start by assuming that dup_rest=false, then check each element of the
1948 // hash to ensure we don't need to flip it back to true (in case one of
1949 // the elements could potentially mutate the array).
1950 dup_rest = SPLATARRAY_FALSE;
1951
1952 const pm_keyword_hash_node_t *keyword_hash = (const pm_keyword_hash_node_t *) arguments->nodes[arguments_size - 1];
1953 const pm_node_list_t *elements = &keyword_hash->elements;
1954
1955 for (size_t index = 0; dup_rest == SPLATARRAY_FALSE && index < elements->size; index++) {
1956 const pm_node_t *element = elements->nodes[index];
1957
1958 switch (PM_NODE_TYPE(element)) {
1959 case PM_ASSOC_NODE: {
1960 const pm_assoc_node_t *assoc = (const pm_assoc_node_t *) element;
1961 if (pm_setup_args_dup_rest_p(assoc->key) || pm_setup_args_dup_rest_p(assoc->value)) dup_rest = SPLATARRAY_TRUE;
1962 break;
1963 }
1964 case PM_ASSOC_SPLAT_NODE: {
1965 const pm_assoc_splat_node_t *assoc = (const pm_assoc_splat_node_t *) element;
1966 if (assoc->value != NULL && pm_setup_args_dup_rest_p(assoc->value)) dup_rest = SPLATARRAY_TRUE;
1967 break;
1968 }
1969 default:
1970 break;
1971 }
1972 }
1973 }
1974
1975 int initial_dup_rest = dup_rest;
1976 int argc;
1977
1978 if (block && PM_NODE_TYPE_P(block, PM_BLOCK_ARGUMENT_NODE)) {
1979 // We compile the `&block_arg` expression first and stitch it later
1980 // since the nature of the expression influences whether splat should
1981 // duplicate the array.
1982 bool regular_block_arg = true;
1983 const pm_node_t *block_expr = ((const pm_block_argument_node_t *)block)->expression;
1984
1985 if (block_expr && pm_setup_args_dup_rest_p(block_expr)) {
1986 dup_rest = SPLATARRAY_TRUE | DUP_SINGLE_KW_SPLAT;
1987 initial_dup_rest = dup_rest;
1988 }
1989
1990 DECL_ANCHOR(block_arg);
1991 pm_compile_node(iseq, block, block_arg, false, scope_node);
1992
1993 *flags |= VM_CALL_ARGS_BLOCKARG;
1994
1995 if (LIST_INSN_SIZE_ONE(block_arg)) {
1996 LINK_ELEMENT *elem = FIRST_ELEMENT(block_arg);
1997 if (IS_INSN(elem)) {
1998 INSN *iobj = (INSN *) elem;
1999 if (iobj->insn_id == BIN(getblockparam)) {
2000 iobj->insn_id = BIN(getblockparamproxy);
2001 }
2002
2003 // Allow splat without duplication for simple one-instruction
2004 // block arguments like `&arg`. It is known that this
2005 // optimization can be too aggressive in some cases. See
2006 // [Bug #16504].
2007 regular_block_arg = false;
2008 }
2009 }
2010
2011 argc = pm_setup_args_core(arguments_node, block, flags, regular_block_arg, kw_arg, &dup_rest, iseq, ret, scope_node, node_location);
2012 PUSH_SEQ(ret, block_arg);
2013 }
2014 else {
2015 argc = pm_setup_args_core(arguments_node, block, flags, false, kw_arg, &dup_rest, iseq, ret, scope_node, node_location);
2016 }
2017
2018 // If the dup_rest flag was consumed while compiling the arguments (which
2019 // effectively means we found the splat node), then it would have changed
2020 // during the call to pm_setup_args_core. In this case, we want to add the
2021 // VM_CALL_ARGS_SPLAT_MUT flag.
2022 if (*flags & VM_CALL_ARGS_SPLAT && dup_rest != initial_dup_rest) {
2023 *flags |= VM_CALL_ARGS_SPLAT_MUT;
2024 }
2025
2026 return argc;
2027}
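// Rough usage note (a sketch, not a guarantee): for a call such as
//
//     foo(1, *rest, k: value)
//
// the loop above inspects the trailing keyword hash. If `value` is a literal
// or a simple read, dup_rest stays SPLATARRAY_FALSE and the emitted splatarray
// instruction receives false, eliding one array copy. If `value` could mutate
// `rest` (another method call, for example), dup_rest is flipped back to
// SPLATARRAY_TRUE and VM_CALL_ARGS_SPLAT_MUT is added once that duplication is
// actually consumed.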
2028
2039static void
2040pm_compile_index_operator_write_node(rb_iseq_t *iseq, const pm_index_operator_write_node_t *node, const pm_node_location_t *node_location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
2041{
2042 const pm_node_location_t location = *node_location;
2043 if (!popped) PUSH_INSN(ret, location, putnil);
2044
2045 PM_COMPILE_NOT_POPPED(node->receiver);
2046
2047 int boff = (node->block == NULL ? 0 : 1);
2048 int flag = PM_NODE_TYPE_P(node->receiver, PM_SELF_NODE) ? VM_CALL_FCALL : 0;
2049 struct rb_callinfo_kwarg *keywords = NULL;
2050 int argc = pm_setup_args(node->arguments, (const pm_node_t *) node->block, &flag, &keywords, iseq, ret, scope_node, node_location);
2051
2052 if ((argc > 0 || boff) && (flag & VM_CALL_KW_SPLAT)) {
2053 if (boff) {
2054 PUSH_INSN(ret, location, splatkw);
2055 }
2056 else {
2057 PUSH_INSN(ret, location, dup);
2058 PUSH_INSN(ret, location, splatkw);
2059 PUSH_INSN(ret, location, pop);
2060 }
2061 }
2062
2063 int dup_argn = argc + 1 + boff;
2064 int keyword_len = 0;
2065
2066 if (keywords) {
2067 keyword_len = keywords->keyword_len;
2068 dup_argn += keyword_len;
2069 }
2070
2071 PUSH_INSN1(ret, location, dupn, INT2FIX(dup_argn));
2072 PUSH_SEND_R(ret, location, idAREF, INT2FIX(argc), NULL, INT2FIX(flag & ~(VM_CALL_ARGS_SPLAT_MUT | VM_CALL_KW_SPLAT_MUT)), keywords);
2073 PM_COMPILE_NOT_POPPED(node->value);
2074
2075 ID id_operator = pm_constant_id_lookup(scope_node, node->binary_operator);
2076 PUSH_SEND(ret, location, id_operator, INT2FIX(1));
2077
2078 if (!popped) {
2079 PUSH_INSN1(ret, location, setn, INT2FIX(dup_argn + 1));
2080 }
2081 if (flag & VM_CALL_ARGS_SPLAT) {
2082 if (flag & VM_CALL_KW_SPLAT) {
2083 PUSH_INSN1(ret, location, topn, INT2FIX(2 + boff));
2084
2085 if (!(flag & VM_CALL_ARGS_SPLAT_MUT)) {
2086 PUSH_INSN1(ret, location, splatarray, Qtrue);
2087 flag |= VM_CALL_ARGS_SPLAT_MUT;
2088 }
2089
2090 PUSH_INSN(ret, location, swap);
2091 PUSH_INSN1(ret, location, pushtoarray, INT2FIX(1));
2092 PUSH_INSN1(ret, location, setn, INT2FIX(2 + boff));
2093 PUSH_INSN(ret, location, pop);
2094 }
2095 else {
2096 if (boff > 0) {
2097 PUSH_INSN1(ret, location, dupn, INT2FIX(3));
2098 PUSH_INSN(ret, location, swap);
2099 PUSH_INSN(ret, location, pop);
2100 }
2101 if (!(flag & VM_CALL_ARGS_SPLAT_MUT)) {
2102 PUSH_INSN(ret, location, swap);
2103 PUSH_INSN1(ret, location, splatarray, Qtrue);
2104 PUSH_INSN(ret, location, swap);
2105 flag |= VM_CALL_ARGS_SPLAT_MUT;
2106 }
2107 PUSH_INSN1(ret, location, pushtoarray, INT2FIX(1));
2108 if (boff > 0) {
2109 PUSH_INSN1(ret, location, setn, INT2FIX(3));
2110 PUSH_INSN(ret, location, pop);
2111 PUSH_INSN(ret, location, pop);
2112 }
2113 }
2114
2115 PUSH_SEND_R(ret, location, idASET, INT2FIX(argc), NULL, INT2FIX(flag), keywords);
2116 }
2117 else if (flag & VM_CALL_KW_SPLAT) {
2118 if (boff > 0) {
2119 PUSH_INSN1(ret, location, topn, INT2FIX(2));
2120 PUSH_INSN(ret, location, swap);
2121 PUSH_INSN1(ret, location, setn, INT2FIX(3));
2122 PUSH_INSN(ret, location, pop);
2123 }
2124 PUSH_INSN(ret, location, swap);
2125 PUSH_SEND_R(ret, location, idASET, INT2FIX(argc + 1), NULL, INT2FIX(flag), keywords);
2126 }
2127 else if (keyword_len) {
2128 PUSH_INSN(ret, location, dup);
2129 PUSH_INSN1(ret, location, opt_reverse, INT2FIX(keyword_len + boff + 2));
2130 PUSH_INSN1(ret, location, opt_reverse, INT2FIX(keyword_len + boff + 1));
2131 PUSH_INSN(ret, location, pop);
2132 PUSH_SEND_R(ret, location, idASET, INT2FIX(argc + 1), NULL, INT2FIX(flag), keywords);
2133 }
2134 else {
2135 if (boff > 0) {
2136 PUSH_INSN(ret, location, swap);
2137 }
2138 PUSH_SEND_R(ret, location, idASET, INT2FIX(argc + 1), NULL, INT2FIX(flag), keywords);
2139 }
2140
2141 PUSH_INSN(ret, location, pop);
2142}
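// As a rough illustration (simple case only: no splat, no block, no keywords,
// result used), `a[i] += 1` is compiled along the lines of:
//
//     putnil                # placeholder for the result
//     <compile a>           # receiver
//     <compile i>           # index argument
//     dupn 2                # duplicate receiver and index
//     send :[], 1           # a[i]
//     putobject 1
//     send :+, 1            # a[i] + 1
//     setn 3                # store the result over the placeholder nil
//     send :[]=, 2          # a[i] = result
//     pop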
2143
2156static void
2157pm_compile_index_control_flow_write_node(rb_iseq_t *iseq, const pm_node_t *node, const pm_node_t *receiver, const pm_arguments_node_t *arguments, const pm_block_argument_node_t *block, const pm_node_t *value, const pm_node_location_t *node_location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
2158{
2159 const pm_node_location_t location = *node_location;
2160 if (!popped) PUSH_INSN(ret, location, putnil);
2161 PM_COMPILE_NOT_POPPED(receiver);
2162
2163 int boff = (block == NULL ? 0 : 1);
2164 int flag = PM_NODE_TYPE_P(receiver, PM_SELF_NODE) ? VM_CALL_FCALL : 0;
2165 struct rb_callinfo_kwarg *keywords = NULL;
2166 int argc = pm_setup_args(arguments, (const pm_node_t *) block, &flag, &keywords, iseq, ret, scope_node, node_location);
2167
2168 if ((argc > 0 || boff) && (flag & VM_CALL_KW_SPLAT)) {
2169 if (boff) {
2170 PUSH_INSN(ret, location, splatkw);
2171 }
2172 else {
2173 PUSH_INSN(ret, location, dup);
2174 PUSH_INSN(ret, location, splatkw);
2175 PUSH_INSN(ret, location, pop);
2176 }
2177 }
2178
2179 int dup_argn = argc + 1 + boff;
2180 int keyword_len = 0;
2181
2182 if (keywords) {
2183 keyword_len = keywords->keyword_len;
2184 dup_argn += keyword_len;
2185 }
2186
2187 PUSH_INSN1(ret, location, dupn, INT2FIX(dup_argn));
2188 PUSH_SEND_R(ret, location, idAREF, INT2FIX(argc), NULL, INT2FIX(flag & ~(VM_CALL_ARGS_SPLAT_MUT | VM_CALL_KW_SPLAT_MUT)), keywords);
2189
2190 LABEL *label = NEW_LABEL(location.line);
2191 LABEL *lfin = NEW_LABEL(location.line);
2192
2193 PUSH_INSN(ret, location, dup);
2194 if (PM_NODE_TYPE_P(node, PM_INDEX_AND_WRITE_NODE)) {
2195 PUSH_INSNL(ret, location, branchunless, label);
2196 }
2197 else {
2198 PUSH_INSNL(ret, location, branchif, label);
2199 }
2200
2201 PUSH_INSN(ret, location, pop);
2202 PM_COMPILE_NOT_POPPED(value);
2203
2204 if (!popped) {
2205 PUSH_INSN1(ret, location, setn, INT2FIX(dup_argn + 1));
2206 }
2207
2208 if (flag & VM_CALL_ARGS_SPLAT) {
2209 if (flag & VM_CALL_KW_SPLAT) {
2210 PUSH_INSN1(ret, location, topn, INT2FIX(2 + boff));
2211 if (!(flag & VM_CALL_ARGS_SPLAT_MUT)) {
2212 PUSH_INSN1(ret, location, splatarray, Qtrue);
2213 flag |= VM_CALL_ARGS_SPLAT_MUT;
2214 }
2215
2216 PUSH_INSN(ret, location, swap);
2217 PUSH_INSN1(ret, location, pushtoarray, INT2FIX(1));
2218 PUSH_INSN1(ret, location, setn, INT2FIX(2 + boff));
2219 PUSH_INSN(ret, location, pop);
2220 }
2221 else {
2222 if (boff > 0) {
2223 PUSH_INSN1(ret, location, dupn, INT2FIX(3));
2224 PUSH_INSN(ret, location, swap);
2225 PUSH_INSN(ret, location, pop);
2226 }
2227 if (!(flag & VM_CALL_ARGS_SPLAT_MUT)) {
2228 PUSH_INSN(ret, location, swap);
2229 PUSH_INSN1(ret, location, splatarray, Qtrue);
2230 PUSH_INSN(ret, location, swap);
2231 flag |= VM_CALL_ARGS_SPLAT_MUT;
2232 }
2233 PUSH_INSN1(ret, location, pushtoarray, INT2FIX(1));
2234 if (boff > 0) {
2235 PUSH_INSN1(ret, location, setn, INT2FIX(3));
2236 PUSH_INSN(ret, location, pop);
2237 PUSH_INSN(ret, location, pop);
2238 }
2239 }
2240
2241 PUSH_SEND_R(ret, location, idASET, INT2FIX(argc), NULL, INT2FIX(flag), keywords);
2242 }
2243 else if (flag & VM_CALL_KW_SPLAT) {
2244 if (boff > 0) {
2245 PUSH_INSN1(ret, location, topn, INT2FIX(2));
2246 PUSH_INSN(ret, location, swap);
2247 PUSH_INSN1(ret, location, setn, INT2FIX(3));
2248 PUSH_INSN(ret, location, pop);
2249 }
2250
2251 PUSH_INSN(ret, location, swap);
2252 PUSH_SEND_R(ret, location, idASET, INT2FIX(argc + 1), NULL, INT2FIX(flag), keywords);
2253 }
2254 else if (keyword_len) {
2255 PUSH_INSN1(ret, location, opt_reverse, INT2FIX(keyword_len + boff + 1));
2256 PUSH_INSN1(ret, location, opt_reverse, INT2FIX(keyword_len + boff + 0));
2257 PUSH_SEND_R(ret, location, idASET, INT2FIX(argc + 1), NULL, INT2FIX(flag), keywords);
2258 }
2259 else {
2260 if (boff > 0) {
2261 PUSH_INSN(ret, location, swap);
2262 }
2263 PUSH_SEND_R(ret, location, idASET, INT2FIX(argc + 1), NULL, INT2FIX(flag), keywords);
2264 }
2265
2266 PUSH_INSN(ret, location, pop);
2267 PUSH_INSNL(ret, location, jump, lfin);
2268 PUSH_LABEL(ret, label);
2269 if (!popped) {
2270 PUSH_INSN1(ret, location, setn, INT2FIX(dup_argn + 1));
2271 }
2272 PUSH_INSN1(ret, location, adjuststack, INT2FIX(dup_argn + 1));
2273 PUSH_LABEL(ret, lfin);
2274}
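// Sketch of the intent (simple case only): for `a[i] ||= x`, the sequence
// above evaluates `a` and `i` once and fetches `a[i]`. If the fetched value is
// truthy, the branchif skips the assignment and that value becomes the
// expression's result; otherwise `x` is compiled and `a.[]=(i, x)` is sent.
// `a[i] &&= x` is identical except that the test is inverted via branchunless.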
2275
2276// When we compile a pattern matching expression, we use the stack as a scratch
2277// space to store lots of different values (think of it as a pattern
2278// matching function that needs space for a bunch of different local
2279// variables). The "base index" refers to the index on the stack where we
2280// started compiling the pattern matching expression. These offsets from that
2281// base index indicate the location of the various locals we need.
2282#define PM_PATTERN_BASE_INDEX_OFFSET_DECONSTRUCTED_CACHE 0
2283#define PM_PATTERN_BASE_INDEX_OFFSET_ERROR_STRING 1
2284#define PM_PATTERN_BASE_INDEX_OFFSET_KEY_ERROR_P 2
2285#define PM_PATTERN_BASE_INDEX_OFFSET_KEY_ERROR_MATCHEE 3
2286#define PM_PATTERN_BASE_INDEX_OFFSET_KEY_ERROR_KEY 4
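// Illustrative stack layout during pattern compilation, relative to the base
// index (an informal sketch derived from the offsets above):
//
//     base_index + 0: cached result of calling #deconstruct on the matchee
//     base_index + 1: error message string reported when the match fails
//     base_index + 2: flag marking whether the failure is a key error
//     base_index + 3: matchee reported by NoMatchingPatternKeyError
//     base_index + 4: key reported by NoMatchingPatternKeyError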
2287
2288// A forward declaration because this is the recursive function that handles
2289// compiling a pattern. It can be reentered by nesting patterns, as in the case
2290// of arrays or hashes.
2291static int pm_compile_pattern(rb_iseq_t *iseq, pm_scope_node_t *scope_node, const pm_node_t *node, LINK_ANCHOR *const ret, LABEL *matched_label, LABEL *unmatched_label, bool in_single_pattern, bool use_deconstructed_cache, unsigned int base_index);
2292
2297static int
2298pm_compile_pattern_generic_error(rb_iseq_t *iseq, pm_scope_node_t *scope_node, const pm_node_t *node, LINK_ANCHOR *const ret, VALUE message, unsigned int base_index)
2299{
2300 const pm_node_location_t location = PM_NODE_START_LOCATION(scope_node->parser, node);
2301 LABEL *match_succeeded_label = NEW_LABEL(location.line);
2302
2303 PUSH_INSN(ret, location, dup);
2304 PUSH_INSNL(ret, location, branchif, match_succeeded_label);
2305
2306 PUSH_INSN1(ret, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
2307 PUSH_INSN1(ret, location, putobject, message);
2308 PUSH_INSN1(ret, location, topn, INT2FIX(3));
2309 PUSH_SEND(ret, location, id_core_sprintf, INT2FIX(2));
2310 PUSH_INSN1(ret, location, setn, INT2FIX(base_index + PM_PATTERN_BASE_INDEX_OFFSET_ERROR_STRING + 1));
2311
2312 PUSH_INSN1(ret, location, putobject, Qfalse);
2313 PUSH_INSN1(ret, location, setn, INT2FIX(base_index + PM_PATTERN_BASE_INDEX_OFFSET_KEY_ERROR_P + 2));
2314
2315 PUSH_INSN(ret, location, pop);
2316 PUSH_INSN(ret, location, pop);
2317 PUSH_LABEL(ret, match_succeeded_label);
2318
2319 return COMPILE_OK;
2320}
2321
2327static int
2328pm_compile_pattern_length_error(rb_iseq_t *iseq, pm_scope_node_t *scope_node, const pm_node_t *node, LINK_ANCHOR *const ret, VALUE message, VALUE length, unsigned int base_index)
2329{
2330 const pm_node_location_t location = PM_NODE_START_LOCATION(scope_node->parser, node);
2331 LABEL *match_succeeded_label = NEW_LABEL(location.line);
2332
2333 PUSH_INSN(ret, location, dup);
2334 PUSH_INSNL(ret, location, branchif, match_succeeded_label);
2335
2336 PUSH_INSN1(ret, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
2337 PUSH_INSN1(ret, location, putobject, message);
2338 PUSH_INSN1(ret, location, topn, INT2FIX(3));
2339 PUSH_INSN(ret, location, dup);
2340 PUSH_SEND(ret, location, idLength, INT2FIX(0));
2341 PUSH_INSN1(ret, location, putobject, length);
2342 PUSH_SEND(ret, location, id_core_sprintf, INT2FIX(4));
2343 PUSH_INSN1(ret, location, setn, INT2FIX(base_index + PM_PATTERN_BASE_INDEX_OFFSET_ERROR_STRING + 1));
2344
2345 PUSH_INSN1(ret, location, putobject, Qfalse);
2346 PUSH_INSN1(ret, location, setn, INT2FIX(base_index + PM_PATTERN_BASE_INDEX_OFFSET_KEY_ERROR_P + 2));
2347
2348 PUSH_INSN(ret, location, pop);
2349 PUSH_INSN(ret, location, pop);
2350 PUSH_LABEL(ret, match_succeeded_label);
2351
2352 return COMPILE_OK;
2353}
2354
2360static int
2361pm_compile_pattern_eqq_error(rb_iseq_t *iseq, pm_scope_node_t *scope_node, const pm_node_t *node, LINK_ANCHOR *const ret, unsigned int base_index)
2362{
2363 const pm_node_location_t location = PM_NODE_START_LOCATION(scope_node->parser, node);
2364 LABEL *match_succeeded_label = NEW_LABEL(location.line);
2365
2366 PUSH_INSN(ret, location, dup);
2367 PUSH_INSNL(ret, location, branchif, match_succeeded_label);
2368 PUSH_INSN1(ret, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
2369
2370 VALUE operand = rb_fstring_lit("%p === %p does not return true");
2371 PUSH_INSN1(ret, location, putobject, operand);
2372
2373 PUSH_INSN1(ret, location, topn, INT2FIX(3));
2374 PUSH_INSN1(ret, location, topn, INT2FIX(5));
2375 PUSH_SEND(ret, location, id_core_sprintf, INT2FIX(3));
2376 PUSH_INSN1(ret, location, setn, INT2FIX(base_index + PM_PATTERN_BASE_INDEX_OFFSET_ERROR_STRING + 1));
2377 PUSH_INSN1(ret, location, putobject, Qfalse);
2378 PUSH_INSN1(ret, location, setn, INT2FIX(base_index + PM_PATTERN_BASE_INDEX_OFFSET_KEY_ERROR_P + 2));
2379 PUSH_INSN(ret, location, pop);
2380 PUSH_INSN(ret, location, pop);
2381
2382 PUSH_LABEL(ret, match_succeeded_label);
2383 PUSH_INSN1(ret, location, setn, INT2FIX(2));
2384 PUSH_INSN(ret, location, pop);
2385 PUSH_INSN(ret, location, pop);
2386
2387 return COMPILE_OK;
2388}
2389
2396static int
2397pm_compile_pattern_match(rb_iseq_t *iseq, pm_scope_node_t *scope_node, const pm_node_t *node, LINK_ANCHOR *const ret, LABEL *unmatched_label, bool in_single_pattern, bool use_deconstructed_cache, unsigned int base_index)
2398{
2399 LABEL *matched_label = NEW_LABEL(pm_node_line_number(scope_node->parser, node));
2400 CHECK(pm_compile_pattern(iseq, scope_node, node, ret, matched_label, unmatched_label, in_single_pattern, use_deconstructed_cache, base_index));
2401 PUSH_LABEL(ret, matched_label);
2402 return COMPILE_OK;
2403}
2404
2410static int
2411pm_compile_pattern_deconstruct(rb_iseq_t *iseq, pm_scope_node_t *scope_node, const pm_node_t *node, LINK_ANCHOR *const ret, LABEL *deconstruct_label, LABEL *match_failed_label, LABEL *deconstructed_label, LABEL *type_error_label, bool in_single_pattern, bool use_deconstructed_cache, unsigned int base_index)
2412{
2413 const pm_node_location_t location = PM_NODE_START_LOCATION(scope_node->parser, node);
2414
2415 if (use_deconstructed_cache) {
2416 PUSH_INSN1(ret, location, topn, INT2FIX(base_index + PM_PATTERN_BASE_INDEX_OFFSET_DECONSTRUCTED_CACHE));
2417 PUSH_INSNL(ret, location, branchnil, deconstruct_label);
2418
2419 PUSH_INSN1(ret, location, topn, INT2FIX(base_index + PM_PATTERN_BASE_INDEX_OFFSET_DECONSTRUCTED_CACHE));
2420 PUSH_INSNL(ret, location, branchunless, match_failed_label);
2421
2422 PUSH_INSN(ret, location, pop);
2423 PUSH_INSN1(ret, location, topn, INT2FIX(base_index + PM_PATTERN_BASE_INDEX_OFFSET_DECONSTRUCTED_CACHE - 1));
2424 PUSH_INSNL(ret, location, jump, deconstructed_label);
2425 }
2426 else {
2427 PUSH_INSNL(ret, location, jump, deconstruct_label);
2428 }
2429
2430 PUSH_LABEL(ret, deconstruct_label);
2431 PUSH_INSN(ret, location, dup);
2432
2433 VALUE operand = ID2SYM(rb_intern("deconstruct"));
2434 PUSH_INSN1(ret, location, putobject, operand);
2435 PUSH_SEND(ret, location, idRespond_to, INT2FIX(1));
2436
2437 if (use_deconstructed_cache) {
2438 PUSH_INSN1(ret, location, setn, INT2FIX(base_index + PM_PATTERN_BASE_INDEX_OFFSET_DECONSTRUCTED_CACHE + 1));
2439 }
2440
2441 if (in_single_pattern) {
2442 CHECK(pm_compile_pattern_generic_error(iseq, scope_node, node, ret, rb_fstring_lit("%p does not respond to #deconstruct"), base_index + 1));
2443 }
2444
2445 PUSH_INSNL(ret, location, branchunless, match_failed_label);
2446 PUSH_SEND(ret, location, rb_intern("deconstruct"), INT2FIX(0));
2447
2448 if (use_deconstructed_cache) {
2449 PUSH_INSN1(ret, location, setn, INT2FIX(base_index + PM_PATTERN_BASE_INDEX_OFFSET_DECONSTRUCTED_CACHE));
2450 }
2451
2452 PUSH_INSN(ret, location, dup);
2453 PUSH_INSN1(ret, location, checktype, INT2FIX(T_ARRAY));
2454 PUSH_INSNL(ret, location, branchunless, type_error_label);
2455 PUSH_LABEL(ret, deconstructed_label);
2456
2457 return COMPILE_OK;
2458}
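// For context, a hedged Ruby-level example of what this helper checks: a class
// opts into array patterns by implementing #deconstruct.
//
//     class Point
//       def initialize(x, y)
//         @x, @y = x, y
//       end
//
//       def deconstruct
//         [@x, @y]
//       end
//     end
//
//     Point.new(1, 2) => [x, y]  # matches because #deconstruct returns an Array
//
// Objects that do not respond to #deconstruct fail the match, and a
// #deconstruct that returns anything other than an Array raises a TypeError.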
2459
2464static int
2465pm_compile_pattern_constant(rb_iseq_t *iseq, pm_scope_node_t *scope_node, const pm_node_t *node, LINK_ANCHOR *const ret, LABEL *match_failed_label, bool in_single_pattern, unsigned int base_index)
2466{
2467 const pm_node_location_t location = PM_NODE_START_LOCATION(scope_node->parser, node);
2468
2469 PUSH_INSN(ret, location, dup);
2470 PM_COMPILE_NOT_POPPED(node);
2471
2472 if (in_single_pattern) {
2473 PUSH_INSN1(ret, location, dupn, INT2FIX(2));
2474 }
2475 PUSH_INSN1(ret, location, checkmatch, INT2FIX(VM_CHECKMATCH_TYPE_CASE));
2476 if (in_single_pattern) {
2477 CHECK(pm_compile_pattern_eqq_error(iseq, scope_node, node, ret, base_index + 3));
2478 }
2479 PUSH_INSNL(ret, location, branchunless, match_failed_label);
2480 return COMPILE_OK;
2481}
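// Hedged example: in a pattern like `value => Point[x, y]`, the `Point` part
// is the constant handled here. It is compared against the matchee with
// checkmatch, which behaves like `Point === value`, before any deconstruction
// happens; if that check fails, the whole pattern fails.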
2482
2487static void
2488pm_compile_pattern_error_handler(rb_iseq_t *iseq, const pm_scope_node_t *scope_node, const pm_node_t *node, LINK_ANCHOR *const ret, LABEL *done_label, bool popped)
2489{
2490 const pm_node_location_t location = PM_NODE_START_LOCATION(scope_node->parser, node);
2491 LABEL *key_error_label = NEW_LABEL(location.line);
2492 LABEL *cleanup_label = NEW_LABEL(location.line);
2493
2494 struct rb_callinfo_kwarg *kw_arg = rb_xmalloc_mul_add(2, sizeof(VALUE), sizeof(struct rb_callinfo_kwarg));
2495 kw_arg->references = 0;
2496 kw_arg->keyword_len = 2;
2497 kw_arg->keywords[0] = ID2SYM(rb_intern("matchee"));
2498 kw_arg->keywords[1] = ID2SYM(rb_intern("key"));
2499
2500 PUSH_INSN1(ret, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
2501 PUSH_INSN1(ret, location, topn, INT2FIX(PM_PATTERN_BASE_INDEX_OFFSET_KEY_ERROR_P + 2));
2502 PUSH_INSNL(ret, location, branchif, key_error_label);
2503
2504 PUSH_INSN1(ret, location, putobject, rb_eNoMatchingPatternError);
2505 PUSH_INSN1(ret, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
2506
2507 {
2508 VALUE operand = rb_fstring_lit("%p: %s");
2509 PUSH_INSN1(ret, location, putobject, operand);
2510 }
2511
2512 PUSH_INSN1(ret, location, topn, INT2FIX(4));
2513 PUSH_INSN1(ret, location, topn, INT2FIX(PM_PATTERN_BASE_INDEX_OFFSET_ERROR_STRING + 6));
2514 PUSH_SEND(ret, location, id_core_sprintf, INT2FIX(3));
2515 PUSH_SEND(ret, location, id_core_raise, INT2FIX(2));
2516 PUSH_INSNL(ret, location, jump, cleanup_label);
2517
2518 PUSH_LABEL(ret, key_error_label);
2519 PUSH_INSN1(ret, location, putobject, rb_eNoMatchingPatternKeyError);
2520 PUSH_INSN1(ret, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
2521
2522 {
2523 VALUE operand = rb_fstring_lit("%p: %s");
2524 PUSH_INSN1(ret, location, putobject, operand);
2525 }
2526
2527 PUSH_INSN1(ret, location, topn, INT2FIX(4));
2528 PUSH_INSN1(ret, location, topn, INT2FIX(PM_PATTERN_BASE_INDEX_OFFSET_ERROR_STRING + 6));
2529 PUSH_SEND(ret, location, id_core_sprintf, INT2FIX(3));
2530 PUSH_INSN1(ret, location, topn, INT2FIX(PM_PATTERN_BASE_INDEX_OFFSET_KEY_ERROR_MATCHEE + 4));
2531 PUSH_INSN1(ret, location, topn, INT2FIX(PM_PATTERN_BASE_INDEX_OFFSET_KEY_ERROR_KEY + 5));
2532 PUSH_SEND_R(ret, location, rb_intern("new"), INT2FIX(1), NULL, INT2FIX(VM_CALL_KWARG), kw_arg);
2533 PUSH_SEND(ret, location, id_core_raise, INT2FIX(1));
2534 PUSH_LABEL(ret, cleanup_label);
2535
2536 PUSH_INSN1(ret, location, adjuststack, INT2FIX(7));
2537 if (!popped) PUSH_INSN(ret, location, putnil);
2538 PUSH_INSNL(ret, location, jump, done_label);
2539 PUSH_INSN1(ret, location, dupn, INT2FIX(5));
2540 if (popped) PUSH_INSN(ret, location, putnil);
2541}
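// Informally, this handler is what turns a failed single-pattern match into an
// exception. A sketch of the observable behavior (assuming standard pattern
// matching semantics):
//
//     0 => String            # raises NoMatchingPatternError
//     {a: 1} => {b: Integer} # raises NoMatchingPatternKeyError (key :b missing)
//
// The key-error branch builds the exception with the matchee: and key: keyword
// arguments collected earlier on the stack.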
2542
2546static int
2547pm_compile_pattern(rb_iseq_t *iseq, pm_scope_node_t *scope_node, const pm_node_t *node, LINK_ANCHOR *const ret, LABEL *matched_label, LABEL *unmatched_label, bool in_single_pattern, bool use_deconstructed_cache, unsigned int base_index)
2548{
2549 const pm_node_location_t location = PM_NODE_START_LOCATION(scope_node->parser, node);
2550
2551 switch (PM_NODE_TYPE(node)) {
2552 case PM_ARRAY_PATTERN_NODE: {
2553 // Array patterns in pattern matching are triggered by using commas in
2554 // a pattern or wrapping it in brackets. They are represented by an
2555 // ArrayPatternNode. This looks like:
2556 //
2557 // foo => [1, 2, 3]
2558 //
2559 // It can optionally have a splat in the middle of it, which can
2560 // optionally have a name attached.
2561 const pm_array_pattern_node_t *cast = (const pm_array_pattern_node_t *) node;
2562
2563 const size_t requireds_size = cast->requireds.size;
2564 const size_t posts_size = cast->posts.size;
2565 const size_t minimum_size = requireds_size + posts_size;
2566
2567 bool rest_named = false;
2568 bool use_rest_size = false;
2569
2570 if (cast->rest != NULL) {
2571 rest_named = (PM_NODE_TYPE_P(cast->rest, PM_SPLAT_NODE) && ((const pm_splat_node_t *) cast->rest)->expression != NULL);
2572 use_rest_size = (rest_named || (!rest_named && posts_size > 0));
2573 }
2574
2575 LABEL *match_failed_label = NEW_LABEL(location.line);
2576 LABEL *type_error_label = NEW_LABEL(location.line);
2577 LABEL *deconstruct_label = NEW_LABEL(location.line);
2578 LABEL *deconstructed_label = NEW_LABEL(location.line);
2579
2580 if (use_rest_size) {
2581 PUSH_INSN1(ret, location, putobject, INT2FIX(0));
2582 PUSH_INSN(ret, location, swap);
2583 base_index++;
2584 }
2585
2586 if (cast->constant != NULL) {
2587 CHECK(pm_compile_pattern_constant(iseq, scope_node, cast->constant, ret, match_failed_label, in_single_pattern, base_index));
2588 }
2589
2590 CHECK(pm_compile_pattern_deconstruct(iseq, scope_node, node, ret, deconstruct_label, match_failed_label, deconstructed_label, type_error_label, in_single_pattern, use_deconstructed_cache, base_index));
2591
2592 PUSH_INSN(ret, location, dup);
2593 PUSH_SEND(ret, location, idLength, INT2FIX(0));
2594 PUSH_INSN1(ret, location, putobject, INT2FIX(minimum_size));
2595 PUSH_SEND(ret, location, cast->rest == NULL ? idEq : idGE, INT2FIX(1));
2596 if (in_single_pattern) {
2597 VALUE message = cast->rest == NULL ? rb_fstring_lit("%p length mismatch (given %p, expected %p)") : rb_fstring_lit("%p length mismatch (given %p, expected %p+)");
2598 CHECK(pm_compile_pattern_length_error(iseq, scope_node, node, ret, message, INT2FIX(minimum_size), base_index + 1));
2599 }
2600 PUSH_INSNL(ret, location, branchunless, match_failed_label);
2601
2602 for (size_t index = 0; index < requireds_size; index++) {
2603 const pm_node_t *required = cast->requireds.nodes[index];
2604 PUSH_INSN(ret, location, dup);
2605 PUSH_INSN1(ret, location, putobject, INT2FIX(index));
2606 PUSH_SEND(ret, location, idAREF, INT2FIX(1));
2607 CHECK(pm_compile_pattern_match(iseq, scope_node, required, ret, match_failed_label, in_single_pattern, false, base_index + 1));
2608 }
2609
2610 if (cast->rest != NULL) {
2611 if (rest_named) {
2612 PUSH_INSN(ret, location, dup);
2613 PUSH_INSN1(ret, location, putobject, INT2FIX(requireds_size));
2614 PUSH_INSN1(ret, location, topn, INT2FIX(1));
2615 PUSH_SEND(ret, location, idLength, INT2FIX(0));
2616 PUSH_INSN1(ret, location, putobject, INT2FIX(minimum_size));
2617 PUSH_SEND(ret, location, idMINUS, INT2FIX(1));
2618 PUSH_INSN1(ret, location, setn, INT2FIX(4));
2619 PUSH_SEND(ret, location, idAREF, INT2FIX(2));
2620 CHECK(pm_compile_pattern_match(iseq, scope_node, ((const pm_splat_node_t *) cast->rest)->expression, ret, match_failed_label, in_single_pattern, false, base_index + 1));
2621 }
2622 else if (posts_size > 0) {
2623 PUSH_INSN(ret, location, dup);
2624 PUSH_SEND(ret, location, idLength, INT2FIX(0));
2625 PUSH_INSN1(ret, location, putobject, INT2FIX(minimum_size));
2626 PUSH_SEND(ret, location, idMINUS, INT2FIX(1));
2627 PUSH_INSN1(ret, location, setn, INT2FIX(2));
2628 PUSH_INSN(ret, location, pop);
2629 }
2630 }
2631
2632 for (size_t index = 0; index < posts_size; index++) {
2633 const pm_node_t *post = cast->posts.nodes[index];
2634 PUSH_INSN(ret, location, dup);
2635
2636 PUSH_INSN1(ret, location, putobject, INT2FIX(requireds_size + index));
2637 PUSH_INSN1(ret, location, topn, INT2FIX(3));
2638 PUSH_SEND(ret, location, idPLUS, INT2FIX(1));
2639 PUSH_SEND(ret, location, idAREF, INT2FIX(1));
2640 CHECK(pm_compile_pattern_match(iseq, scope_node, post, ret, match_failed_label, in_single_pattern, false, base_index + 1));
2641 }
2642
2643 PUSH_INSN(ret, location, pop);
2644 if (use_rest_size) {
2645 PUSH_INSN(ret, location, pop);
2646 }
2647
2648 PUSH_INSNL(ret, location, jump, matched_label);
2649 PUSH_INSN(ret, location, putnil);
2650 if (use_rest_size) {
2651 PUSH_INSN(ret, location, putnil);
2652 }
2653
2654 PUSH_LABEL(ret, type_error_label);
2655 PUSH_INSN1(ret, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
2656 PUSH_INSN1(ret, location, putobject, rb_eTypeError);
2657
2658 {
2659 VALUE operand = rb_fstring_lit("deconstruct must return Array");
2660 PUSH_INSN1(ret, location, putobject, operand);
2661 }
2662
2663 PUSH_SEND(ret, location, id_core_raise, INT2FIX(2));
2664 PUSH_INSN(ret, location, pop);
2665
2666 PUSH_LABEL(ret, match_failed_label);
2667 PUSH_INSN(ret, location, pop);
2668 if (use_rest_size) {
2669 PUSH_INSN(ret, location, pop);
2670 }
2671
2672 PUSH_INSNL(ret, location, jump, unmatched_label);
2673 break;
2674 }
2675 case PM_FIND_PATTERN_NODE: {
2676 // Find patterns in pattern matching are triggered by using commas in
2677 // a pattern or wrapping it in brackets and using a splat on both the left
2678 // and right sides of the pattern. This looks like:
2679 //
2680 // foo => [*, 1, 2, 3, *]
2681 //
2682 // There can be any number of requireds in the middle. The splats on
2683 // both sides can optionally have names attached.
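// (A hedged sketch of the strategy below: the deconstructed array is scanned
// with a window of `size` consecutive elements, sliding the starting index
// from 0 up to length - size. The pattern matches at the first offset where
// every required sub-pattern matches; the left and right splats then capture
// the elements before and after that window.)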
2684 const pm_find_pattern_node_t *cast = (const pm_find_pattern_node_t *) node;
2685 const size_t size = cast->requireds.size;
2686
2687 LABEL *match_failed_label = NEW_LABEL(location.line);
2688 LABEL *type_error_label = NEW_LABEL(location.line);
2689 LABEL *deconstruct_label = NEW_LABEL(location.line);
2690 LABEL *deconstructed_label = NEW_LABEL(location.line);
2691
2692 if (cast->constant) {
2693 CHECK(pm_compile_pattern_constant(iseq, scope_node, cast->constant, ret, match_failed_label, in_single_pattern, base_index));
2694 }
2695
2696 CHECK(pm_compile_pattern_deconstruct(iseq, scope_node, node, ret, deconstruct_label, match_failed_label, deconstructed_label, type_error_label, in_single_pattern, use_deconstructed_cache, base_index));
2697
2698 PUSH_INSN(ret, location, dup);
2699 PUSH_SEND(ret, location, idLength, INT2FIX(0));
2700 PUSH_INSN1(ret, location, putobject, INT2FIX(size));
2701 PUSH_SEND(ret, location, idGE, INT2FIX(1));
2702 if (in_single_pattern) {
2703 CHECK(pm_compile_pattern_length_error(iseq, scope_node, node, ret, rb_fstring_lit("%p length mismatch (given %p, expected %p+)"), INT2FIX(size), base_index + 1));
2704 }
2705 PUSH_INSNL(ret, location, branchunless, match_failed_label);
2706
2707 {
2708 LABEL *while_begin_label = NEW_LABEL(location.line);
2709 LABEL *next_loop_label = NEW_LABEL(location.line);
2710 LABEL *find_succeeded_label = NEW_LABEL(location.line);
2711 LABEL *find_failed_label = NEW_LABEL(location.line);
2712
2713 PUSH_INSN(ret, location, dup);
2714 PUSH_SEND(ret, location, idLength, INT2FIX(0));
2715
2716 PUSH_INSN(ret, location, dup);
2717 PUSH_INSN1(ret, location, putobject, INT2FIX(size));
2718 PUSH_SEND(ret, location, idMINUS, INT2FIX(1));
2719 PUSH_INSN1(ret, location, putobject, INT2FIX(0));
2720 PUSH_LABEL(ret, while_begin_label);
2721
2722 PUSH_INSN(ret, location, dup);
2723 PUSH_INSN1(ret, location, topn, INT2FIX(2));
2724 PUSH_SEND(ret, location, idLE, INT2FIX(1));
2725 PUSH_INSNL(ret, location, branchunless, find_failed_label);
2726
2727 for (size_t index = 0; index < size; index++) {
2728 PUSH_INSN1(ret, location, topn, INT2FIX(3));
2729 PUSH_INSN1(ret, location, topn, INT2FIX(1));
2730
2731 if (index != 0) {
2732 PUSH_INSN1(ret, location, putobject, INT2FIX(index));
2733 PUSH_SEND(ret, location, idPLUS, INT2FIX(1));
2734 }
2735
2736 PUSH_SEND(ret, location, idAREF, INT2FIX(1));
2737 CHECK(pm_compile_pattern_match(iseq, scope_node, cast->requireds.nodes[index], ret, next_loop_label, in_single_pattern, false, base_index + 4));
2738 }
2739
2740 const pm_splat_node_t *left = cast->left;
2741
2742 if (left->expression != NULL) {
2743 PUSH_INSN1(ret, location, topn, INT2FIX(3));
2744 PUSH_INSN1(ret, location, putobject, INT2FIX(0));
2745 PUSH_INSN1(ret, location, topn, INT2FIX(2));
2746 PUSH_SEND(ret, location, idAREF, INT2FIX(2));
2747 CHECK(pm_compile_pattern_match(iseq, scope_node, left->expression, ret, find_failed_label, in_single_pattern, false, base_index + 4));
2748 }
2749
2750 RUBY_ASSERT(PM_NODE_TYPE_P(cast->right, PM_SPLAT_NODE));
2751 const pm_splat_node_t *right = (const pm_splat_node_t *) cast->right;
2752
2753 if (right->expression != NULL) {
2754 PUSH_INSN1(ret, location, topn, INT2FIX(3));
2755 PUSH_INSN1(ret, location, topn, INT2FIX(1));
2756 PUSH_INSN1(ret, location, putobject, INT2FIX(size));
2757 PUSH_SEND(ret, location, idPLUS, INT2FIX(1));
2758 PUSH_INSN1(ret, location, topn, INT2FIX(3));
2759 PUSH_SEND(ret, location, idAREF, INT2FIX(2));
2760 pm_compile_pattern_match(iseq, scope_node, right->expression, ret, find_failed_label, in_single_pattern, false, base_index + 4);
2761 }
2762
2763 PUSH_INSNL(ret, location, jump, find_succeeded_label);
2764
2765 PUSH_LABEL(ret, next_loop_label);
2766 PUSH_INSN1(ret, location, putobject, INT2FIX(1));
2767 PUSH_SEND(ret, location, idPLUS, INT2FIX(1));
2768 PUSH_INSNL(ret, location, jump, while_begin_label);
2769
2770 PUSH_LABEL(ret, find_failed_label);
2771 PUSH_INSN1(ret, location, adjuststack, INT2FIX(3));
2772 if (in_single_pattern) {
2773 PUSH_INSN1(ret, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
2774
2775 {
2776 VALUE operand = rb_fstring_lit("%p does not match to find pattern");
2777 PUSH_INSN1(ret, location, putobject, operand);
2778 }
2779
2780 PUSH_INSN1(ret, location, topn, INT2FIX(2));
2781 PUSH_SEND(ret, location, id_core_sprintf, INT2FIX(2));
2782 PUSH_INSN1(ret, location, setn, INT2FIX(base_index + PM_PATTERN_BASE_INDEX_OFFSET_ERROR_STRING + 1));
2783
2784 PUSH_INSN1(ret, location, putobject, Qfalse);
2785 PUSH_INSN1(ret, location, setn, INT2FIX(base_index + PM_PATTERN_BASE_INDEX_OFFSET_KEY_ERROR_P + 2));
2786
2787 PUSH_INSN(ret, location, pop);
2788 PUSH_INSN(ret, location, pop);
2789 }
2790 PUSH_INSNL(ret, location, jump, match_failed_label);
2791 PUSH_INSN1(ret, location, dupn, INT2FIX(3));
2792
2793 PUSH_LABEL(ret, find_succeeded_label);
2794 PUSH_INSN1(ret, location, adjuststack, INT2FIX(3));
2795 }
2796
2797 PUSH_INSN(ret, location, pop);
2798 PUSH_INSNL(ret, location, jump, matched_label);
2799 PUSH_INSN(ret, location, putnil);
2800
2801 PUSH_LABEL(ret, type_error_label);
2802 PUSH_INSN1(ret, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
2803 PUSH_INSN1(ret, location, putobject, rb_eTypeError);
2804
2805 {
2806 VALUE operand = rb_fstring_lit("deconstruct must return Array");
2807 PUSH_INSN1(ret, location, putobject, operand);
2808 }
2809
2810 PUSH_SEND(ret, location, id_core_raise, INT2FIX(2));
2811 PUSH_INSN(ret, location, pop);
2812
2813 PUSH_LABEL(ret, match_failed_label);
2814 PUSH_INSN(ret, location, pop);
2815 PUSH_INSNL(ret, location, jump, unmatched_label);
2816
2817 break;
2818 }
2819 case PM_HASH_PATTERN_NODE: {
2820 // Hash patterns in pattern matching are triggered by using labels and
2821 // values in a pattern or by using the ** operator. They are represented
2822 // by the HashPatternNode. This looks like:
2823 //
2824 // foo => { a: 1, b: 2, **bar }
2825 //
2826 // It can optionally have an assoc splat in the middle of it, which can
2827 // optionally have a name.
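// (Hedged note on the strategy below: when the pattern has no ** rest, the
// literal symbol keys are collected into a frozen array and passed to
// #deconstruct_keys so the receiver can compute only those entries. When a
// rest is present, nil is passed instead to request the full hash, which is
// then dup'd so that matched keys can be deleted from it.)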
2828 const pm_hash_pattern_node_t *cast = (const pm_hash_pattern_node_t *) node;
2829
2830 // We don't consider it a "rest" parameter if it's a ** that is unnamed.
2831 bool has_rest = cast->rest != NULL && !(PM_NODE_TYPE_P(cast->rest, PM_ASSOC_SPLAT_NODE) && ((const pm_assoc_splat_node_t *) cast->rest)->value == NULL);
2832 bool has_keys = cast->elements.size > 0 || cast->rest != NULL;
2833
2834 LABEL *match_failed_label = NEW_LABEL(location.line);
2835 LABEL *type_error_label = NEW_LABEL(location.line);
2836 VALUE keys = Qnil;
2837
2838 if (has_keys && !has_rest) {
2839 keys = rb_ary_new_capa(cast->elements.size);
2840
2841 for (size_t index = 0; index < cast->elements.size; index++) {
2842 const pm_node_t *element = cast->elements.nodes[index];
2843 RUBY_ASSERT(PM_NODE_TYPE_P(element, PM_ASSOC_NODE));
2844
2845 const pm_node_t *key = ((const pm_assoc_node_t *) element)->key;
2846 RUBY_ASSERT(PM_NODE_TYPE_P(key, PM_SYMBOL_NODE));
2847
2848 VALUE symbol = ID2SYM(parse_string_symbol(scope_node, (const pm_symbol_node_t *) key));
2849 rb_ary_push(keys, symbol);
2850 }
2851 }
2852
2853 if (cast->constant) {
2854 CHECK(pm_compile_pattern_constant(iseq, scope_node, cast->constant, ret, match_failed_label, in_single_pattern, base_index));
2855 }
2856
2857 PUSH_INSN(ret, location, dup);
2858
2859 {
2860 VALUE operand = ID2SYM(rb_intern("deconstruct_keys"));
2861 PUSH_INSN1(ret, location, putobject, operand);
2862 }
2863
2864 PUSH_SEND(ret, location, idRespond_to, INT2FIX(1));
2865 if (in_single_pattern) {
2866 CHECK(pm_compile_pattern_generic_error(iseq, scope_node, node, ret, rb_fstring_lit("%p does not respond to #deconstruct_keys"), base_index + 1));
2867 }
2868 PUSH_INSNL(ret, location, branchunless, match_failed_label);
2869
2870 if (NIL_P(keys)) {
2871 PUSH_INSN(ret, location, putnil);
2872 }
2873 else {
2874 rb_obj_hide(keys);
2875 RB_OBJ_SET_FROZEN_SHAREABLE(keys);
2876 PUSH_INSN1(ret, location, duparray, keys);
2877 RB_OBJ_WRITTEN(iseq, Qundef, keys);
2878 }
2879 PUSH_SEND(ret, location, rb_intern("deconstruct_keys"), INT2FIX(1));
2880
2881 PUSH_INSN(ret, location, dup);
2882 PUSH_INSN1(ret, location, checktype, INT2FIX(T_HASH));
2883 PUSH_INSNL(ret, location, branchunless, type_error_label);
2884
2885 if (has_rest) {
2886 PUSH_SEND(ret, location, rb_intern("dup"), INT2FIX(0));
2887 }
2888
2889 if (has_keys) {
2890 DECL_ANCHOR(match_values);
2891
2892 for (size_t index = 0; index < cast->elements.size; index++) {
2893 const pm_node_t *element = cast->elements.nodes[index];
2894 RUBY_ASSERT(PM_NODE_TYPE_P(element, PM_ASSOC_NODE));
2895
2896 const pm_assoc_node_t *assoc = (const pm_assoc_node_t *) element;
2897 const pm_node_t *key = assoc->key;
2898 RUBY_ASSERT(PM_NODE_TYPE_P(key, PM_SYMBOL_NODE));
2899
2900 VALUE symbol = ID2SYM(parse_string_symbol(scope_node, (const pm_symbol_node_t *) key));
2901 PUSH_INSN(ret, location, dup);
2902 PUSH_INSN1(ret, location, putobject, symbol);
2903 PUSH_SEND(ret, location, rb_intern("key?"), INT2FIX(1));
2904
2905 if (in_single_pattern) {
2906 LABEL *match_succeeded_label = NEW_LABEL(location.line);
2907
2908 PUSH_INSN(ret, location, dup);
2909 PUSH_INSNL(ret, location, branchif, match_succeeded_label);
2910
2911 {
2912 VALUE operand = rb_str_freeze(rb_sprintf("key not found: %+"PRIsVALUE, symbol));
2913 RB_OBJ_SET_SHAREABLE(operand);
2914 PUSH_INSN1(ret, location, putobject, operand);
2915 }
2916
2917 PUSH_INSN1(ret, location, setn, INT2FIX(base_index + PM_PATTERN_BASE_INDEX_OFFSET_ERROR_STRING + 2));
2918 PUSH_INSN1(ret, location, putobject, Qtrue);
2919 PUSH_INSN1(ret, location, setn, INT2FIX(base_index + PM_PATTERN_BASE_INDEX_OFFSET_KEY_ERROR_P + 3));
2920 PUSH_INSN1(ret, location, topn, INT2FIX(3));
2921 PUSH_INSN1(ret, location, setn, INT2FIX(base_index + PM_PATTERN_BASE_INDEX_OFFSET_KEY_ERROR_MATCHEE + 4));
2922 PUSH_INSN1(ret, location, putobject, symbol);
2923 PUSH_INSN1(ret, location, setn, INT2FIX(base_index + PM_PATTERN_BASE_INDEX_OFFSET_KEY_ERROR_KEY + 5));
2924
2925 PUSH_INSN1(ret, location, adjuststack, INT2FIX(4));
2926 PUSH_LABEL(ret, match_succeeded_label);
2927 }
2928
2929 PUSH_INSNL(ret, location, branchunless, match_failed_label);
2930 PUSH_INSN(match_values, location, dup);
2931 PUSH_INSN1(match_values, location, putobject, symbol);
2932 PUSH_SEND(match_values, location, has_rest ? rb_intern("delete") : idAREF, INT2FIX(1));
2933
2934 const pm_node_t *value = assoc->value;
2935 if (PM_NODE_TYPE_P(value, PM_IMPLICIT_NODE)) {
2936 value = ((const pm_implicit_node_t *) value)->value;
2937 }
2938
2939 CHECK(pm_compile_pattern_match(iseq, scope_node, value, match_values, match_failed_label, in_single_pattern, false, base_index + 1));
2940 }
2941
2942 PUSH_SEQ(ret, match_values);
2943 }
2944 else {
2945 PUSH_INSN(ret, location, dup);
2946 PUSH_SEND(ret, location, idEmptyP, INT2FIX(0));
2947 if (in_single_pattern) {
2948 CHECK(pm_compile_pattern_generic_error(iseq, scope_node, node, ret, rb_fstring_lit("%p is not empty"), base_index + 1));
2949 }
2950 PUSH_INSNL(ret, location, branchunless, match_failed_label);
2951 }
2952
2953 if (has_rest) {
2954 switch (PM_NODE_TYPE(cast->rest)) {
2955 case PM_NO_KEYWORDS_PARAMETER_NODE: {
2956 PUSH_INSN(ret, location, dup);
2957 PUSH_SEND(ret, location, idEmptyP, INT2FIX(0));
2958 if (in_single_pattern) {
2959 pm_compile_pattern_generic_error(iseq, scope_node, node, ret, rb_fstring_lit("rest of %p is not empty"), base_index + 1);
2960 }
2961 PUSH_INSNL(ret, location, branchunless, match_failed_label);
2962 break;
2963 }
2964 case PM_ASSOC_SPLAT_NODE: {
2965 const pm_assoc_splat_node_t *splat = (const pm_assoc_splat_node_t *) cast->rest;
2966 PUSH_INSN(ret, location, dup);
2967 pm_compile_pattern_match(iseq, scope_node, splat->value, ret, match_failed_label, in_single_pattern, false, base_index + 1);
2968 break;
2969 }
2970 default:
2971 rb_bug("unreachable");
2972 break;
2973 }
2974 }
2975
2976 PUSH_INSN(ret, location, pop);
2977 PUSH_INSNL(ret, location, jump, matched_label);
2978 PUSH_INSN(ret, location, putnil);
2979
2980 PUSH_LABEL(ret, type_error_label);
2981 PUSH_INSN1(ret, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
2982 PUSH_INSN1(ret, location, putobject, rb_eTypeError);
2983
2984 {
2985 VALUE operand = rb_fstring_lit("deconstruct_keys must return Hash");
2986 PUSH_INSN1(ret, location, putobject, operand);
2987 }
2988
2989 PUSH_SEND(ret, location, id_core_raise, INT2FIX(2));
2990 PUSH_INSN(ret, location, pop);
2991
2992 PUSH_LABEL(ret, match_failed_label);
2993 PUSH_INSN(ret, location, pop);
2994 PUSH_INSNL(ret, location, jump, unmatched_label);
2995 break;
2996 }
2997 case PM_CAPTURE_PATTERN_NODE: {
2998 // Capture patterns allow you to pattern match against an element in a
2999 // pattern and also capture the value into a local variable. This looks
3000 // like:
3001 //
3002 // [1] => [Integer => foo]
3003 //
3004 // In this case the `Integer => foo` will be represented by a
3005 // CapturePatternNode, which has both a value (the pattern to match
3006 // against) and a target (the place to write the variable into).
3007 const pm_capture_pattern_node_t *cast = (const pm_capture_pattern_node_t *) node;
3008
3009 LABEL *match_failed_label = NEW_LABEL(location.line);
3010
3011 PUSH_INSN(ret, location, dup);
3012 CHECK(pm_compile_pattern_match(iseq, scope_node, cast->value, ret, match_failed_label, in_single_pattern, use_deconstructed_cache, base_index + 1));
3013 CHECK(pm_compile_pattern(iseq, scope_node, (const pm_node_t *) cast->target, ret, matched_label, match_failed_label, in_single_pattern, false, base_index));
3014 PUSH_INSN(ret, location, putnil);
3015
3016 PUSH_LABEL(ret, match_failed_label);
3017 PUSH_INSN(ret, location, pop);
3018 PUSH_INSNL(ret, location, jump, unmatched_label);
3019
3020 break;
3021 }
3022 case PM_LOCAL_VARIABLE_TARGET_NODE: {
3023 // Local variables can be targeted by placing identifiers in the place
3024 // of a pattern. For example, foo in bar. This results in the matched
3025 // value being written to that local variable.
3026 const pm_local_variable_target_node_t *cast = (const pm_local_variable_target_node_t *) node;
3027 pm_local_index_t index = pm_lookup_local_index(iseq, scope_node, cast->name, cast->depth);
3028
3029 PUSH_SETLOCAL(ret, location, index.index, index.level);
3030 PUSH_INSNL(ret, location, jump, matched_label);
3031 break;
3032 }
3033 case PM_ALTERNATION_PATTERN_NODE: {
3034 // Alternation patterns allow you to specify multiple patterns in a
3035 // single expression using the | operator.
3036 const pm_alternation_pattern_node_t *cast = (const pm_alternation_pattern_node_t *) node;
3037
3038 LABEL *matched_left_label = NEW_LABEL(location.line);
3039 LABEL *unmatched_left_label = NEW_LABEL(location.line);
3040
3041 // First, we're going to attempt to match against the left pattern. If
3042 // that pattern matches, then we'll skip matching the right pattern.
3043 PUSH_INSN(ret, location, dup);
3044 CHECK(pm_compile_pattern(iseq, scope_node, cast->left, ret, matched_left_label, unmatched_left_label, in_single_pattern, use_deconstructed_cache, base_index + 1));
3045
3046 // If we get here, then we matched on the left pattern. In this case we
3047 // should pop out the duplicate value that we preemptively added to
3048 // match against the right pattern and then jump to the match label.
3049 PUSH_LABEL(ret, matched_left_label);
3050 PUSH_INSN(ret, location, pop);
3051 PUSH_INSNL(ret, location, jump, matched_label);
3052 PUSH_INSN(ret, location, putnil);
3053
3054 // If we get here, then we didn't match on the left pattern. In this
3055 // case we attempt to match against the right pattern.
3056 PUSH_LABEL(ret, unmatched_left_label);
3057 CHECK(pm_compile_pattern(iseq, scope_node, cast->right, ret, matched_label, unmatched_label, in_single_pattern, use_deconstructed_cache, base_index));
3058 break;
3059 }
3060 case PM_PARENTHESES_NODE:
3061 // Parentheses are allowed to wrap expressions in pattern matching and
3062 // they do nothing since they can only wrap individual expressions and
3063 // not groups. In this case we'll recurse back into this same function
3064 // with the body of the parentheses.
3065 return pm_compile_pattern(iseq, scope_node, ((const pm_parentheses_node_t *) node)->body, ret, matched_label, unmatched_label, in_single_pattern, use_deconstructed_cache, base_index);
3066 case PM_PINNED_EXPRESSION_NODE:
3067 // Pinned expressions are a way to match against the value of an
3068 // expression that should be evaluated at runtime. This looks like:
3069 // foo in ^(bar). To compile these, we compile the expression as if it
3070 // were a literal value by falling through to the literal case.
3071 node = ((const pm_pinned_expression_node_t *) node)->expression;
3072 /* fallthrough */
3073 case PM_ARRAY_NODE:
3074 case PM_CLASS_VARIABLE_READ_NODE:
3075 case PM_CONSTANT_PATH_NODE:
3076 case PM_CONSTANT_READ_NODE:
3077 case PM_FALSE_NODE:
3078 case PM_FLOAT_NODE:
3079 case PM_GLOBAL_VARIABLE_READ_NODE:
3080 case PM_IMAGINARY_NODE:
3081 case PM_INSTANCE_VARIABLE_READ_NODE:
3082 case PM_IT_LOCAL_VARIABLE_READ_NODE:
3083 case PM_INTEGER_NODE:
3084 case PM_INTERPOLATED_REGULAR_EXPRESSION_NODE:
3085 case PM_INTERPOLATED_STRING_NODE:
3086 case PM_INTERPOLATED_SYMBOL_NODE:
3087 case PM_INTERPOLATED_X_STRING_NODE:
3088 case PM_LAMBDA_NODE:
3089 case PM_LOCAL_VARIABLE_READ_NODE:
3090 case PM_NIL_NODE:
3091 case PM_SOURCE_ENCODING_NODE:
3092 case PM_SOURCE_FILE_NODE:
3093 case PM_SOURCE_LINE_NODE:
3094 case PM_RANGE_NODE:
3095 case PM_RATIONAL_NODE:
3096 case PM_REGULAR_EXPRESSION_NODE:
3097 case PM_SELF_NODE:
3098 case PM_STRING_NODE:
3099 case PM_SYMBOL_NODE:
3100 case PM_TRUE_NODE:
3101 case PM_X_STRING_NODE: {
3102 // These nodes are all simple patterns, which means we'll use the
3103 // checkmatch instruction to match against them, which is effectively a
3104 // VM-level === operator.
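        // For example, `5 in Integer` compiles the Integer constant read so
        // that it sits above the value being matched, and the checkmatch
        // below then behaves roughly like `Integer === 5`, with the result
        // driving the branch to the matched or unmatched label.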
3105 PM_COMPILE_NOT_POPPED(node);
3106 if (in_single_pattern) {
3107 PUSH_INSN1(ret, location, dupn, INT2FIX(2));
3108 }
3109
3110 PUSH_INSN1(ret, location, checkmatch, INT2FIX(VM_CHECKMATCH_TYPE_CASE));
3111
3112 if (in_single_pattern) {
3113 pm_compile_pattern_eqq_error(iseq, scope_node, node, ret, base_index + 2);
3114 }
3115
3116 PUSH_INSNL(ret, location, branchif, matched_label);
3117 PUSH_INSNL(ret, location, jump, unmatched_label);
3118 break;
3119 }
3120 case PM_PINNED_VARIABLE_NODE: {
3121 // Pinned variables are a way to match against the value of a variable
3122 // without it looking like you're trying to write to the variable. This
3123 // looks like: foo in ^@bar. To compile these, we compile the variable
3124 // that they hold.
3125 const pm_pinned_variable_node_t *cast = (const pm_pinned_variable_node_t *) node;
3126 CHECK(pm_compile_pattern(iseq, scope_node, cast->variable, ret, matched_label, unmatched_label, in_single_pattern, true, base_index));
3127 break;
3128 }
3129 case PM_IF_NODE:
3130 case PM_UNLESS_NODE: {
3131 // If and unless nodes can show up here as guards on `in` clauses. This
3132 // looks like:
3133 //
3134 // case foo
3135 // in bar if baz?
3136 // qux
3137 // end
3138 //
3139 // Because we know they're in the modifier form and they can't have any
3140 // variation on this pattern, we compile them differently (more simply)
3141 // here than we would in the normal compilation path.
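        // Note that for `in bar if baz?` the single statement inside the
        // modifier is the pattern (`bar`) and the predicate is the guard
        // (`baz?`), which is why the pattern is compiled via
        // pm_compile_pattern_match below and the predicate is compiled as a
        // plain expression afterwards.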
3142 const pm_node_t *predicate;
3143 const pm_node_t *statement;
3144
3145 if (PM_NODE_TYPE_P(node, PM_IF_NODE)) {
3146 const pm_if_node_t *cast = (const pm_if_node_t *) node;
3147 predicate = cast->predicate;
3148
3149 RUBY_ASSERT(cast->statements != NULL && cast->statements->body.size == 1);
3150 statement = cast->statements->body.nodes[0];
3151 }
3152 else {
3153 const pm_unless_node_t *cast = (const pm_unless_node_t *) node;
3154 predicate = cast->predicate;
3155
3156 RUBY_ASSERT(cast->statements != NULL && cast->statements->body.size == 1);
3157 statement = cast->statements->body.nodes[0];
3158 }
3159
3160 CHECK(pm_compile_pattern_match(iseq, scope_node, statement, ret, unmatched_label, in_single_pattern, use_deconstructed_cache, base_index));
3161 PM_COMPILE_NOT_POPPED(predicate);
3162
3163 if (in_single_pattern) {
3164 LABEL *match_succeeded_label = NEW_LABEL(location.line);
3165
3166 PUSH_INSN(ret, location, dup);
3167 if (PM_NODE_TYPE_P(node, PM_IF_NODE)) {
3168 PUSH_INSNL(ret, location, branchif, match_succeeded_label);
3169 }
3170 else {
3171 PUSH_INSNL(ret, location, branchunless, match_succeeded_label);
3172 }
3173
3174 {
3175 VALUE operand = rb_fstring_lit("guard clause does not return true");
3176 PUSH_INSN1(ret, location, putobject, operand);
3177 }
3178
3179 PUSH_INSN1(ret, location, setn, INT2FIX(base_index + PM_PATTERN_BASE_INDEX_OFFSET_ERROR_STRING + 1));
3180 PUSH_INSN1(ret, location, putobject, Qfalse);
3181 PUSH_INSN1(ret, location, setn, INT2FIX(base_index + PM_PATTERN_BASE_INDEX_OFFSET_KEY_ERROR_P + 2));
3182
3183 PUSH_INSN(ret, location, pop);
3184 PUSH_INSN(ret, location, pop);
3185
3186 PUSH_LABEL(ret, match_succeeded_label);
3187 }
3188
3189 if (PM_NODE_TYPE_P(node, PM_IF_NODE)) {
3190 PUSH_INSNL(ret, location, branchunless, unmatched_label);
3191 }
3192 else {
3193 PUSH_INSNL(ret, location, branchif, unmatched_label);
3194 }
3195
3196 PUSH_INSNL(ret, location, jump, matched_label);
3197 break;
3198 }
3199 default:
3200 // If we get here, then we have a node type that should not be in this
3201 // position. This would be a bug in the parser, because a different node
3202 // type should never have been created in this position in the tree.
3203 rb_bug("Unexpected node type in pattern matching expression: %s", pm_node_type_to_str(PM_NODE_TYPE(node)));
3204 break;
3205 }
3206
3207 return COMPILE_OK;
3208}
3209
3210#undef PM_PATTERN_BASE_INDEX_OFFSET_DECONSTRUCTED_CACHE
3211#undef PM_PATTERN_BASE_INDEX_OFFSET_ERROR_STRING
3212#undef PM_PATTERN_BASE_INDEX_OFFSET_KEY_ERROR_P
3213#undef PM_PATTERN_BASE_INDEX_OFFSET_KEY_ERROR_MATCHEE
3214#undef PM_PATTERN_BASE_INDEX_OFFSET_KEY_ERROR_KEY
3215
3216// Generate a scope node from the given node.
3217void
3218pm_scope_node_init(const pm_node_t *node, pm_scope_node_t *scope, pm_scope_node_t *previous)
3219{
3220 // This is very important, otherwise the scope node could be seen as having
3221 // certain flags set that _should not_ be set.
3222 memset(scope, 0, sizeof(pm_scope_node_t));
3223
3224 scope->base.type = PM_SCOPE_NODE;
3225 scope->base.location.start = node->location.start;
3226 scope->base.location.end = node->location.end;
3227
3228 scope->previous = previous;
3229 scope->ast_node = (pm_node_t *) node;
3230
3231 if (previous) {
3232 scope->parser = previous->parser;
3233 scope->encoding = previous->encoding;
3234 scope->filepath_encoding = previous->filepath_encoding;
3235 scope->constants = previous->constants;
3236 scope->coverage_enabled = previous->coverage_enabled;
3237 scope->script_lines = previous->script_lines;
3238 }
3239
3240 switch (PM_NODE_TYPE(node)) {
3241 case PM_BLOCK_NODE: {
3242 const pm_block_node_t *cast = (const pm_block_node_t *) node;
3243 scope->body = cast->body;
3244 scope->locals = cast->locals;
3245 scope->parameters = cast->parameters;
3246 break;
3247 }
3248 case PM_CLASS_NODE: {
3249 const pm_class_node_t *cast = (const pm_class_node_t *) node;
3250 scope->body = cast->body;
3251 scope->locals = cast->locals;
3252 break;
3253 }
3254 case PM_DEF_NODE: {
3255 const pm_def_node_t *cast = (const pm_def_node_t *) node;
3256 scope->parameters = (pm_node_t *) cast->parameters;
3257 scope->body = cast->body;
3258 scope->locals = cast->locals;
3259 break;
3260 }
3261 case PM_ENSURE_NODE: {
3262 const pm_ensure_node_t *cast = (const pm_ensure_node_t *) node;
3263 scope->body = (pm_node_t *) node;
3264
3265 if (cast->statements != NULL) {
3266 scope->base.location.start = cast->statements->base.location.start;
3267 scope->base.location.end = cast->statements->base.location.end;
3268 }
3269
3270 break;
3271 }
3272 case PM_FOR_NODE: {
3273 const pm_for_node_t *cast = (const pm_for_node_t *) node;
3274 scope->body = (pm_node_t *) cast->statements;
3275 break;
3276 }
3277 case PM_INTERPOLATED_REGULAR_EXPRESSION_NODE: {
3278 RUBY_ASSERT(node->flags & PM_REGULAR_EXPRESSION_FLAGS_ONCE);
3279 scope->body = (pm_node_t *) node;
3280 break;
3281 }
3282 case PM_LAMBDA_NODE: {
3283 const pm_lambda_node_t *cast = (const pm_lambda_node_t *) node;
3284 scope->parameters = cast->parameters;
3285 scope->body = cast->body;
3286 scope->locals = cast->locals;
3287
3288 if (cast->parameters != NULL) {
3289 scope->base.location.start = cast->parameters->location.start;
3290 }
3291 else {
3292 scope->base.location.start = cast->operator_loc.end;
3293 }
3294 break;
3295 }
3296 case PM_MODULE_NODE: {
3297 const pm_module_node_t *cast = (const pm_module_node_t *) node;
3298 scope->body = cast->body;
3299 scope->locals = cast->locals;
3300 break;
3301 }
3302 case PM_POST_EXECUTION_NODE: {
3303 const pm_post_execution_node_t *cast = (const pm_post_execution_node_t *) node;
3304 scope->body = (pm_node_t *) cast->statements;
3305 break;
3306 }
3307 case PM_PROGRAM_NODE: {
3308 const pm_program_node_t *cast = (const pm_program_node_t *) node;
3309 scope->body = (pm_node_t *) cast->statements;
3310 scope->locals = cast->locals;
3311 break;
3312 }
3313 case PM_RESCUE_NODE: {
3314 const pm_rescue_node_t *cast = (const pm_rescue_node_t *) node;
3315 scope->body = (pm_node_t *) cast->statements;
3316 break;
3317 }
3318 case PM_RESCUE_MODIFIER_NODE: {
3319 const pm_rescue_modifier_node_t *cast = (const pm_rescue_modifier_node_t *) node;
3320 scope->body = (pm_node_t *) cast->rescue_expression;
3321 break;
3322 }
3323 case PM_SINGLETON_CLASS_NODE: {
3324 const pm_singleton_class_node_t *cast = (const pm_singleton_class_node_t *) node;
3325 scope->body = cast->body;
3326 scope->locals = cast->locals;
3327 break;
3328 }
3329 case PM_STATEMENTS_NODE: {
3330 const pm_statements_node_t *cast = (const pm_statements_node_t *) node;
3331 scope->body = (pm_node_t *) cast;
3332 break;
3333 }
3334 default:
3335 rb_bug("unreachable");
3336 break;
3337 }
3338}
3339
3340void
3341pm_scope_node_destroy(pm_scope_node_t *scope_node)
3342{
3343 if (scope_node->index_lookup_table) {
3344 st_free_table(scope_node->index_lookup_table);
3345 }
3346}
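
// A typical usage pattern for these two functions, mirrored by the call sites
// later in this file (for example when compiling the block attached to a
// call), looks like:
//
//     pm_scope_node_t next_scope_node;
//     pm_scope_node_init(node, &next_scope_node, scope_node);
//     // ... build a child iseq from &next_scope_node ...
//     pm_scope_node_destroy(&next_scope_node);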
3347
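// Insert retry_end_l immediately after the last send-family instruction
// (send, invokesuper, sendforward, or invokesuperforward) already pushed onto
// ret, so that the CATCH_TYPE_BREAK entry the caller registers covers exactly
// that call.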
3359static void
3360pm_compile_retry_end_label(rb_iseq_t *iseq, LINK_ANCHOR *const ret, LABEL *retry_end_l)
3361{
3362 INSN *iobj;
3363 LINK_ELEMENT *last_elem = LAST_ELEMENT(ret);
3364 iobj = IS_INSN(last_elem) ? (INSN*) last_elem : (INSN*) get_prev_insn((INSN*) last_elem);
3365 while (!IS_INSN_ID(iobj, send) && !IS_INSN_ID(iobj, invokesuper) && !IS_INSN_ID(iobj, sendforward) && !IS_INSN_ID(iobj, invokesuperforward)) {
3366 iobj = (INSN*) get_prev_insn(iobj);
3367 }
3368 ELEM_INSERT_NEXT(&iobj->link, (LINK_ELEMENT*) retry_end_l);
3369
3370 // LINK_ANCHOR keeps a pointer to its last element, but ELEM_INSERT_NEXT
3371 // does not update it even when we insert an instruction after that last
3372 // element, so we update it manually here.
3373 if (&iobj->link == LAST_ELEMENT(ret)) {
3374 ret->last = (LINK_ELEMENT*) retry_end_l;
3375 }
3376}
3377
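// Return the builtin function name for a call, or NULL if the call does not
// name one. For example, a bare `__builtin_cexpr!(...)` call, a
// `__builtin.cexpr!(...)` call, and a `Primitive.cexpr!(...)` call all
// resolve to "cexpr!"; any other receiver or method name returns NULL.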
3378static const char *
3379pm_iseq_builtin_function_name(const pm_scope_node_t *scope_node, const pm_node_t *receiver, ID method_id)
3380{
3381 const char *name = rb_id2name(method_id);
3382 static const char prefix[] = "__builtin_";
3383 const size_t prefix_len = sizeof(prefix) - 1;
3384
3385 if (receiver == NULL) {
3386 if (UNLIKELY(strncmp(prefix, name, prefix_len) == 0)) {
3387 // __builtin_foo
3388 return &name[prefix_len];
3389 }
3390 }
3391 else if (PM_NODE_TYPE_P(receiver, PM_CALL_NODE)) {
3392 if (PM_NODE_FLAG_P(receiver, PM_CALL_NODE_FLAGS_VARIABLE_CALL)) {
3393 const pm_call_node_t *cast = (const pm_call_node_t *) receiver;
3394 if (pm_constant_id_lookup(scope_node, cast->name) == rb_intern_const("__builtin")) {
3395 // __builtin.foo
3396 return name;
3397 }
3398 }
3399 }
3400 else if (PM_NODE_TYPE_P(receiver, PM_CONSTANT_READ_NODE)) {
3401 const pm_constant_read_node_t *cast = (const pm_constant_read_node_t *) receiver;
3402 if (pm_constant_id_lookup(scope_node, cast->name) == rb_intern_const("Primitive")) {
3403 // Primitive.foo
3404 return name;
3405 }
3406 }
3407
3408 return NULL;
3409}
3410
3411// Compile Primitive.attr! :leaf, ...
3412static int
3413pm_compile_builtin_attr(rb_iseq_t *iseq, const pm_scope_node_t *scope_node, const pm_arguments_node_t *arguments, const pm_node_location_t *node_location)
3414{
3415 if (arguments == NULL) {
3416 COMPILE_ERROR(iseq, node_location->line, "attr!: no argument");
3417 return COMPILE_NG;
3418 }
3419
3420 const pm_node_t *argument;
3421 PM_NODE_LIST_FOREACH(&arguments->arguments, index, argument) {
3422 if (!PM_NODE_TYPE_P(argument, PM_SYMBOL_NODE)) {
3423 COMPILE_ERROR(iseq, node_location->line, "non symbol argument to attr!: %s", pm_node_type_to_str(PM_NODE_TYPE(argument)));
3424 return COMPILE_NG;
3425 }
3426
3427 VALUE symbol = pm_static_literal_value(iseq, argument, scope_node);
3428 VALUE string = rb_sym2str(symbol);
3429
3430 if (strcmp(RSTRING_PTR(string), "leaf") == 0) {
3431 ISEQ_BODY(iseq)->builtin_attrs |= BUILTIN_ATTR_LEAF;
3432 }
3433 else if (strcmp(RSTRING_PTR(string), "inline_block") == 0) {
3434 ISEQ_BODY(iseq)->builtin_attrs |= BUILTIN_ATTR_INLINE_BLOCK;
3435 }
3436 else if (strcmp(RSTRING_PTR(string), "use_block") == 0) {
3437 iseq_set_use_block(iseq);
3438 }
3439 else if (strcmp(RSTRING_PTR(string), "c_trace") == 0) {
3440 // Let the iseq act like a C method in backtraces
3441 ISEQ_BODY(iseq)->builtin_attrs |= BUILTIN_ATTR_C_TRACE;
3442 }
3443 else {
3444 COMPILE_ERROR(iseq, node_location->line, "unknown argument to attr!: %s", RSTRING_PTR(string));
3445 return COMPILE_NG;
3446 }
3447 }
3448
3449 return COMPILE_OK;
3450}
3451
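// Compile Primitive.arg!(:name), which (unless the result is popped) pushes
// the value of the named method argument onto the stack.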
3452static int
3453pm_compile_builtin_arg(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const pm_scope_node_t *scope_node, const pm_arguments_node_t *arguments, const pm_node_location_t *node_location, int popped)
3454{
3455 if (arguments == NULL) {
3456 COMPILE_ERROR(iseq, node_location->line, "arg!: no argument");
3457 return COMPILE_NG;
3458 }
3459
3460 if (arguments->arguments.size != 1) {
3461 COMPILE_ERROR(iseq, node_location->line, "arg!: too many arguments");
3462 return COMPILE_NG;
3463 }
3464
3465 const pm_node_t *argument = arguments->arguments.nodes[0];
3466 if (!PM_NODE_TYPE_P(argument, PM_SYMBOL_NODE)) {
3467 COMPILE_ERROR(iseq, node_location->line, "non symbol argument to arg!: %s", pm_node_type_to_str(PM_NODE_TYPE(argument)));
3468 return COMPILE_NG;
3469 }
3470
3471 if (!popped) {
3472 ID name = parse_string_symbol(scope_node, ((const pm_symbol_node_t *) argument));
3473 int index = ISEQ_BODY(ISEQ_BODY(iseq)->local_iseq)->local_table_size - get_local_var_idx(iseq, name);
3474
3475 debugs("id: %s idx: %d\n", rb_id2name(name), index);
3476 PUSH_GETLOCAL(ret, *node_location, index, get_lvar_level(iseq));
3477 }
3478
3479 return COMPILE_OK;
3480}
3481
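// Compile Primitive.mandatory_only?, which builds a second copy of the
// enclosing method that accepts only the required parameters. Roughly, the
// builtin .rb sources use it like:
//
//     def foo(a, b = nil)
//       if Primitive.mandatory_only?
//         # fast path compiled into a separate mandatory-only iseq
//       else
//         # full implementation
//       end
//     end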
3482static int
3483pm_compile_builtin_mandatory_only_method(rb_iseq_t *iseq, pm_scope_node_t *scope_node, const pm_call_node_t *call_node, const pm_node_location_t *node_location)
3484{
3485 const pm_node_t *ast_node = scope_node->ast_node;
3486 if (!PM_NODE_TYPE_P(ast_node, PM_DEF_NODE)) {
3487 rb_bug("mandatory_only?: not in method definition");
3488 return COMPILE_NG;
3489 }
3490
3491 const pm_def_node_t *def_node = (const pm_def_node_t *) ast_node;
3492 const pm_parameters_node_t *parameters_node = def_node->parameters;
3493 if (parameters_node == NULL) {
3494 rb_bug("mandatory_only?: in method definition with no parameters");
3495 return COMPILE_NG;
3496 }
3497
3498 const pm_node_t *body_node = def_node->body;
3499 if (body_node == NULL || !PM_NODE_TYPE_P(body_node, PM_STATEMENTS_NODE) || (((const pm_statements_node_t *) body_node)->body.size != 1) || !PM_NODE_TYPE_P(((const pm_statements_node_t *) body_node)->body.nodes[0], PM_IF_NODE)) {
3500 rb_bug("mandatory_only?: not in method definition with plain statements");
3501 return COMPILE_NG;
3502 }
3503
3504 const pm_if_node_t *if_node = (const pm_if_node_t *) ((const pm_statements_node_t *) body_node)->body.nodes[0];
3505 if (if_node->predicate != ((const pm_node_t *) call_node)) {
3506 rb_bug("mandatory_only?: can't find mandatory node");
3507 return COMPILE_NG;
3508 }
3509
3510 pm_parameters_node_t parameters = {
3511 .base = parameters_node->base,
3512 .requireds = parameters_node->requireds
3513 };
3514
3515 const pm_def_node_t def = {
3516 .base = def_node->base,
3517 .name = def_node->name,
3518 .receiver = def_node->receiver,
3519 .parameters = &parameters,
3520 .body = (pm_node_t *) if_node->statements,
3521 .locals = {
3522 .ids = def_node->locals.ids,
3523 .size = parameters_node->requireds.size,
3524 .capacity = def_node->locals.capacity
3525 }
3526 };
3527
3528 pm_scope_node_t next_scope_node;
3529 pm_scope_node_init(&def.base, &next_scope_node, scope_node);
3530
3531 int error_state;
3532 const rb_iseq_t *mandatory_only_iseq = pm_iseq_new_with_opt(
3533 &next_scope_node,
3534 rb_iseq_base_label(iseq),
3535 rb_iseq_path(iseq),
3536 rb_iseq_realpath(iseq),
3537 node_location->line,
3538 NULL,
3539 0,
3540 ISEQ_TYPE_METHOD,
3541 ISEQ_COMPILE_DATA(iseq)->option,
3542 &error_state
3543 );
3544 RB_OBJ_WRITE(iseq, &ISEQ_BODY(iseq)->mandatory_only_iseq, (VALUE)mandatory_only_iseq);
3545
3546 if (error_state) {
3547 RUBY_ASSERT(ISEQ_BODY(iseq)->mandatory_only_iseq == NULL);
3548 rb_jump_tag(error_state);
3549 }
3550
3551 pm_scope_node_destroy(&next_scope_node);
3552 return COMPILE_OK;
3553}
3554
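// Compile a call that names a builtin function: cstmt!/cexpr!, cconst!,
// cinit!, attr!, arg!, mandatory_only?, or a registered __builtin_* function.
// Depending on the name this either performs a compile-time action or emits
// an invokebuiltin/opt_invokebuiltin_delegate instruction.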
3555static int
3556pm_compile_builtin_function_call(rb_iseq_t *iseq, LINK_ANCHOR *const ret, pm_scope_node_t *scope_node, const pm_call_node_t *call_node, const pm_node_location_t *node_location, int popped, const rb_iseq_t *parent_block, const char *builtin_func)
3557{
3558 const pm_arguments_node_t *arguments = call_node->arguments;
3559
3560 if (parent_block != NULL) {
3561 COMPILE_ERROR(iseq, node_location->line, "should not call builtins here.");
3562 return COMPILE_NG;
3563 }
3564
3565#define BUILTIN_INLINE_PREFIX "_bi"
3566 char inline_func[sizeof(BUILTIN_INLINE_PREFIX) + DECIMAL_SIZE_OF(int)];
3567 bool cconst = false;
3568retry:;
3569 const struct rb_builtin_function *bf = iseq_builtin_function_lookup(iseq, builtin_func);
3570
3571 if (bf == NULL) {
3572 if (strcmp("cstmt!", builtin_func) == 0 || strcmp("cexpr!", builtin_func) == 0) {
3573 // ok
3574 }
3575 else if (strcmp("cconst!", builtin_func) == 0) {
3576 cconst = true;
3577 }
3578 else if (strcmp("cinit!", builtin_func) == 0) {
3579 // ignore
3580 return COMPILE_OK;
3581 }
3582 else if (strcmp("attr!", builtin_func) == 0) {
3583 return pm_compile_builtin_attr(iseq, scope_node, arguments, node_location);
3584 }
3585 else if (strcmp("arg!", builtin_func) == 0) {
3586 return pm_compile_builtin_arg(iseq, ret, scope_node, arguments, node_location, popped);
3587 }
3588 else if (strcmp("mandatory_only?", builtin_func) == 0) {
3589 if (popped) {
3590 rb_bug("mandatory_only? should be in if condition");
3591 }
3592 else if (!LIST_INSN_SIZE_ZERO(ret)) {
3593 rb_bug("mandatory_only? should be put on top");
3594 }
3595
3596 PUSH_INSN1(ret, *node_location, putobject, Qfalse);
3597 return pm_compile_builtin_mandatory_only_method(iseq, scope_node, call_node, node_location);
3598 }
3599 else if (1) {
3600 rb_bug("can't find builtin function:%s", builtin_func);
3601 }
3602 else {
3603 COMPILE_ERROR(iseq, node_location->line, "can't find builtin function:%s", builtin_func);
3604 return COMPILE_NG;
3605 }
3606
3607 int inline_index = node_location->line;
3608 snprintf(inline_func, sizeof(inline_func), BUILTIN_INLINE_PREFIX "%d", inline_index);
3609 builtin_func = inline_func;
3610 arguments = NULL;
3611 goto retry;
3612 }
3613
3614 if (cconst) {
3615 typedef VALUE(*builtin_func0)(void *, VALUE);
3616 VALUE const_val = (*(builtin_func0)(uintptr_t)bf->func_ptr)(NULL, Qnil);
3617 PUSH_INSN1(ret, *node_location, putobject, const_val);
3618 return COMPILE_OK;
3619 }
3620
3621 // fprintf(stderr, "func_name:%s -> %p\n", builtin_func, bf->func_ptr);
3622
3623 DECL_ANCHOR(args_seq);
3624
3625 int flags = 0;
3626 struct rb_callinfo_kwarg *keywords = NULL;
3627 int argc = pm_setup_args(arguments, call_node->block, &flags, &keywords, iseq, args_seq, scope_node, node_location);
3628
3629 if (argc != bf->argc) {
3630 COMPILE_ERROR(iseq, node_location->line, "argc does not match for builtin function:%s (expected %d but got %d)", builtin_func, bf->argc, argc);
3631 return COMPILE_NG;
3632 }
3633
3634 unsigned int start_index;
3635 if (delegate_call_p(iseq, argc, args_seq, &start_index)) {
3636 PUSH_INSN2(ret, *node_location, opt_invokebuiltin_delegate, bf, INT2FIX(start_index));
3637 }
3638 else {
3639 PUSH_SEQ(ret, args_seq);
3640 PUSH_INSN1(ret, *node_location, invokebuiltin, bf);
3641 }
3642
3643 if (popped) PUSH_INSN(ret, *node_location, pop);
3644 return COMPILE_OK;
3645}
3646
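// Compile a call node into ret, handling safe navigation, an attached block
// iseq, attribute writes, and the opt_new fast path for .new calls.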
3650static void
3651pm_compile_call(rb_iseq_t *iseq, const pm_call_node_t *call_node, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node, ID method_id, LABEL *start)
3652{
3653 const pm_location_t *message_loc = &call_node->message_loc;
3654 if (message_loc->start == NULL) message_loc = &call_node->base.location;
3655
3656 const pm_node_location_t location = PM_LOCATION_START_LOCATION(scope_node->parser, message_loc, call_node->base.node_id);
3657
3658 LABEL *else_label = NEW_LABEL(location.line);
3659 LABEL *end_label = NEW_LABEL(location.line);
3660 LABEL *retry_end_l = NEW_LABEL(location.line);
3661
3662 VALUE branches = Qfalse;
3663 rb_code_location_t code_location = { 0 };
3664 int node_id = location.node_id;
3665
3666 if (PM_NODE_FLAG_P(call_node, PM_CALL_NODE_FLAGS_SAFE_NAVIGATION)) {
3667 if (PM_BRANCH_COVERAGE_P(iseq)) {
3668 const uint8_t *cursors[3] = {
3669 call_node->closing_loc.end,
3670 call_node->arguments == NULL ? NULL : call_node->arguments->base.location.end,
3671 call_node->message_loc.end
3672 };
3673
3674 const uint8_t *end_cursor = cursors[0];
3675 end_cursor = (end_cursor == NULL || cursors[1] == NULL) ? cursors[1] : (end_cursor > cursors[1] ? end_cursor : cursors[1]);
3676 end_cursor = (end_cursor == NULL || cursors[2] == NULL) ? cursors[2] : (end_cursor > cursors[2] ? end_cursor : cursors[2]);
3677 if (!end_cursor) end_cursor = call_node->closing_loc.end;
3678
3679 const pm_line_column_t start_location = PM_NODE_START_LINE_COLUMN(scope_node->parser, call_node);
3680 const pm_line_column_t end_location = pm_newline_list_line_column(&scope_node->parser->newline_list, end_cursor, scope_node->parser->start_line);
3681
3682 code_location = (rb_code_location_t) {
3683 .beg_pos = { .lineno = start_location.line, .column = start_location.column },
3684 .end_pos = { .lineno = end_location.line, .column = end_location.column }
3685 };
3686
3687 branches = decl_branch_base(iseq, PTR2NUM(call_node), &code_location, "&.");
3688 }
3689
3690 PUSH_INSN(ret, location, dup);
3691 PUSH_INSNL(ret, location, branchnil, else_label);
3692
3693 add_trace_branch_coverage(iseq, ret, &code_location, node_id, 0, "then", branches);
3694 }
3695
3696 LINK_ELEMENT *opt_new_prelude = LAST_ELEMENT(ret);
3697
3698 int flags = 0;
3699 struct rb_callinfo_kwarg *kw_arg = NULL;
3700
3701 int orig_argc = pm_setup_args(call_node->arguments, call_node->block, &flags, &kw_arg, iseq, ret, scope_node, &location);
3702 const rb_iseq_t *previous_block = ISEQ_COMPILE_DATA(iseq)->current_block;
3703 const rb_iseq_t *block_iseq = NULL;
3704
3705 if (call_node->block != NULL && PM_NODE_TYPE_P(call_node->block, PM_BLOCK_NODE)) {
3706 // Scope associated with the block
3707 pm_scope_node_t next_scope_node;
3708 pm_scope_node_init(call_node->block, &next_scope_node, scope_node);
3709
3710 block_iseq = NEW_CHILD_ISEQ(&next_scope_node, make_name_for_block(iseq), ISEQ_TYPE_BLOCK, pm_node_line_number(scope_node->parser, call_node->block));
3711 pm_scope_node_destroy(&next_scope_node);
3712 ISEQ_COMPILE_DATA(iseq)->current_block = block_iseq;
3713 }
3714 else {
3715 if (PM_NODE_FLAG_P(call_node, PM_CALL_NODE_FLAGS_VARIABLE_CALL)) {
3716 flags |= VM_CALL_VCALL;
3717 }
3718
3719 if (!flags) {
3720 flags |= VM_CALL_ARGS_SIMPLE;
3721 }
3722 }
3723
3724 if (PM_NODE_FLAG_P(call_node, PM_CALL_NODE_FLAGS_IGNORE_VISIBILITY)) {
3725 flags |= VM_CALL_FCALL;
3726 }
3727
3728 if (!popped && PM_NODE_FLAG_P(call_node, PM_CALL_NODE_FLAGS_ATTRIBUTE_WRITE)) {
3729 if (flags & VM_CALL_ARGS_BLOCKARG) {
3730 PUSH_INSN1(ret, location, topn, INT2FIX(1));
3731 if (flags & VM_CALL_ARGS_SPLAT) {
3732 PUSH_INSN1(ret, location, putobject, INT2FIX(-1));
3733 PUSH_SEND_WITH_FLAG(ret, location, idAREF, INT2FIX(1), INT2FIX(0));
3734 }
3735 PUSH_INSN1(ret, location, setn, INT2FIX(orig_argc + 3));
3736 PUSH_INSN(ret, location, pop);
3737 }
3738 else if (flags & VM_CALL_ARGS_SPLAT) {
3739 PUSH_INSN(ret, location, dup);
3740 PUSH_INSN1(ret, location, putobject, INT2FIX(-1));
3741 PUSH_SEND_WITH_FLAG(ret, location, idAREF, INT2FIX(1), INT2FIX(0));
3742 PUSH_INSN1(ret, location, setn, INT2FIX(orig_argc + 2));
3743 PUSH_INSN(ret, location, pop);
3744 }
3745 else {
3746 PUSH_INSN1(ret, location, setn, INT2FIX(orig_argc + 1));
3747 }
3748 }
3749
3750 if ((flags & VM_CALL_KW_SPLAT) && (flags & VM_CALL_ARGS_BLOCKARG) && !(flags & VM_CALL_KW_SPLAT_MUT)) {
3751 PUSH_INSN(ret, location, splatkw);
3752 }
3753
3754 LABEL *not_basic_new = NEW_LABEL(location.line);
3755 LABEL *not_basic_new_finish = NEW_LABEL(location.line);
3756
3757 bool inline_new = ISEQ_COMPILE_DATA(iseq)->option->specialized_instruction &&
3758 method_id == rb_intern("new") &&
3759 call_node->block == NULL &&
3760 (flags & VM_CALL_ARGS_BLOCKARG) == 0;
3761
3762 if (inline_new) {
3763 if (LAST_ELEMENT(ret) == opt_new_prelude) {
3764 PUSH_INSN(ret, location, putnil);
3765 PUSH_INSN(ret, location, swap);
3766 }
3767 else {
3768 ELEM_INSERT_NEXT(opt_new_prelude, &new_insn_body(iseq, location.line, location.node_id, BIN(swap), 0)->link);
3769 ELEM_INSERT_NEXT(opt_new_prelude, &new_insn_body(iseq, location.line, location.node_id, BIN(putnil), 0)->link);
3770 }
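        // Either way, a nil placeholder now sits on the stack beneath the
        // receiver (and the arguments compiled after it). On the fast path
        // the allocated object is left in that slot and the pop after the
        // finish label discards the result of initialize; on the fallback
        // path the swap after the full send tucks the real result under the
        // placeholder so that same pop discards the nil instead.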
3771
3772 // Jump unless the receiver uses the "basic" implementation of "new"
3773 VALUE ci;
3774 if (flags & VM_CALL_FORWARDING) {
3775 ci = (VALUE)new_callinfo(iseq, method_id, orig_argc + 1, flags, kw_arg, 0);
3776 }
3777 else {
3778 ci = (VALUE)new_callinfo(iseq, method_id, orig_argc, flags, kw_arg, 0);
3779 }
3780
3781 PUSH_INSN2(ret, location, opt_new, ci, not_basic_new);
3782 LABEL_REF(not_basic_new);
3783 // optimized path
3784 PUSH_SEND_R(ret, location, rb_intern("initialize"), INT2FIX(orig_argc), block_iseq, INT2FIX(flags | VM_CALL_FCALL), kw_arg);
3785 PUSH_INSNL(ret, location, jump, not_basic_new_finish);
3786
3787 PUSH_LABEL(ret, not_basic_new);
3788 // Fall back to normal send
3789 PUSH_SEND_R(ret, location, method_id, INT2FIX(orig_argc), block_iseq, INT2FIX(flags), kw_arg);
3790 PUSH_INSN(ret, location, swap);
3791
3792 PUSH_LABEL(ret, not_basic_new_finish);
3793 PUSH_INSN(ret, location, pop);
3794 }
3795 else {
3796 PUSH_SEND_R(ret, location, method_id, INT2FIX(orig_argc), block_iseq, INT2FIX(flags), kw_arg);
3797 }
3798
3799 if (block_iseq && ISEQ_BODY(block_iseq)->catch_table) {
3800 pm_compile_retry_end_label(iseq, ret, retry_end_l);
3801 PUSH_CATCH_ENTRY(CATCH_TYPE_BREAK, start, retry_end_l, block_iseq, retry_end_l);
3802 }
3803
3804 if (PM_NODE_FLAG_P(call_node, PM_CALL_NODE_FLAGS_SAFE_NAVIGATION)) {
3805 PUSH_INSNL(ret, location, jump, end_label);
3806 PUSH_LABEL(ret, else_label);
3807 add_trace_branch_coverage(iseq, ret, &code_location, node_id, 1, "else", branches);
3808 PUSH_LABEL(ret, end_label);
3809 }
3810
3811 if (PM_NODE_FLAG_P(call_node, PM_CALL_NODE_FLAGS_ATTRIBUTE_WRITE) && !popped) {
3812 PUSH_INSN(ret, location, pop);
3813 }
3814
3815 if (popped) PUSH_INSN(ret, location, pop);
3816 ISEQ_COMPILE_DATA(iseq)->current_block = previous_block;
3817}
3818
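// Compute the operand for a defined instruction (DEFINED_REF) checking a
// named back reference such as $& or $+. The encoding is a Fixnum holding
// (id << 1) | 1, where the set low bit marks it as a named reference.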
3823static inline VALUE
3824pm_compile_back_reference_ref(const pm_back_reference_read_node_t *node)
3825{
3826 const char *type = (const char *) (node->base.location.start + 1);
3827
3828 // Since a back reference is written `$<char>`, Ruby represents it with the
3829 // ID obtained by interning the character that follows the `$`.
3830 return INT2FIX(rb_intern2(type, 1)) << 1 | 1;
3831}
3832
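// Compute the operand for a defined instruction (DEFINED_REF) checking a
// numbered reference such as $1. The encoding is a Fixnum holding
// (number << 1), with the low bit clear to distinguish it from a named back
// reference.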
3837static inline VALUE
3838pm_compile_numbered_reference_ref(const pm_numbered_reference_read_node_t *node)
3839{
3840 return INT2FIX(node->number << 1);
3841}
3842
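// Compile the body of a defined? expression. lfinish points at up to three
// labels: lfinish[0] terminates the whole expression, lfinish[1] (created
// lazily) is the bail-out target jumped to when some sub-expression turns out
// not to be defined, and lfinish[2] is used when chaining method calls on an
// explicit receiver so the duplicated receiver can be cleaned up.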
3843static void
3844pm_compile_defined_expr0(rb_iseq_t *iseq, const pm_node_t *node, const pm_node_location_t *node_location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node, bool in_condition, LABEL **lfinish, bool explicit_receiver)
3845{
3846#define PUSH_VAL(type) (in_condition ? Qtrue : rb_iseq_defined_string(type))
3847
3848 // in_condition is the same as compile.c's needstr
3849 enum defined_type dtype = DEFINED_NOT_DEFINED;
3850 const pm_node_location_t location = *node_location;
3851
3852 switch (PM_NODE_TYPE(node)) {
3853/* DEFINED_NIL ****************************************************************/
3854 case PM_NIL_NODE:
3855 // defined?(nil)
3856 // ^^^
3857 dtype = DEFINED_NIL;
3858 break;
3859/* DEFINED_IVAR ***************************************************************/
3860 case PM_INSTANCE_VARIABLE_READ_NODE: {
3861 // defined?(@a)
3862 // ^^
3863 const pm_instance_variable_read_node_t *cast = (const pm_instance_variable_read_node_t *) node;
3864 ID name = pm_constant_id_lookup(scope_node, cast->name);
3865
3866 PUSH_INSN3(ret, location, definedivar, ID2SYM(name), get_ivar_ic_value(iseq, name), PUSH_VAL(DEFINED_IVAR));
3867
3868 return;
3869 }
3870/* DEFINED_LVAR ***************************************************************/
3871 case PM_LOCAL_VARIABLE_READ_NODE:
3872 // a = 1; defined?(a)
3873 // ^
3874 case PM_IT_LOCAL_VARIABLE_READ_NODE:
3875 // 1.then { defined?(it) }
3876 // ^^
3877 dtype = DEFINED_LVAR;
3878 break;
3879/* DEFINED_GVAR ***************************************************************/
3880 case PM_GLOBAL_VARIABLE_READ_NODE: {
3881 // defined?($a)
3882 // ^^
3883 const pm_global_variable_read_node_t *cast = (const pm_global_variable_read_node_t *) node;
3884 ID name = pm_constant_id_lookup(scope_node, cast->name);
3885
3886 PUSH_INSN(ret, location, putnil);
3887 PUSH_INSN3(ret, location, defined, INT2FIX(DEFINED_GVAR), ID2SYM(name), PUSH_VAL(DEFINED_GVAR));
3888
3889 return;
3890 }
3891/* DEFINED_CVAR ***************************************************************/
3892 case PM_CLASS_VARIABLE_READ_NODE: {
3893 // defined?(@@a)
3894 // ^^^
3895 const pm_class_variable_read_node_t *cast = (const pm_class_variable_read_node_t *) node;
3896 ID name = pm_constant_id_lookup(scope_node, cast->name);
3897
3898 PUSH_INSN(ret, location, putnil);
3899 PUSH_INSN3(ret, location, defined, INT2FIX(DEFINED_CVAR), ID2SYM(name), PUSH_VAL(DEFINED_CVAR));
3900
3901 return;
3902 }
3903/* DEFINED_CONST **************************************************************/
3904 case PM_CONSTANT_READ_NODE: {
3905 // defined?(A)
3906 // ^
3907 const pm_constant_read_node_t *cast = (const pm_constant_read_node_t *) node;
3908 ID name = pm_constant_id_lookup(scope_node, cast->name);
3909
3910 PUSH_INSN(ret, location, putnil);
3911 PUSH_INSN3(ret, location, defined, INT2FIX(DEFINED_CONST), ID2SYM(name), PUSH_VAL(DEFINED_CONST));
3912
3913 return;
3914 }
3915/* DEFINED_YIELD **************************************************************/
3916 case PM_YIELD_NODE:
3917 // defined?(yield)
3918 // ^^^^^
3919 iseq_set_use_block(ISEQ_BODY(iseq)->local_iseq);
3920
3921 PUSH_INSN(ret, location, putnil);
3922 PUSH_INSN3(ret, location, defined, INT2FIX(DEFINED_YIELD), 0, PUSH_VAL(DEFINED_YIELD));
3923
3924 return;
3925/* DEFINED_ZSUPER *************************************************************/
3926 case PM_SUPER_NODE: {
3927 // defined?(super 1, 2)
3928 // ^^^^^^^^^^
3929 const pm_super_node_t *cast = (const pm_super_node_t *) node;
3930
3931 if (cast->block != NULL && !PM_NODE_TYPE_P(cast->block, PM_BLOCK_ARGUMENT_NODE)) {
3932 dtype = DEFINED_EXPR;
3933 break;
3934 }
3935
3936 PUSH_INSN(ret, location, putnil);
3937 PUSH_INSN3(ret, location, defined, INT2FIX(DEFINED_ZSUPER), 0, PUSH_VAL(DEFINED_ZSUPER));
3938 return;
3939 }
3940 case PM_FORWARDING_SUPER_NODE: {
3941 // defined?(super)
3942 // ^^^^^
3943 const pm_forwarding_super_node_t *cast = (const pm_forwarding_super_node_t *) node;
3944
3945 if (cast->block != NULL) {
3946 dtype = DEFINED_EXPR;
3947 break;
3948 }
3949
3950 PUSH_INSN(ret, location, putnil);
3951 PUSH_INSN3(ret, location, defined, INT2FIX(DEFINED_ZSUPER), 0, PUSH_VAL(DEFINED_ZSUPER));
3952 return;
3953 }
3954/* DEFINED_SELF ***************************************************************/
3955 case PM_SELF_NODE:
3956 // defined?(self)
3957 // ^^^^
3958 dtype = DEFINED_SELF;
3959 break;
3960/* DEFINED_TRUE ***************************************************************/
3961 case PM_TRUE_NODE:
3962 // defined?(true)
3963 // ^^^^
3964 dtype = DEFINED_TRUE;
3965 break;
3966/* DEFINED_FALSE **************************************************************/
3967 case PM_FALSE_NODE:
3968 // defined?(false)
3969 // ^^^^^
3970 dtype = DEFINED_FALSE;
3971 break;
3972/* DEFINED_ASGN ***************************************************************/
3973 case PM_CALL_AND_WRITE_NODE:
3974 // defined?(a.a &&= 1)
3975 // ^^^^^^^^^
3976 case PM_CALL_OPERATOR_WRITE_NODE:
3977 // defined?(a.a += 1)
3978 // ^^^^^^^^
3979 case PM_CALL_OR_WRITE_NODE:
3980 // defined?(a.a ||= 1)
3981 // ^^^^^^^^^
3982 case PM_CLASS_VARIABLE_AND_WRITE_NODE:
3983 // defined?(@@a &&= 1)
3984 // ^^^^^^^^^
3985 case PM_CLASS_VARIABLE_OPERATOR_WRITE_NODE:
3986 // defined?(@@a += 1)
3987 // ^^^^^^^^
3988 case PM_CLASS_VARIABLE_OR_WRITE_NODE:
3989 // defined?(@@a ||= 1)
3990 // ^^^^^^^^^
3991 case PM_CLASS_VARIABLE_WRITE_NODE:
3992 // defined?(@@a = 1)
3993 // ^^^^^^^
3994 case PM_CONSTANT_AND_WRITE_NODE:
3995 // defined?(A &&= 1)
3996 // ^^^^^^^
3997 case PM_CONSTANT_OPERATOR_WRITE_NODE:
3998 // defined?(A += 1)
3999 // ^^^^^^
4000 case PM_CONSTANT_OR_WRITE_NODE:
4001 // defined?(A ||= 1)
4002 // ^^^^^^^
4003 case PM_CONSTANT_PATH_AND_WRITE_NODE:
4004 // defined?(A::A &&= 1)
4005 // ^^^^^^^^^^
4006 case PM_CONSTANT_PATH_OPERATOR_WRITE_NODE:
4007 // defined?(A::A += 1)
4008 // ^^^^^^^^^
4009 case PM_CONSTANT_PATH_OR_WRITE_NODE:
4010 // defined?(A::A ||= 1)
4011 // ^^^^^^^^^^
4012 case PM_CONSTANT_PATH_WRITE_NODE:
4013 // defined?(A::A = 1)
4014 // ^^^^^^^^
4015 case PM_CONSTANT_WRITE_NODE:
4016 // defined?(A = 1)
4017 // ^^^^^
4018 case PM_GLOBAL_VARIABLE_AND_WRITE_NODE:
4019 // defined?($a &&= 1)
4020 // ^^^^^^^^
4021 case PM_GLOBAL_VARIABLE_OPERATOR_WRITE_NODE:
4022 // defined?($a += 1)
4023 // ^^^^^^^
4024 case PM_GLOBAL_VARIABLE_OR_WRITE_NODE:
4025 // defined?($a ||= 1)
4026 // ^^^^^^^^
4027 case PM_GLOBAL_VARIABLE_WRITE_NODE:
4028 // defined?($a = 1)
4029 // ^^^^^^
4030 case PM_INDEX_AND_WRITE_NODE:
4031 // defined?(a[1] &&= 1)
4032 // ^^^^^^^^^^
4033 case PM_INDEX_OPERATOR_WRITE_NODE:
4034 // defined?(a[1] += 1)
4035 // ^^^^^^^^^
4036 case PM_INDEX_OR_WRITE_NODE:
4037 // defined?(a[1] ||= 1)
4038 // ^^^^^^^^^^
4039 case PM_INSTANCE_VARIABLE_AND_WRITE_NODE:
4040 // defined?(@a &&= 1)
4041 // ^^^^^^^^
4042 case PM_INSTANCE_VARIABLE_OPERATOR_WRITE_NODE:
4043 // defined?(@a += 1)
4044 // ^^^^^^^
4045 case PM_INSTANCE_VARIABLE_OR_WRITE_NODE:
4046 // defined?(@a ||= 1)
4047 // ^^^^^^^^
4048 case PM_INSTANCE_VARIABLE_WRITE_NODE:
4049 // defined?(@a = 1)
4050 // ^^^^^^
4051 case PM_LOCAL_VARIABLE_AND_WRITE_NODE:
4052 // defined?(a &&= 1)
4053 // ^^^^^^^
4054 case PM_LOCAL_VARIABLE_OPERATOR_WRITE_NODE:
4055 // defined?(a += 1)
4056 // ^^^^^^
4057 case PM_LOCAL_VARIABLE_OR_WRITE_NODE:
4058 // defined?(a ||= 1)
4059 // ^^^^^^^
4060 case PM_LOCAL_VARIABLE_WRITE_NODE:
4061 // defined?(a = 1)
4062 // ^^^^^
4063 case PM_MULTI_WRITE_NODE:
4064 // defined?((a, = 1))
4065 // ^^^^^^
4066 dtype = DEFINED_ASGN;
4067 break;
4068/* DEFINED_EXPR ***************************************************************/
4069 case PM_ALIAS_GLOBAL_VARIABLE_NODE:
4070 // defined?((alias $a $b))
4071 // ^^^^^^^^^^^
4072 case PM_ALIAS_METHOD_NODE:
4073 // defined?((alias a b))
4074 // ^^^^^^^^^
4075 case PM_AND_NODE:
4076 // defined?(a and b)
4077 // ^^^^^^^
4078 case PM_BREAK_NODE:
4079 // defined?(break 1)
4080 // ^^^^^^^
4081 case PM_CASE_MATCH_NODE:
4082 // defined?(case 1; in 1; end)
4083 // ^^^^^^^^^^^^^^^^^
4084 case PM_CASE_NODE:
4085 // defined?(case 1; when 1; end)
4086 // ^^^^^^^^^^^^^^^^^^^
4087 case PM_CLASS_NODE:
4088 // defined?(class Foo; end)
4089 // ^^^^^^^^^^^^^^
4090 case PM_DEF_NODE:
4091 // defined?(def a() end)
4092 // ^^^^^^^^^^^
4093 case PM_DEFINED_NODE:
4094 // defined?(defined?(a))
4095 // ^^^^^^^^^^^
4096 case PM_FLIP_FLOP_NODE:
4097 // defined?(not (a .. b))
4098 // ^^^^^^
4099 case PM_FLOAT_NODE:
4100 // defined?(1.0)
4101 // ^^^
4102 case PM_FOR_NODE:
4103 // defined?(for a in 1 do end)
4104 // ^^^^^^^^^^^^^^^^^
4105 case PM_IF_NODE:
4106 // defined?(if a then end)
4107 // ^^^^^^^^^^^^^
4108 case PM_IMAGINARY_NODE:
4109 // defined?(1i)
4110 // ^^
4111 case PM_INTEGER_NODE:
4112 // defined?(1)
4113 // ^
4114 case PM_INTERPOLATED_MATCH_LAST_LINE_NODE:
4115 // defined?(not /#{1}/)
4116 // ^^^^^^
4117 case PM_INTERPOLATED_REGULAR_EXPRESSION_NODE:
4118 // defined?(/#{1}/)
4119 // ^^^^^^
4120 case PM_INTERPOLATED_STRING_NODE:
4121 // defined?("#{1}")
4122 // ^^^^^^
4123 case PM_INTERPOLATED_SYMBOL_NODE:
4124 // defined?(:"#{1}")
4125 // ^^^^^^^
4126 case PM_INTERPOLATED_X_STRING_NODE:
4127 // defined?(`#{1}`)
4128 // ^^^^^^
4129 case PM_LAMBDA_NODE:
4130 // defined?(-> {})
4131 // ^^^^^
4132 case PM_MATCH_LAST_LINE_NODE:
4133 // defined?(not //)
4134 // ^^^^^^
4135 case PM_MATCH_PREDICATE_NODE:
4136 // defined?(1 in 1)
4137 // ^^^^^^
4138 case PM_MATCH_REQUIRED_NODE:
4139 // defined?(1 => 1)
4140 // ^^^^^^
4141 case PM_MATCH_WRITE_NODE:
4142 // defined?(/(?<a>)/ =~ "")
4143 // ^^^^^^^^^^^^^^
4144 case PM_MODULE_NODE:
4145 // defined?(module A end)
4146 // ^^^^^^^^^^^^
4147 case PM_NEXT_NODE:
4148 // defined?(next 1)
4149 // ^^^^^^
4150 case PM_OR_NODE:
4151 // defined?(a or b)
4152 // ^^^^^^
4153 case PM_POST_EXECUTION_NODE:
4154 // defined?((END {}))
4155 // ^^^^^^^^
4156 case PM_RANGE_NODE:
4157 // defined?(1..1)
4158 // ^^^^
4159 case PM_RATIONAL_NODE:
4160 // defined?(1r)
4161 // ^^
4162 case PM_REDO_NODE:
4163 // defined?(redo)
4164 // ^^^^
4165 case PM_REGULAR_EXPRESSION_NODE:
4166 // defined?(//)
4167 // ^^
4168 case PM_RESCUE_MODIFIER_NODE:
4169 // defined?(a rescue b)
4170 // ^^^^^^^^^^
4171 case PM_RETRY_NODE:
4172 // defined?(retry)
4173 // ^^^^^
4174 case PM_RETURN_NODE:
4175 // defined?(return)
4176 // ^^^^^^
4177 case PM_SINGLETON_CLASS_NODE:
4178 // defined?(class << self; end)
4179 // ^^^^^^^^^^^^^^^^^^
4180 case PM_SOURCE_ENCODING_NODE:
4181 // defined?(__ENCODING__)
4182 // ^^^^^^^^^^^^
4183 case PM_SOURCE_FILE_NODE:
4184 // defined?(__FILE__)
4185 // ^^^^^^^^
4186 case PM_SOURCE_LINE_NODE:
4187 // defined?(__LINE__)
4188 // ^^^^^^^^
4189 case PM_STRING_NODE:
4190 // defined?("")
4191 // ^^
4192 case PM_SYMBOL_NODE:
4193 // defined?(:a)
4194 // ^^
4195 case PM_UNDEF_NODE:
4196 // defined?((undef a))
4197 // ^^^^^^^
4198 case PM_UNLESS_NODE:
4199 // defined?(unless a then end)
4200 // ^^^^^^^^^^^^^^^^^
4201 case PM_UNTIL_NODE:
4202 // defined?(until a do end)
4203 // ^^^^^^^^^^^^^^
4204 case PM_WHILE_NODE:
4205 // defined?(while a do end)
4206 // ^^^^^^^^^^^^^^
4207 case PM_X_STRING_NODE:
4208 // defined?(``)
4209 // ^^
4210 dtype = DEFINED_EXPR;
4211 break;
4212/* DEFINED_REF ****************************************************************/
4213 case PM_BACK_REFERENCE_READ_NODE: {
4214 // defined?($+)
4215 // ^^
4216 const pm_back_reference_read_node_t *cast = (const pm_back_reference_read_node_t *) node;
4217 VALUE ref = pm_compile_back_reference_ref(cast);
4218
4219 PUSH_INSN(ret, location, putnil);
4220 PUSH_INSN3(ret, location, defined, INT2FIX(DEFINED_REF), ref, PUSH_VAL(DEFINED_GVAR));
4221
4222 return;
4223 }
4224 case PM_NUMBERED_REFERENCE_READ_NODE: {
4225 // defined?($1)
4226 // ^^
4227 const pm_numbered_reference_read_node_t *cast = (const pm_numbered_reference_read_node_t *) node;
4228 VALUE ref = pm_compile_numbered_reference_ref(cast);
4229
4230 PUSH_INSN(ret, location, putnil);
4231 PUSH_INSN3(ret, location, defined, INT2FIX(DEFINED_REF), ref, PUSH_VAL(DEFINED_GVAR));
4232
4233 return;
4234 }
4235/* DEFINED_CONST_FROM *********************************************************/
4236 case PM_CONSTANT_PATH_NODE: {
4237 // defined?(A::A)
4238 // ^^^^
4239 const pm_constant_path_node_t *cast = (const pm_constant_path_node_t *) node;
4240 ID name = pm_constant_id_lookup(scope_node, cast->name);
4241
4242 if (cast->parent != NULL) {
4243 if (!lfinish[1]) lfinish[1] = NEW_LABEL(location.line);
4244 pm_compile_defined_expr0(iseq, cast->parent, node_location, ret, popped, scope_node, true, lfinish, false);
4245
4246 PUSH_INSNL(ret, location, branchunless, lfinish[1]);
4247 PM_COMPILE(cast->parent);
4248 }
4249 else {
4250 PUSH_INSN1(ret, location, putobject, rb_cObject);
4251 }
4252
4253 PUSH_INSN3(ret, location, defined, INT2FIX(DEFINED_CONST_FROM), ID2SYM(name), PUSH_VAL(DEFINED_CONST));
4254 return;
4255 }
4256/* Containers *****************************************************************/
4257 case PM_BEGIN_NODE: {
4258 // defined?(begin end)
4259 // ^^^^^^^^^
4260 const pm_begin_node_t *cast = (const pm_begin_node_t *) node;
4261
4262 if (cast->rescue_clause == NULL && cast->ensure_clause == NULL && cast->else_clause == NULL) {
4263 if (cast->statements == NULL) {
4264 // If we have empty statements, then we want to return "nil".
4265 dtype = DEFINED_NIL;
4266 }
4267 else if (cast->statements->body.size == 1) {
4268 // If we have a begin node that is wrapping a single statement
4269 // then we want to recurse down to that statement and compile
4270 // it.
4271 pm_compile_defined_expr0(iseq, cast->statements->body.nodes[0], node_location, ret, popped, scope_node, in_condition, lfinish, false);
4272 return;
4273 }
4274 else {
4275 // Otherwise, we have a begin wrapping multiple statements, in
4276 // which case this is defined as "expression".
4277 dtype = DEFINED_EXPR;
4278 }
4279 } else {
4280 // If we have any of the other clauses besides the main begin/end,
4281 // this is defined as "expression".
4282 dtype = DEFINED_EXPR;
4283 }
4284
4285 break;
4286 }
4287 case PM_PARENTHESES_NODE: {
4288 // defined?(())
4289 // ^^
4290 const pm_parentheses_node_t *cast = (const pm_parentheses_node_t *) node;
4291
4292 if (cast->body == NULL) {
4293 // If we have empty parentheses, then we want to return "nil".
4294 dtype = DEFINED_NIL;
4295 }
4296 else if (PM_NODE_TYPE_P(cast->body, PM_STATEMENTS_NODE) && !PM_NODE_FLAG_P(cast, PM_PARENTHESES_NODE_FLAGS_MULTIPLE_STATEMENTS)) {
4297 // If we have a parentheses node that is wrapping a single statement
4298 // then we want to recurse down to that statement and compile it.
4299 pm_compile_defined_expr0(iseq, ((const pm_statements_node_t *) cast->body)->body.nodes[0], node_location, ret, popped, scope_node, in_condition, lfinish, false);
4300 return;
4301 }
4302 else {
4303 // Otherwise, we have parentheses wrapping multiple statements, in
4304 // which case this is defined as "expression".
4305 dtype = DEFINED_EXPR;
4306 }
4307
4308 break;
4309 }
4310 case PM_ARRAY_NODE: {
4311 // defined?([])
4312 // ^^
4313 const pm_array_node_t *cast = (const pm_array_node_t *) node;
4314
4315 if (cast->elements.size > 0 && !lfinish[1]) {
4316 lfinish[1] = NEW_LABEL(location.line);
4317 }
4318
4319 for (size_t index = 0; index < cast->elements.size; index++) {
4320 pm_compile_defined_expr0(iseq, cast->elements.nodes[index], node_location, ret, popped, scope_node, true, lfinish, false);
4321 PUSH_INSNL(ret, location, branchunless, lfinish[1]);
4322 }
4323
4324 dtype = DEFINED_EXPR;
4325 break;
4326 }
4327 case PM_HASH_NODE:
4328 // defined?({ a: 1 })
4329 // ^^^^^^^^
4330 case PM_KEYWORD_HASH_NODE: {
4331 // defined?(a(a: 1))
4332 // ^^^^
4333 const pm_node_list_t *elements;
4334
4335 if (PM_NODE_TYPE_P(node, PM_HASH_NODE)) {
4336 elements = &((const pm_hash_node_t *) node)->elements;
4337 }
4338 else {
4339 elements = &((const pm_keyword_hash_node_t *) node)->elements;
4340 }
4341
4342 if (elements->size > 0 && !lfinish[1]) {
4343 lfinish[1] = NEW_LABEL(location.line);
4344 }
4345
4346 for (size_t index = 0; index < elements->size; index++) {
4347 pm_compile_defined_expr0(iseq, elements->nodes[index], node_location, ret, popped, scope_node, true, lfinish, false);
4348 PUSH_INSNL(ret, location, branchunless, lfinish[1]);
4349 }
4350
4351 dtype = DEFINED_EXPR;
4352 break;
4353 }
4354 case PM_ASSOC_NODE: {
4355 // defined?({ a: 1 })
4356 // ^^^^
4357 const pm_assoc_node_t *cast = (const pm_assoc_node_t *) node;
4358
4359 pm_compile_defined_expr0(iseq, cast->key, node_location, ret, popped, scope_node, true, lfinish, false);
4360 PUSH_INSNL(ret, location, branchunless, lfinish[1]);
4361 pm_compile_defined_expr0(iseq, cast->value, node_location, ret, popped, scope_node, true, lfinish, false);
4362
4363 return;
4364 }
4365 case PM_ASSOC_SPLAT_NODE: {
4366 // defined?({ **a })
4367 // ^^^^
4368 const pm_assoc_splat_node_t *cast = (const pm_assoc_splat_node_t *) node;
4369
4370 if (cast->value == NULL) {
4371 dtype = DEFINED_EXPR;
4372 break;
4373 }
4374
4375 pm_compile_defined_expr0(iseq, cast->value, node_location, ret, popped, scope_node, true, lfinish, false);
4376 return;
4377 }
4378 case PM_IMPLICIT_NODE: {
4379 // defined?({ a: })
4380 // ^^
4381 const pm_implicit_node_t *cast = (const pm_implicit_node_t *) node;
4382 pm_compile_defined_expr0(iseq, cast->value, node_location, ret, popped, scope_node, in_condition, lfinish, false);
4383 return;
4384 }
4385 case PM_CALL_NODE: {
4386#define BLOCK_P(cast) ((cast)->block != NULL && PM_NODE_TYPE_P((cast)->block, PM_BLOCK_NODE))
4387
4388 // defined?(a(1, 2, 3))
4389 // ^^^^^^^^^^
4390 const pm_call_node_t *cast = ((const pm_call_node_t *) node);
4391
4392 if (BLOCK_P(cast)) {
4393 dtype = DEFINED_EXPR;
4394 break;
4395 }
4396
4397 if (cast->receiver || cast->arguments || (cast->block && PM_NODE_TYPE_P(cast->block, PM_BLOCK_ARGUMENT_NODE))) {
4398 if (!lfinish[1]) lfinish[1] = NEW_LABEL(location.line);
4399 if (!lfinish[2]) lfinish[2] = NEW_LABEL(location.line);
4400 }
4401
4402 if (cast->arguments) {
4403 pm_compile_defined_expr0(iseq, (const pm_node_t *) cast->arguments, node_location, ret, popped, scope_node, true, lfinish, false);
4404 PUSH_INSNL(ret, location, branchunless, lfinish[1]);
4405 }
4406
4407 if (cast->block && PM_NODE_TYPE_P(cast->block, PM_BLOCK_ARGUMENT_NODE)) {
4408 pm_compile_defined_expr0(iseq, cast->block, node_location, ret, popped, scope_node, true, lfinish, false);
4409 PUSH_INSNL(ret, location, branchunless, lfinish[1]);
4410 }
4411
4412 if (cast->receiver) {
4413 if (PM_NODE_TYPE_P(cast->receiver, PM_CALL_NODE) && !BLOCK_P((const pm_call_node_t *) cast->receiver)) {
4414 // Special behavior here where we chain calls together. This is
4415 // the only path that sets explicit_receiver to true.
4416 pm_compile_defined_expr0(iseq, cast->receiver, node_location, ret, popped, scope_node, true, lfinish, true);
4417 PUSH_INSNL(ret, location, branchunless, lfinish[2]);
4418
4419 const pm_call_node_t *receiver = (const pm_call_node_t *) cast->receiver;
4420 ID method_id = pm_constant_id_lookup(scope_node, receiver->name);
4421
4422 pm_compile_call(iseq, receiver, ret, popped, scope_node, method_id, NULL);
4423 }
4424 else {
4425 pm_compile_defined_expr0(iseq, cast->receiver, node_location, ret, popped, scope_node, true, lfinish, false);
4426 PUSH_INSNL(ret, location, branchunless, lfinish[1]);
4427 PM_COMPILE(cast->receiver);
4428 }
4429
4430 ID method_id = pm_constant_id_lookup(scope_node, cast->name);
4431
4432 if (explicit_receiver) PUSH_INSN(ret, location, dup);
4433 PUSH_INSN3(ret, location, defined, INT2FIX(DEFINED_METHOD), rb_id2sym(method_id), PUSH_VAL(DEFINED_METHOD));
4434 }
4435 else {
4436 ID method_id = pm_constant_id_lookup(scope_node, cast->name);
4437
4438 PUSH_INSN(ret, location, putself);
4439 if (explicit_receiver) PUSH_INSN(ret, location, dup);
4440
4441 PUSH_INSN3(ret, location, defined, INT2FIX(DEFINED_FUNC), rb_id2sym(method_id), PUSH_VAL(DEFINED_METHOD));
4442 }
4443
4444 return;
4445
4446#undef BLOCK_P
4447 }
4448 case PM_ARGUMENTS_NODE: {
4449 // defined?(a(1, 2, 3))
4450 // ^^^^^^^
4451 const pm_arguments_node_t *cast = (const pm_arguments_node_t *) node;
4452
4453 for (size_t index = 0; index < cast->arguments.size; index++) {
4454 pm_compile_defined_expr0(iseq, cast->arguments.nodes[index], node_location, ret, popped, scope_node, in_condition, lfinish, false);
4455 PUSH_INSNL(ret, location, branchunless, lfinish[1]);
4456 }
4457
4458 dtype = DEFINED_EXPR;
4459 break;
4460 }
4461 case PM_BLOCK_ARGUMENT_NODE:
4462 // defined?(a(&b))
4463 // ^^
4464 dtype = DEFINED_EXPR;
4465 break;
4466 case PM_FORWARDING_ARGUMENTS_NODE:
4467 // def a(...) = defined?(a(...))
4468 // ^^^
4469 dtype = DEFINED_EXPR;
4470 break;
4471 case PM_SPLAT_NODE: {
4472 // def a(*) = defined?(a(*))
4473 // ^
4474 const pm_splat_node_t *cast = (const pm_splat_node_t *) node;
4475
4476 if (cast->expression == NULL) {
4477 dtype = DEFINED_EXPR;
4478 break;
4479 }
4480
4481 pm_compile_defined_expr0(iseq, cast->expression, node_location, ret, popped, scope_node, in_condition, lfinish, false);
4482
4483 if (!lfinish[1]) lfinish[1] = NEW_LABEL(location.line);
4484 PUSH_INSNL(ret, location, branchunless, lfinish[1]);
4485
4486 dtype = DEFINED_EXPR;
4487 break;
4488 }
4489 case PM_SHAREABLE_CONSTANT_NODE:
4490 // # shareable_constant_value: literal
4491 // defined?(A = 1)
4492 // ^^^^^
4493 pm_compile_defined_expr0(iseq, ((const pm_shareable_constant_node_t *) node)->write, node_location, ret, popped, scope_node, in_condition, lfinish, explicit_receiver);
4494 return;
4495/* Unreachable (parameters) ***************************************************/
4496 case PM_BLOCK_LOCAL_VARIABLE_NODE:
4497 case PM_BLOCK_PARAMETER_NODE:
4498 case PM_BLOCK_PARAMETERS_NODE:
4499 case PM_FORWARDING_PARAMETER_NODE:
4500 case PM_IMPLICIT_REST_NODE:
4501 case PM_IT_PARAMETERS_NODE:
4502 case PM_PARAMETERS_NODE:
4503 case PM_KEYWORD_REST_PARAMETER_NODE:
4504 case PM_NO_KEYWORDS_PARAMETER_NODE:
4505 case PM_NUMBERED_PARAMETERS_NODE:
4506 case PM_OPTIONAL_KEYWORD_PARAMETER_NODE:
4507 case PM_OPTIONAL_PARAMETER_NODE:
4508 case PM_REQUIRED_KEYWORD_PARAMETER_NODE:
4509 case PM_REQUIRED_PARAMETER_NODE:
4510 case PM_REST_PARAMETER_NODE:
4511/* Unreachable (pattern matching) *********************************************/
4512 case PM_ALTERNATION_PATTERN_NODE:
4513 case PM_ARRAY_PATTERN_NODE:
4514 case PM_CAPTURE_PATTERN_NODE:
4515 case PM_FIND_PATTERN_NODE:
4516 case PM_HASH_PATTERN_NODE:
4517 case PM_PINNED_EXPRESSION_NODE:
4518 case PM_PINNED_VARIABLE_NODE:
4519/* Unreachable (indirect writes) **********************************************/
4520 case PM_CALL_TARGET_NODE:
4521 case PM_CLASS_VARIABLE_TARGET_NODE:
4522 case PM_CONSTANT_PATH_TARGET_NODE:
4523 case PM_CONSTANT_TARGET_NODE:
4524 case PM_GLOBAL_VARIABLE_TARGET_NODE:
4525 case PM_INDEX_TARGET_NODE:
4526 case PM_INSTANCE_VARIABLE_TARGET_NODE:
4527 case PM_LOCAL_VARIABLE_TARGET_NODE:
4528 case PM_MULTI_TARGET_NODE:
4529/* Unreachable (clauses) ******************************************************/
4530 case PM_ELSE_NODE:
4531 case PM_ENSURE_NODE:
4532 case PM_IN_NODE:
4533 case PM_RESCUE_NODE:
4534 case PM_WHEN_NODE:
4535/* Unreachable (miscellaneous) ************************************************/
4536 case PM_BLOCK_NODE:
4537 case PM_EMBEDDED_STATEMENTS_NODE:
4538 case PM_EMBEDDED_VARIABLE_NODE:
4539 case PM_MISSING_NODE:
4540 case PM_PRE_EXECUTION_NODE:
4541 case PM_PROGRAM_NODE:
4542 case PM_SCOPE_NODE:
4543 case PM_STATEMENTS_NODE:
4544 rb_bug("Unreachable node in defined?: %s", pm_node_type_to_str(PM_NODE_TYPE(node)));
4545 }
4546
4547 RUBY_ASSERT(dtype != DEFINED_NOT_DEFINED);
4548 PUSH_INSN1(ret, location, putobject, PUSH_VAL(dtype));
4549
4550#undef PUSH_VAL
4551}
4552
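// Compile a defined? body via pm_compile_defined_expr0 and, if any
// sub-expression can raise (lfinish[1] was created), wrap the emitted range
// in a rescue catch entry whose continuation is lfinish[1], so the whole
// expression evaluates to nil instead of raising.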
4553static void
4554pm_defined_expr(rb_iseq_t *iseq, const pm_node_t *node, const pm_node_location_t *node_location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node, bool in_condition, LABEL **lfinish)
4555{
4556 LINK_ELEMENT *lcur = ret->last;
4557 pm_compile_defined_expr0(iseq, node, node_location, ret, popped, scope_node, in_condition, lfinish, false);
4558
4559 if (lfinish[1]) {
4560 LABEL *lstart = NEW_LABEL(node_location->line);
4561 LABEL *lend = NEW_LABEL(node_location->line);
4562
4563 const struct rb_iseq_new_with_callback_callback_func *ifunc =
4564 rb_iseq_new_with_callback_new_callback(build_defined_rescue_iseq, NULL);
4565
4566 const rb_iseq_t *rescue = new_child_iseq_with_callback(
4567 iseq,
4568 ifunc,
4569 rb_str_concat(rb_str_new2("defined guard in "), ISEQ_BODY(iseq)->location.label),
4570 iseq,
4571 ISEQ_TYPE_RESCUE,
4572 0
4573 );
4574
4575 lstart->rescued = LABEL_RESCUE_BEG;
4576 lend->rescued = LABEL_RESCUE_END;
4577
4578 APPEND_LABEL(ret, lcur, lstart);
4579 PUSH_LABEL(ret, lend);
4580 PUSH_CATCH_ENTRY(CATCH_TYPE_RESCUE, lstart, lend, rescue, lfinish[1]);
4581 }
4582}
4583
4584static void
4585pm_compile_defined_expr(rb_iseq_t *iseq, const pm_node_t *node, const pm_node_location_t *node_location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node, bool in_condition)
4586{
4587 LABEL *lfinish[3];
4588 LINK_ELEMENT *last = ret->last;
4589
4590 lfinish[0] = NEW_LABEL(node_location->line);
4591 lfinish[1] = 0;
4592 lfinish[2] = 0;
4593
4594 if (!popped) {
4595 pm_defined_expr(iseq, node, node_location, ret, popped, scope_node, in_condition, lfinish);
4596 }
4597
4598 if (lfinish[1]) {
4599 ELEM_INSERT_NEXT(last, &new_insn_body(iseq, node_location->line, node_location->node_id, BIN(putnil), 0)->link);
4600 PUSH_INSN(ret, *node_location, swap);
4601
4602 if (lfinish[2]) PUSH_LABEL(ret, lfinish[2]);
4603 PUSH_INSN(ret, *node_location, pop);
4604 PUSH_LABEL(ret, lfinish[1]);
4605
4606 }
4607
4608 PUSH_LABEL(ret, lfinish[0]);
4609}
4610
4611// This is exactly the same as add_ensure_iseq, except that it compiles the
4612// ensure node as a prism node rather than a CRuby NODE.
4613static void
4614pm_add_ensure_iseq(LINK_ANCHOR *const ret, rb_iseq_t *iseq, int is_return, pm_scope_node_t *scope_node)
4615{
4616 RUBY_ASSERT(can_add_ensure_iseq(iseq));
4617
4618 struct iseq_compile_data_ensure_node_stack *enlp =
4619 ISEQ_COMPILE_DATA(iseq)->ensure_node_stack;
4620 struct iseq_compile_data_ensure_node_stack *prev_enlp = enlp;
4621 DECL_ANCHOR(ensure);
4622
4623 while (enlp) {
4624 if (enlp->erange != NULL) {
4625 DECL_ANCHOR(ensure_part);
4626 LABEL *lstart = NEW_LABEL(0);
4627 LABEL *lend = NEW_LABEL(0);
4628
4629 add_ensure_range(iseq, enlp->erange, lstart, lend);
4630
4631 ISEQ_COMPILE_DATA(iseq)->ensure_node_stack = enlp->prev;
4632 PUSH_LABEL(ensure_part, lstart);
4633 bool popped = true;
4634 PM_COMPILE_INTO_ANCHOR(ensure_part, (const pm_node_t *) enlp->ensure_node);
4635 PUSH_LABEL(ensure_part, lend);
4636 PUSH_SEQ(ensure, ensure_part);
4637 }
4638 else {
4639 if (!is_return) {
4640 break;
4641 }
4642 }
4643 enlp = enlp->prev;
4644 }
4645 ISEQ_COMPILE_DATA(iseq)->ensure_node_stack = prev_enlp;
4646 PUSH_SEQ(ret, ensure);
4647}
4648
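// Context threaded through pm_local_table_insert_func below, which is used as
// an st_update callback when inserting locals into the iseq's local table and
// the constant-id-to-index lookup table.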
4649struct pm_local_table_insert_ctx {
4650 pm_scope_node_t *scope_node;
4651 rb_ast_id_table_t *local_table_for_iseq;
4652 int local_index;
4653};
4654
4655static int
4656pm_local_table_insert_func(st_data_t *key, st_data_t *value, st_data_t arg, int existing)
4657{
4658 if (!existing) {
4659 pm_constant_id_t constant_id = (pm_constant_id_t) *key;
4660 struct pm_local_table_insert_ctx * ctx = (struct pm_local_table_insert_ctx *) arg;
4661
4662 pm_scope_node_t *scope_node = ctx->scope_node;
4663 rb_ast_id_table_t *local_table_for_iseq = ctx->local_table_for_iseq;
4664 int local_index = ctx->local_index;
4665
4666 ID local = pm_constant_id_lookup(scope_node, constant_id);
4667 local_table_for_iseq->ids[local_index] = local;
4668
4669 *value = (st_data_t)local_index;
4670
4671 ctx->local_index++;
4672 }
4673
4674 return ST_CONTINUE;
4675}
4676
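// Insert a local, identified by its prism constant id, into both the iseq's
// local table and the scope node's index lookup table at the given index.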
4682static void
4683pm_insert_local_index(pm_constant_id_t constant_id, int local_index, st_table *index_lookup_table, rb_ast_id_table_t *local_table_for_iseq, pm_scope_node_t *scope_node)
4684{
4685 RUBY_ASSERT((constant_id & PM_SPECIAL_CONSTANT_FLAG) == 0);
4686
4687 ID local = pm_constant_id_lookup(scope_node, constant_id);
4688 local_table_for_iseq->ids[local_index] = local;
4689 st_insert(index_lookup_table, (st_data_t) constant_id, (st_data_t) local_index);
4690}
4691
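// Insert a local whose name is already a Ruby ID rather than a prism constant
// id. The key is tagged with PM_SPECIAL_CONSTANT_FLAG so it cannot collide
// with a real constant id in the lookup table.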
4696static void
4697pm_insert_local_special(ID local_name, int local_index, st_table *index_lookup_table, rb_ast_id_table_t *local_table_for_iseq)
4698{
4699 local_table_for_iseq->ids[local_index] = local_name;
4700 st_insert(index_lookup_table, (st_data_t) (local_name | PM_SPECIAL_CONSTANT_FLAG), (st_data_t) local_index);
4701}
4702
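// Add the names introduced by a destructured method parameter ((a, b), c) to
// the local table and the index lookup table, recursing into nested multi
// targets. Returns the next free local index.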
4709static int
4710pm_compile_destructured_param_locals(const pm_multi_target_node_t *node, st_table *index_lookup_table, rb_ast_id_table_t *local_table_for_iseq, pm_scope_node_t *scope_node, int local_index)
4711{
4712 for (size_t index = 0; index < node->lefts.size; index++) {
4713 const pm_node_t *left = node->lefts.nodes[index];
4714
4715 if (PM_NODE_TYPE_P(left, PM_REQUIRED_PARAMETER_NODE)) {
4716 if (!PM_NODE_FLAG_P(left, PM_PARAMETER_FLAGS_REPEATED_PARAMETER)) {
4717 pm_insert_local_index(((const pm_required_parameter_node_t *) left)->name, local_index, index_lookup_table, local_table_for_iseq, scope_node);
4718 local_index++;
4719 }
4720 }
4721 else {
4722 RUBY_ASSERT(PM_NODE_TYPE_P(left, PM_MULTI_TARGET_NODE));
4723 local_index = pm_compile_destructured_param_locals((const pm_multi_target_node_t *) left, index_lookup_table, local_table_for_iseq, scope_node, local_index);
4724 }
4725 }
4726
4727 if (node->rest != NULL && PM_NODE_TYPE_P(node->rest, PM_SPLAT_NODE)) {
4728 const pm_splat_node_t *rest = (const pm_splat_node_t *) node->rest;
4729
4730 if (rest->expression != NULL) {
4731 RUBY_ASSERT(PM_NODE_TYPE_P(rest->expression, PM_REQUIRED_PARAMETER_NODE));
4732
4733 if (!PM_NODE_FLAG_P(rest->expression, PM_PARAMETER_FLAGS_REPEATED_PARAMETER)) {
4734 pm_insert_local_index(((const pm_required_parameter_node_t *) rest->expression)->name, local_index, index_lookup_table, local_table_for_iseq, scope_node);
4735 local_index++;
4736 }
4737 }
4738 }
4739
4740 for (size_t index = 0; index < node->rights.size; index++) {
4741 const pm_node_t *right = node->rights.nodes[index];
4742
4743 if (PM_NODE_TYPE_P(right, PM_REQUIRED_PARAMETER_NODE)) {
4744 if (!PM_NODE_FLAG_P(right, PM_PARAMETER_FLAGS_REPEATED_PARAMETER)) {
4745 pm_insert_local_index(((const pm_required_parameter_node_t *) right)->name, local_index, index_lookup_table, local_table_for_iseq, scope_node);
4746 local_index++;
4747 }
4748 }
4749 else {
4750 RUBY_ASSERT(PM_NODE_TYPE_P(right, PM_MULTI_TARGET_NODE));
4751 local_index = pm_compile_destructured_param_locals((const pm_multi_target_node_t *) right, index_lookup_table, local_table_for_iseq, scope_node, local_index);
4752 }
4753 }
4754
4755 return local_index;
4756}
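// For instance, a block with nested destructured parameters like:
//
//     proc { |(a, (b, *c), d)| }
//
// recurses through the nested multi targets above and reserves one slot in
// the local table for each named parameter (a, b, c, and d) in the order in
// which they appear.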
4757
4762static inline void
4763pm_compile_destructured_param_write(rb_iseq_t *iseq, const pm_required_parameter_node_t *node, LINK_ANCHOR *const ret, const pm_scope_node_t *scope_node)
4764{
4765 const pm_node_location_t location = PM_NODE_START_LOCATION(scope_node->parser, node);
4766 pm_local_index_t index = pm_lookup_local_index(iseq, scope_node, node->name, 0);
4767 PUSH_SETLOCAL(ret, location, index.index, index.level);
4768}
4769
4778static void
4779pm_compile_destructured_param_writes(rb_iseq_t *iseq, const pm_multi_target_node_t *node, LINK_ANCHOR *const ret, const pm_scope_node_t *scope_node)
4780{
4781 const pm_node_location_t location = PM_NODE_START_LOCATION(scope_node->parser, node);
4782 bool has_rest = (node->rest && PM_NODE_TYPE_P(node->rest, PM_SPLAT_NODE) && (((const pm_splat_node_t *) node->rest)->expression) != NULL);
4783 bool has_rights = node->rights.size > 0;
4784
4785 int flag = (has_rest || has_rights) ? 1 : 0;
4786 PUSH_INSN2(ret, location, expandarray, INT2FIX(node->lefts.size), INT2FIX(flag));
4787
4788 for (size_t index = 0; index < node->lefts.size; index++) {
4789 const pm_node_t *left = node->lefts.nodes[index];
4790
4791 if (PM_NODE_TYPE_P(left, PM_REQUIRED_PARAMETER_NODE)) {
4792 pm_compile_destructured_param_write(iseq, (const pm_required_parameter_node_t *) left, ret, scope_node);
4793 }
4794 else {
4795 RUBY_ASSERT(PM_NODE_TYPE_P(left, PM_MULTI_TARGET_NODE));
4796 pm_compile_destructured_param_writes(iseq, (const pm_multi_target_node_t *) left, ret, scope_node);
4797 }
4798 }
4799
4800 if (has_rest) {
4801 if (has_rights) {
4802 PUSH_INSN2(ret, location, expandarray, INT2FIX(node->rights.size), INT2FIX(3));
4803 }
4804
4805 const pm_node_t *rest = ((const pm_splat_node_t *) node->rest)->expression;
4806 RUBY_ASSERT(PM_NODE_TYPE_P(rest, PM_REQUIRED_PARAMETER_NODE));
4807
4808 pm_compile_destructured_param_write(iseq, (const pm_required_parameter_node_t *) rest, ret, scope_node);
4809 }
4810
4811 if (has_rights) {
4812 if (!has_rest) {
4813 PUSH_INSN2(ret, location, expandarray, INT2FIX(node->rights.size), INT2FIX(2));
4814 }
4815
4816 for (size_t index = 0; index < node->rights.size; index++) {
4817 const pm_node_t *right = node->rights.nodes[index];
4818
4819 if (PM_NODE_TYPE_P(right, PM_REQUIRED_PARAMETER_NODE)) {
4820 pm_compile_destructured_param_write(iseq, (const pm_required_parameter_node_t *) right, ret, scope_node);
4821 }
4822 else {
4823 RUBY_ASSERT(PM_NODE_TYPE_P(right, PM_MULTI_TARGET_NODE));
4824 pm_compile_destructured_param_writes(iseq, (const pm_multi_target_node_t *) right, ret, scope_node);
4825 }
4826 }
4827 }
4828}
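// Roughly, for a destructured parameter such as:
//
//     proc { |(a, *b, c)| }
//
// the incoming value is expanded once for the leading target `a` (with the
// splat flag set), then expanded again to split the trailing target `c` off
// of the rest `b`, and each piece is written with its own setlocal.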
4829
4834typedef struct pm_multi_target_state_node {
4835 // The pointer to the topn instruction that will need to be modified after
4836 // we know the total stack size of all of the targets.
4837 INSN *topn;
4838
4839 // The index of the stack from the base of the entire multi target at which
4840 // the parent expression is located.
4841 size_t stack_index;
4842
4843 // The number of slots in the stack that this node occupies.
4844 size_t stack_size;
4845
4846 // The position of the node in the list of targets.
4847 size_t position;
4848
4849 // A pointer to the next node in this linked list.
4850 struct pm_multi_target_state_node *next;
4851} pm_multi_target_state_node_t;
4852
4860typedef struct {
4861 // The total number of slots in the stack that this multi target occupies.
4862 size_t stack_size;
4863
4864 // The position of the current node being compiled. This is forwarded to
4865 // nodes when they are allocated.
4866 size_t position;
4867
4868 // A pointer to the head of this linked list.
4869 pm_multi_target_state_node_t *head;
4870
4871 // A pointer to the tail of this linked list.
4872 pm_multi_target_state_node_t *tail;
4873} pm_multi_target_state_t;
4874
4878static void
4879pm_multi_target_state_push(pm_multi_target_state_t *state, INSN *topn, size_t stack_size)
4880{
4881 pm_multi_target_state_node_t *node = ALLOC(pm_multi_target_state_node_t);
4882 node->topn = topn;
4883 node->stack_index = state->stack_size + 1;
4884 node->stack_size = stack_size;
4885 node->position = state->position;
4886 node->next = NULL;
4887
4888 if (state->head == NULL) {
4889 state->head = node;
4890 state->tail = node;
4891 }
4892 else {
4893 state->tail->next = node;
4894 state->tail = node;
4895 }
4896
4897 state->stack_size += stack_size;
4898}
4899
4905static void
4906pm_multi_target_state_update(pm_multi_target_state_t *state)
4907{
4908 // If nothing was ever pushed onto the stack, then we don't need to do any
4909 // kind of updates.
4910 if (state->stack_size == 0) return;
4911
4912 pm_multi_target_state_node_t *current = state->head;
4913 pm_multi_target_state_node_t *previous;
4914
4915 while (current != NULL) {
4916 VALUE offset = INT2FIX(state->stack_size - current->stack_index + current->position);
4917 current->topn->operands[0] = offset;
4918
4919 // stack_size will be > 1 in the case that we compiled an index target
4920 // and it had arguments. In this case, we use multiple topn instructions
4921 // to grab up all of the arguments as well, so those offsets need to be
4922 // updated as well.
4923 if (current->stack_size > 1) {
4924 INSN *insn = current->topn;
4925
4926 for (size_t index = 1; index < current->stack_size; index += 1) {
4927 LINK_ELEMENT *element = get_next_insn(insn);
4928 RUBY_ASSERT(IS_INSN(element));
4929
4930 insn = (INSN *) element;
4931 RUBY_ASSERT(insn->insn_id == BIN(topn));
4932
4933 insn->operands[0] = offset;
4934 }
4935 }
4936
4937 previous = current;
4938 current = current->next;
4939
4940 xfree(previous);
4941 }
4942}
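// For example, in a multiple assignment whose targets have receivers, such
// as:
//
//     a[0], b.c = 1, 2
//
// the receiver expressions live lower on the stack than the values being
// written, so each write records a topn instruction to reach back down to
// them. The final offsets are only known once every target has been
// compiled, which is when this function patches each recorded operand and
// frees the bookkeeping nodes.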
4943
4944static void
4945pm_compile_multi_target_node(rb_iseq_t *iseq, const pm_node_t *node, LINK_ANCHOR *const parents, LINK_ANCHOR *const writes, LINK_ANCHOR *const cleanup, pm_scope_node_t *scope_node, pm_multi_target_state_t *state);
4946
4975static void
4976pm_compile_target_node(rb_iseq_t *iseq, const pm_node_t *node, LINK_ANCHOR *const parents, LINK_ANCHOR *const writes, LINK_ANCHOR *const cleanup, pm_scope_node_t *scope_node, pm_multi_target_state_t *state)
4977{
4978 const pm_node_location_t location = PM_NODE_START_LOCATION(scope_node->parser, node);
4979
4980 switch (PM_NODE_TYPE(node)) {
4981 case PM_LOCAL_VARIABLE_TARGET_NODE: {
4982 // Local variable targets have no parent expression, so they only need
4983 // to compile the write.
4984 //
4985 // for i in []; end
4986 //
4987 const pm_local_variable_target_node_t *cast = (const pm_local_variable_target_node_t *) node;
4988 pm_local_index_t index = pm_lookup_local_index(iseq, scope_node, cast->name, cast->depth);
4989
4990 PUSH_SETLOCAL(writes, location, index.index, index.level);
4991 break;
4992 }
4993 case PM_CLASS_VARIABLE_TARGET_NODE: {
4994 // Class variable targets have no parent expression, so they only need
4995 // to compile the write.
4996 //
4997 // for @@i in []; end
4998 //
4999 const pm_class_variable_target_node_t *cast = (const pm_class_variable_target_node_t *) node;
5000 ID name = pm_constant_id_lookup(scope_node, cast->name);
5001
5002 VALUE operand = ID2SYM(name);
5003 PUSH_INSN2(writes, location, setclassvariable, operand, get_cvar_ic_value(iseq, name));
5004 break;
5005 }
5006 case PM_CONSTANT_TARGET_NODE: {
5007 // Constant targets have no parent expression, so they only need to
5008 // compile the write.
5009 //
5010 // for I in []; end
5011 //
5012 const pm_constant_target_node_t *cast = (const pm_constant_target_node_t *) node;
5013 ID name = pm_constant_id_lookup(scope_node, cast->name);
5014
5015 VALUE operand = ID2SYM(name);
5016 PUSH_INSN1(writes, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_CONST_BASE));
5017 PUSH_INSN1(writes, location, setconstant, operand);
5018 break;
5019 }
5020 case PM_GLOBAL_VARIABLE_TARGET_NODE: {
5021 // Global variable targets have no parent expression, so they only need
5022 // to compile the write.
5023 //
5024 // for $i in []; end
5025 //
5026 const pm_global_variable_target_node_t *cast = (const pm_global_variable_target_node_t *) node;
5027 ID name = pm_constant_id_lookup(scope_node, cast->name);
5028
5029 VALUE operand = ID2SYM(name);
5030 PUSH_INSN1(writes, location, setglobal, operand);
5031 break;
5032 }
5033 case PM_INSTANCE_VARIABLE_TARGET_NODE: {
5034 // Instance variable targets have no parent expression, so they only
5035 // need to compile the write.
5036 //
5037 // for @i in []; end
5038 //
5039 const pm_instance_variable_target_node_t *cast = (const pm_instance_variable_target_node_t *) node;
5040 ID name = pm_constant_id_lookup(scope_node, cast->name);
5041
5042 VALUE operand = ID2SYM(name);
5043 PUSH_INSN2(writes, location, setinstancevariable, operand, get_ivar_ic_value(iseq, name));
5044 break;
5045 }
5046 case PM_CONSTANT_PATH_TARGET_NODE: {
5047 // Constant path targets have a parent expression that is the object
5048 // that owns the constant. This needs to be compiled first into the
5049 // parents sequence. If no parent is found, then it represents using the
5050 // unary :: operator to indicate a top-level constant. In that case we
5051 // need to push Object onto the stack.
5052 //
5053 // for I::J in []; end
5054 //
5055 const pm_constant_path_target_node_t *cast = (const pm_constant_path_target_node_t *) node;
5056 ID name = pm_constant_id_lookup(scope_node, cast->name);
5057
5058 if (cast->parent != NULL) {
5059 pm_compile_node(iseq, cast->parent, parents, false, scope_node);
5060 }
5061 else {
5062 PUSH_INSN1(parents, location, putobject, rb_cObject);
5063 }
5064
5065 if (state == NULL) {
5066 PUSH_INSN(writes, location, swap);
5067 }
5068 else {
5069 PUSH_INSN1(writes, location, topn, INT2FIX(1));
5070 pm_multi_target_state_push(state, (INSN *) LAST_ELEMENT(writes), 1);
5071 }
5072
5073 VALUE operand = ID2SYM(name);
5074 PUSH_INSN1(writes, location, setconstant, operand);
5075
5076 if (state != NULL) {
5077 PUSH_INSN(cleanup, location, pop);
5078 }
5079
5080 break;
5081 }
5082 case PM_CALL_TARGET_NODE: {
5083 // Call targets have a parent expression that is the receiver of the
5084 // method being called. This needs to be compiled first into the parents
5085 // sequence. These nodes cannot have arguments, so the method call is
5086 // compiled with a single argument which represents the value being
5087 // written.
5088 //
5089 // for i.j in []; end
5090 //
5091 const pm_call_target_node_t *cast = (const pm_call_target_node_t *) node;
5092 ID method_id = pm_constant_id_lookup(scope_node, cast->name);
5093
5094 pm_compile_node(iseq, cast->receiver, parents, false, scope_node);
5095
5096 LABEL *safe_label = NULL;
5097 if (PM_NODE_FLAG_P(cast, PM_CALL_NODE_FLAGS_SAFE_NAVIGATION)) {
5098 safe_label = NEW_LABEL(location.line);
5099 PUSH_INSN(parents, location, dup);
5100 PUSH_INSNL(parents, location, branchnil, safe_label);
5101 }
5102
5103 if (state != NULL) {
5104 PUSH_INSN1(writes, location, topn, INT2FIX(1));
5105 pm_multi_target_state_push(state, (INSN *) LAST_ELEMENT(writes), 1);
5106 PUSH_INSN(writes, location, swap);
5107 }
5108
5109 int flags = VM_CALL_ARGS_SIMPLE;
5110 if (PM_NODE_FLAG_P(cast, PM_CALL_NODE_FLAGS_IGNORE_VISIBILITY)) flags |= VM_CALL_FCALL;
5111
5112 PUSH_SEND_WITH_FLAG(writes, location, method_id, INT2FIX(1), INT2FIX(flags));
5113 if (safe_label != NULL && state == NULL) PUSH_LABEL(writes, safe_label);
5114 PUSH_INSN(writes, location, pop);
5115 if (safe_label != NULL && state != NULL) PUSH_LABEL(writes, safe_label);
5116
5117 if (state != NULL) {
5118 PUSH_INSN(cleanup, location, pop);
5119 }
5120
5121 break;
5122 }
5123 case PM_INDEX_TARGET_NODE: {
5124 // Index targets have a parent expression that is the receiver of the
5125 // method being called and any additional arguments that are being
5126 // passed along with the value being written. The receiver and arguments
5127 // both need to be on the stack. Note that this is even more complicated
5128 // by the fact that these nodes can hold a block using the unary &
5129 // operator.
5130 //
5131 // for i[:j] in []; end
5132 //
5133 const pm_index_target_node_t *cast = (const pm_index_target_node_t *) node;
5134
5135 pm_compile_node(iseq, cast->receiver, parents, false, scope_node);
5136
5137 int flags = 0;
5138 struct rb_callinfo_kwarg *kwargs = NULL;
5139 int argc = pm_setup_args(cast->arguments, (const pm_node_t *) cast->block, &flags, &kwargs, iseq, parents, scope_node, &location);
5140
5141 if (state != NULL) {
5142 PUSH_INSN1(writes, location, topn, INT2FIX(argc + 1));
5143 pm_multi_target_state_push(state, (INSN *) LAST_ELEMENT(writes), argc + 1);
5144
5145 if (argc == 0) {
5146 PUSH_INSN(writes, location, swap);
5147 }
5148 else {
5149 for (int index = 0; index < argc; index++) {
5150 PUSH_INSN1(writes, location, topn, INT2FIX(argc + 1));
5151 }
5152 PUSH_INSN1(writes, location, topn, INT2FIX(argc + 1));
5153 }
5154 }
5155
5156 // The argc that we're going to pass to the send instruction is the
5157 // number of arguments + 1 for the value being written. If there's a
5158 // splat, then we need to insert newarray and concatarray instructions
5159 // after the arguments have been written.
5160 int ci_argc = argc + 1;
5161 if (flags & VM_CALL_ARGS_SPLAT) {
5162 ci_argc--;
5163 PUSH_INSN1(writes, location, newarray, INT2FIX(1));
5164 PUSH_INSN(writes, location, concatarray);
5165 }
5166
5167 PUSH_SEND_R(writes, location, idASET, INT2NUM(ci_argc), NULL, INT2FIX(flags), kwargs);
5168 PUSH_INSN(writes, location, pop);
5169
5170 if (state != NULL) {
5171 if (argc != 0) {
5172 PUSH_INSN(writes, location, pop);
5173 }
5174
5175 for (int index = 0; index < argc + 1; index++) {
5176 PUSH_INSN(cleanup, location, pop);
5177 }
5178 }
5179
5180 break;
5181 }
5182 case PM_MULTI_TARGET_NODE: {
5183 // Multi target nodes represent a set of writes to multiple variables.
5184 // The parent expressions are the combined set of the parent expressions
5185 // of its inner target nodes.
5186 //
5187 // for i, j in []; end
5188 //
5189 size_t before_position;
5190 if (state != NULL) {
5191 before_position = state->position;
5192 state->position--;
5193 }
5194
5195 pm_compile_multi_target_node(iseq, node, parents, writes, cleanup, scope_node, state);
5196 if (state != NULL) state->position = before_position;
5197
5198 break;
5199 }
5200 case PM_SPLAT_NODE: {
5201 // Splat nodes capture all values into an array. They can be used
5202 // as targets in assignments or for loops.
5203 //
5204 // for *x in []; end
5205 //
5206 const pm_splat_node_t *cast = (const pm_splat_node_t *) node;
5207
5208 if (cast->expression != NULL) {
5209 pm_compile_target_node(iseq, cast->expression, parents, writes, cleanup, scope_node, state);
5210 }
5211
5212 break;
5213 }
5214 default:
5215 rb_bug("Unexpected node type: %s", pm_node_type_to_str(PM_NODE_TYPE(node)));
5216 break;
5217 }
5218}
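// Taken together, the cases above split every target into three sequences:
// parents (receiver/owner expressions evaluated up front), writes (the
// actual setlocal/setconstant/send instructions), and cleanup (pops for any
// slots left behind). A target like `i[j]` in
//
//     for i[j] in []; end
//
// compiles `i` and `j` into parents, the []= send into writes, and the pops
// for the leftover receiver/argument slots into cleanup.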
5219
5225static void
5226pm_compile_multi_target_node(rb_iseq_t *iseq, const pm_node_t *node, LINK_ANCHOR *const parents, LINK_ANCHOR *const writes, LINK_ANCHOR *const cleanup, pm_scope_node_t *scope_node, pm_multi_target_state_t *state)
5227{
5228 const pm_node_location_t location = PM_NODE_START_LOCATION(scope_node->parser, node);
5229 const pm_node_list_t *lefts;
5230 const pm_node_t *rest;
5231 const pm_node_list_t *rights;
5232
5233 switch (PM_NODE_TYPE(node)) {
5234 case PM_MULTI_TARGET_NODE: {
5235 const pm_multi_target_node_t *cast = (const pm_multi_target_node_t *) node;
5236 lefts = &cast->lefts;
5237 rest = cast->rest;
5238 rights = &cast->rights;
5239 break;
5240 }
5241 case PM_MULTI_WRITE_NODE: {
5242 const pm_multi_write_node_t *cast = (const pm_multi_write_node_t *) node;
5243 lefts = &cast->lefts;
5244 rest = cast->rest;
5245 rights = &cast->rights;
5246 break;
5247 }
5248 default:
5249 rb_bug("Unsupported node %s", pm_node_type_to_str(PM_NODE_TYPE(node)));
5250 break;
5251 }
5252
5253 bool has_rest = (rest != NULL) && PM_NODE_TYPE_P(rest, PM_SPLAT_NODE) && ((const pm_splat_node_t *) rest)->expression != NULL;
5254 bool has_posts = rights->size > 0;
5255
5256 // The first instruction in the writes sequence is going to spread the
5257 // top value of the stack onto the number of values that we're going to
5258 // write.
5259 PUSH_INSN2(writes, location, expandarray, INT2FIX(lefts->size), INT2FIX((has_rest || has_posts) ? 1 : 0));
5260
5261 // We need to keep track of some additional state information as we're
5262 // going through the targets because we will need to revisit them once
5263 // we know how many values are being pushed onto the stack.
5264 pm_multi_target_state_t target_state = { 0 };
5265 if (state == NULL) state = &target_state;
5266
5267 size_t base_position = state->position;
5268 size_t splat_position = (has_rest || has_posts) ? 1 : 0;
5269
5270 // Next, we'll iterate through all of the leading targets.
5271 for (size_t index = 0; index < lefts->size; index++) {
5272 const pm_node_t *target = lefts->nodes[index];
5273 state->position = lefts->size - index + splat_position + base_position;
5274 pm_compile_target_node(iseq, target, parents, writes, cleanup, scope_node, state);
5275 }
5276
5277 // Next, we'll compile the rest target if there is one.
5278 if (has_rest) {
5279 const pm_node_t *target = ((const pm_splat_node_t *) rest)->expression;
5280 state->position = 1 + rights->size + base_position;
5281
5282 if (has_posts) {
5283 PUSH_INSN2(writes, location, expandarray, INT2FIX(rights->size), INT2FIX(3));
5284 }
5285
5286 pm_compile_target_node(iseq, target, parents, writes, cleanup, scope_node, state);
5287 }
5288
5289 // Finally, we'll compile the trailing targets.
5290 if (has_posts) {
5291 if (!has_rest && rest != NULL) {
5292 PUSH_INSN2(writes, location, expandarray, INT2FIX(rights->size), INT2FIX(2));
5293 }
5294
5295 for (size_t index = 0; index < rights->size; index++) {
5296 const pm_node_t *target = rights->nodes[index];
5297 state->position = rights->size - index + base_position;
5298 pm_compile_target_node(iseq, target, parents, writes, cleanup, scope_node, state);
5299 }
5300 }
5301}
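// As a sketch, a multi target such as:
//
//     i, *j, k.l = value
//
// expands the value for the leading target `i`, re-expands it to separate
// the trailing target `k.l` from the rest `j`, and keeps state->position up
// to date so that `k.l` knows how far down the stack its receiver `k` was
// left by the parents sequence.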
5302
5308static void
5309pm_compile_for_node_index(rb_iseq_t *iseq, const pm_node_t *node, LINK_ANCHOR *const ret, pm_scope_node_t *scope_node)
5310{
5311 const pm_node_location_t location = PM_NODE_START_LOCATION(scope_node->parser, node);
5312
5313 switch (PM_NODE_TYPE(node)) {
5314 case PM_LOCAL_VARIABLE_TARGET_NODE: {
5315 // For local variables, all we have to do is retrieve the value and then
5316 // compile the index node.
5317 PUSH_GETLOCAL(ret, location, 1, 0);
5318 pm_compile_target_node(iseq, node, ret, ret, ret, scope_node, NULL);
5319 break;
5320 }
5321 case PM_CLASS_VARIABLE_TARGET_NODE:
5322 case PM_CONSTANT_TARGET_NODE:
5323 case PM_GLOBAL_VARIABLE_TARGET_NODE:
5324 case PM_INSTANCE_VARIABLE_TARGET_NODE:
5325 case PM_CONSTANT_PATH_TARGET_NODE:
5326 case PM_CALL_TARGET_NODE:
5327 case PM_INDEX_TARGET_NODE: {
5328 // For other targets, we need to potentially compile the parent or
5329 // owning expression of this target, then retrieve the value, expand it,
5330 // and then compile the necessary writes.
5331 DECL_ANCHOR(writes);
5332 DECL_ANCHOR(cleanup);
5333
5334 pm_multi_target_state_t state = { 0 };
5335 state.position = 1;
5336 pm_compile_target_node(iseq, node, ret, writes, cleanup, scope_node, &state);
5337
5338 PUSH_GETLOCAL(ret, location, 1, 0);
5339 PUSH_INSN2(ret, location, expandarray, INT2FIX(1), INT2FIX(0));
5340
5341 PUSH_SEQ(ret, writes);
5342 PUSH_SEQ(ret, cleanup);
5343
5344 pm_multi_target_state_update(&state);
5345 break;
5346 }
5347 case PM_SPLAT_NODE:
5348 case PM_MULTI_TARGET_NODE: {
5349 DECL_ANCHOR(writes);
5350 DECL_ANCHOR(cleanup);
5351
5352 pm_compile_target_node(iseq, node, ret, writes, cleanup, scope_node, NULL);
5353
5354 LABEL *not_single = NEW_LABEL(location.line);
5355 LABEL *not_ary = NEW_LABEL(location.line);
5356
5357 // When there are multiple targets, we'll do a bunch of work to convert
5358 // the value into an array before we expand it. Effectively we're trying
5359 // to accomplish:
5360 //
5361 // (args.length == 1 && Array.try_convert(args[0])) || args
5362 //
5363 PUSH_GETLOCAL(ret, location, 1, 0);
5364 PUSH_INSN(ret, location, dup);
5365 PUSH_CALL(ret, location, idLength, INT2FIX(0));
5366 PUSH_INSN1(ret, location, putobject, INT2FIX(1));
5367 PUSH_CALL(ret, location, idEq, INT2FIX(1));
5368 PUSH_INSNL(ret, location, branchunless, not_single);
5369 PUSH_INSN(ret, location, dup);
5370 PUSH_INSN1(ret, location, putobject, INT2FIX(0));
5371 PUSH_CALL(ret, location, idAREF, INT2FIX(1));
5372 PUSH_INSN1(ret, location, putobject, rb_cArray);
5373 PUSH_INSN(ret, location, swap);
5374 PUSH_CALL(ret, location, rb_intern("try_convert"), INT2FIX(1));
5375 PUSH_INSN(ret, location, dup);
5376 PUSH_INSNL(ret, location, branchunless, not_ary);
5377 PUSH_INSN(ret, location, swap);
5378
5379 PUSH_LABEL(ret, not_ary);
5380 PUSH_INSN(ret, location, pop);
5381
5382 PUSH_LABEL(ret, not_single);
5383
5384 if (PM_NODE_TYPE_P(node, PM_SPLAT_NODE)) {
5385 const pm_splat_node_t *cast = (const pm_splat_node_t *) node;
5386 PUSH_INSN2(ret, location, expandarray, INT2FIX(0), INT2FIX(cast->expression == NULL ? 0 : 1));
5387 }
5388
5389 PUSH_SEQ(ret, writes);
5390 PUSH_SEQ(ret, cleanup);
5391 break;
5392 }
5393 default:
5394 rb_bug("Unexpected node type for index in for node: %s", pm_node_type_to_str(PM_NODE_TYPE(node)));
5395 break;
5396 }
5397}
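// For example, in:
//
//     for a, b in [[1, 2], [3, 4]]; end
//
// each iteration receives a single array argument, so the sequence above
// checks `args.length == 1 && Array.try_convert(args[0])` before expanding,
// mirroring how a block would destructure a lone array argument into its
// parameters.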
5398
5399static void
5400pm_compile_rescue(rb_iseq_t *iseq, const pm_begin_node_t *cast, const pm_node_location_t *node_location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
5401{
5402 const pm_parser_t *parser = scope_node->parser;
5403
5404 LABEL *lstart = NEW_LABEL(node_location->line);
5405 LABEL *lend = NEW_LABEL(node_location->line);
5406 LABEL *lcont = NEW_LABEL(node_location->line);
5407
5408 pm_scope_node_t rescue_scope_node;
5409 pm_scope_node_init((const pm_node_t *) cast->rescue_clause, &rescue_scope_node, scope_node);
5410
5411 rb_iseq_t *rescue_iseq = NEW_CHILD_ISEQ(
5412 &rescue_scope_node,
5413 rb_str_concat(rb_str_new2("rescue in "), ISEQ_BODY(iseq)->location.label),
5414 ISEQ_TYPE_RESCUE,
5415 pm_node_line_number(parser, (const pm_node_t *) cast->rescue_clause)
5416 );
5417
5418 pm_scope_node_destroy(&rescue_scope_node);
5419
5420 lstart->rescued = LABEL_RESCUE_BEG;
5421 lend->rescued = LABEL_RESCUE_END;
5422 PUSH_LABEL(ret, lstart);
5423
5424 bool prev_in_rescue = ISEQ_COMPILE_DATA(iseq)->in_rescue;
5425 ISEQ_COMPILE_DATA(iseq)->in_rescue = true;
5426
5427 if (cast->statements != NULL) {
5428 PM_COMPILE_NOT_POPPED((const pm_node_t *) cast->statements);
5429 }
5430 else {
5431 const pm_node_location_t location = PM_NODE_START_LOCATION(parser, cast->rescue_clause);
5432 PUSH_INSN(ret, location, putnil);
5433 }
5434
5435 ISEQ_COMPILE_DATA(iseq)->in_rescue = prev_in_rescue;
5436 PUSH_LABEL(ret, lend);
5437
5438 if (cast->else_clause != NULL) {
5439 if (!popped) PUSH_INSN(ret, *node_location, pop);
5440 PM_COMPILE((const pm_node_t *) cast->else_clause);
5441 }
5442
5443 PUSH_INSN(ret, *node_location, nop);
5444 PUSH_LABEL(ret, lcont);
5445
5446 if (popped) PUSH_INSN(ret, *node_location, pop);
5447 PUSH_CATCH_ENTRY(CATCH_TYPE_RESCUE, lstart, lend, rescue_iseq, lcont);
5448 PUSH_CATCH_ENTRY(CATCH_TYPE_RETRY, lend, lcont, NULL, lstart);
5449}
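// So a construct like:
//
//     begin
//       body
//     rescue => e
//       handler
//     end
//
// compiles the body between lstart and lend, builds a separate "rescue in"
// child iseq for the handler, and registers CATCH_TYPE_RESCUE and
// CATCH_TYPE_RETRY entries so the VM can transfer control on an exception
// or a `retry`.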
5450
5451static void
5452pm_compile_ensure(rb_iseq_t *iseq, const pm_begin_node_t *cast, const pm_node_location_t *node_location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
5453{
5454 const pm_parser_t *parser = scope_node->parser;
5455 const pm_statements_node_t *statements = cast->ensure_clause->statements;
5456
5457 pm_node_location_t location;
5458 if (statements != NULL) {
5459 location = PM_NODE_START_LOCATION(parser, statements);
5460 }
5461 else {
5462 location = *node_location;
5463 }
5464
5465 LABEL *lstart = NEW_LABEL(location.line);
5466 LABEL *lend = NEW_LABEL(location.line);
5467 LABEL *lcont = NEW_LABEL(location.line);
5468
5469 struct ensure_range er;
5470 struct iseq_compile_data_ensure_node_stack enl;
5471 struct ensure_range *erange;
5472
5473 DECL_ANCHOR(ensr);
5474 if (statements != NULL) {
5475 pm_compile_node(iseq, (const pm_node_t *) statements, ensr, true, scope_node);
5476 }
5477
5478 LINK_ELEMENT *last = ensr->last;
5479 bool last_leave = last && IS_INSN(last) && IS_INSN_ID(last, leave);
5480
5481 er.begin = lstart;
5482 er.end = lend;
5483 er.next = 0;
5484 push_ensure_entry(iseq, &enl, &er, (void *) cast->ensure_clause);
5485
5486 PUSH_LABEL(ret, lstart);
5487 if (cast->rescue_clause != NULL) {
5488 pm_compile_rescue(iseq, cast, node_location, ret, popped | last_leave, scope_node);
5489 }
5490 else if (cast->statements != NULL) {
5491 pm_compile_node(iseq, (const pm_node_t *) cast->statements, ret, popped | last_leave, scope_node);
5492 }
5493 else if (!(popped | last_leave)) {
5494 PUSH_SYNTHETIC_PUTNIL(ret, iseq);
5495 }
5496
5497 PUSH_LABEL(ret, lend);
5498 PUSH_SEQ(ret, ensr);
5499 if (!popped && last_leave) PUSH_INSN(ret, *node_location, putnil);
5500 PUSH_LABEL(ret, lcont);
5501 if (last_leave) PUSH_INSN(ret, *node_location, pop);
5502
5503 pm_scope_node_t next_scope_node;
5504 pm_scope_node_init((const pm_node_t *) cast->ensure_clause, &next_scope_node, scope_node);
5505
5506 rb_iseq_t *child_iseq = NEW_CHILD_ISEQ(
5507 &next_scope_node,
5508 rb_str_concat(rb_str_new2("ensure in "), ISEQ_BODY(iseq)->location.label),
5509 ISEQ_TYPE_ENSURE,
5510 location.line
5511 );
5512
5513 pm_scope_node_destroy(&next_scope_node);
5514
5515 erange = ISEQ_COMPILE_DATA(iseq)->ensure_node_stack->erange;
5516 if (lstart->link.next != &lend->link) {
5517 while (erange) {
5518 PUSH_CATCH_ENTRY(CATCH_TYPE_ENSURE, erange->begin, erange->end, child_iseq, lcont);
5519 erange = erange->next;
5520 }
5521 }
5522 ISEQ_COMPILE_DATA(iseq)->ensure_node_stack = enl.prev;
5523}
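// Likewise, for:
//
//     begin
//       body
//     ensure
//       cleanup
//     end
//
// the cleanup statements are effectively emitted twice: once inline after
// the protected region (the ensr sequence appended above), and once as an
// "ensure in" child iseq attached through CATCH_TYPE_ENSURE so it also runs
// when the body raises or jumps out early.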
5524
5529static inline bool
5530pm_opt_str_freeze_p(const rb_iseq_t *iseq, const pm_call_node_t *node)
5531{
5532 return (
5533 !PM_NODE_FLAG_P(node, PM_CALL_NODE_FLAGS_SAFE_NAVIGATION) &&
5534 node->receiver != NULL &&
5535 PM_NODE_TYPE_P(node->receiver, PM_STRING_NODE) &&
5536 node->arguments == NULL &&
5537 node->block == NULL &&
5538 ISEQ_COMPILE_DATA(iseq)->option->specialized_instruction
5539 );
5540}
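// In other words, a call like:
//
//     "literal".freeze
//
// with a string literal receiver, no arguments, no block, and no safe
// navigation is a candidate for a specialized instruction when the
// specialized_instruction compile option is enabled; this predicate is what
// gates that choice.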
5541
5546static void
5547pm_compile_constant_read(rb_iseq_t *iseq, VALUE name, const pm_location_t *name_loc, uint32_t node_id, LINK_ANCHOR *const ret, const pm_scope_node_t *scope_node)
5548{
5549 const pm_node_location_t location = PM_LOCATION_START_LOCATION(scope_node->parser, name_loc, node_id);
5550
5551 if (ISEQ_COMPILE_DATA(iseq)->option->inline_const_cache) {
5552 ISEQ_BODY(iseq)->ic_size++;
5553 VALUE segments = rb_ary_new_from_args(1, name);
5554 RB_OBJ_SET_SHAREABLE(segments);
5555 PUSH_INSN1(ret, location, opt_getconstant_path, segments);
5556 }
5557 else {
5558 PUSH_INSN(ret, location, putnil);
5559 PUSH_INSN1(ret, location, putobject, Qtrue);
5560 PUSH_INSN1(ret, location, getconstant, name);
5561 }
5562}
5563
5568static VALUE
5569pm_constant_path_parts(const pm_node_t *node, const pm_scope_node_t *scope_node)
5570{
5571 VALUE parts = rb_ary_new();
5572
5573 while (true) {
5574 switch (PM_NODE_TYPE(node)) {
5575 case PM_CONSTANT_READ_NODE: {
5576 const pm_constant_read_node_t *cast = (const pm_constant_read_node_t *) node;
5577 VALUE name = ID2SYM(pm_constant_id_lookup(scope_node, cast->name));
5578
5579 rb_ary_unshift(parts, name);
5580 return parts;
5581 }
5582 case PM_CONSTANT_PATH_NODE: {
5583 const pm_constant_path_node_t *cast = (const pm_constant_path_node_t *) node;
5584 VALUE name = ID2SYM(pm_constant_id_lookup(scope_node, cast->name));
5585
5586 rb_ary_unshift(parts, name);
5587 if (cast->parent == NULL) {
5588 rb_ary_unshift(parts, ID2SYM(idNULL));
5589 return parts;
5590 }
5591
5592 node = cast->parent;
5593 break;
5594 }
5595 default:
5596 return Qnil;
5597 }
5598 }
5599}
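// For example, `Foo::Bar::Baz` yields [:Foo, :Bar, :Baz], while an absolute
// path such as `::Foo::Bar` is prefixed with the null id to mark it as an
// absolute lookup. Any dynamic parent (something other than a constant read)
// makes the function return Qnil, signalling that no precomputed segment
// list is available.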
5600
5606static void
5607pm_compile_constant_path(rb_iseq_t *iseq, const pm_node_t *node, LINK_ANCHOR *const prefix, LINK_ANCHOR *const body, bool popped, pm_scope_node_t *scope_node)
5608{
5609 const pm_node_location_t location = PM_NODE_START_LOCATION(scope_node->parser, node);
5610
5611 switch (PM_NODE_TYPE(node)) {
5612 case PM_CONSTANT_READ_NODE: {
5613 const pm_constant_read_node_t *cast = (const pm_constant_read_node_t *) node;
5614 VALUE name = ID2SYM(pm_constant_id_lookup(scope_node, cast->name));
5615
5616 PUSH_INSN1(body, location, putobject, Qtrue);
5617 PUSH_INSN1(body, location, getconstant, name);
5618 break;
5619 }
5620 case PM_CONSTANT_PATH_NODE: {
5621 const pm_constant_path_node_t *cast = (const pm_constant_path_node_t *) node;
5622 VALUE name = ID2SYM(pm_constant_id_lookup(scope_node, cast->name));
5623
5624 if (cast->parent == NULL) {
5625 PUSH_INSN(body, location, pop);
5626 PUSH_INSN1(body, location, putobject, rb_cObject);
5627 PUSH_INSN1(body, location, putobject, Qtrue);
5628 PUSH_INSN1(body, location, getconstant, name);
5629 }
5630 else {
5631 pm_compile_constant_path(iseq, cast->parent, prefix, body, false, scope_node);
5632 PUSH_INSN1(body, location, putobject, Qfalse);
5633 PUSH_INSN1(body, location, getconstant, name);
5634 }
5635 break;
5636 }
5637 default:
5638 PM_COMPILE_INTO_ANCHOR(prefix, node);
5639 break;
5640 }
5641}
5642
5646static VALUE
5647pm_compile_shareable_constant_literal(rb_iseq_t *iseq, const pm_node_t *node, const pm_scope_node_t *scope_node)
5648{
5649 switch (PM_NODE_TYPE(node)) {
5650 case PM_TRUE_NODE:
5651 case PM_FALSE_NODE:
5652 case PM_NIL_NODE:
5653 case PM_SYMBOL_NODE:
5654 case PM_REGULAR_EXPRESSION_NODE:
5655 case PM_SOURCE_LINE_NODE:
5656 case PM_INTEGER_NODE:
5657 case PM_FLOAT_NODE:
5658 case PM_RATIONAL_NODE:
5659 case PM_IMAGINARY_NODE:
5660 case PM_SOURCE_ENCODING_NODE:
5661 return pm_static_literal_value(iseq, node, scope_node);
5662 case PM_STRING_NODE:
5663 return parse_static_literal_string(iseq, scope_node, node, &((const pm_string_node_t *) node)->unescaped);
5664 case PM_SOURCE_FILE_NODE:
5665 return pm_source_file_value((const pm_source_file_node_t *) node, scope_node);
5666 case PM_ARRAY_NODE: {
5667 const pm_array_node_t *cast = (const pm_array_node_t *) node;
5668 VALUE result = rb_ary_new_capa(cast->elements.size);
5669
5670 for (size_t index = 0; index < cast->elements.size; index++) {
5671 VALUE element = pm_compile_shareable_constant_literal(iseq, cast->elements.nodes[index], scope_node);
5672 if (element == Qundef) return Qundef;
5673
5674 rb_ary_push(result, element);
5675 }
5676
5677 return rb_ractor_make_shareable(result);
5678 }
5679 case PM_HASH_NODE: {
5680 const pm_hash_node_t *cast = (const pm_hash_node_t *) node;
5681 VALUE result = rb_hash_new_capa(cast->elements.size);
5682
5683 for (size_t index = 0; index < cast->elements.size; index++) {
5684 const pm_node_t *element = cast->elements.nodes[index];
5685 if (!PM_NODE_TYPE_P(element, PM_ASSOC_NODE)) return Qundef;
5686
5687 const pm_assoc_node_t *assoc = (const pm_assoc_node_t *) element;
5688
5689 VALUE key = pm_compile_shareable_constant_literal(iseq, assoc->key, scope_node);
5690 if (key == Qundef) return Qundef;
5691
5692 VALUE value = pm_compile_shareable_constant_literal(iseq, assoc->value, scope_node);
5693 if (value == Qundef) return Qundef;
5694
5695 rb_hash_aset(result, key, value);
5696 }
5697
5698 return rb_ractor_make_shareable(result);
5699 }
5700 default:
5701 return Qundef;
5702 }
5703}
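// Under the `# shareable_constant_value: literal` pragma, an assignment such
// as:
//
//     FOO = [1, { two: 3 }]
//
// can be folded into a single frozen, shareable object here at compile time.
// Anything non-literal (a method call, dynamic interpolation, a non-assoc
// hash element, and so on) returns Qundef and is handled by the runtime path
// in pm_compile_shareable_constant_value below.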
5704
5709static void
5710pm_compile_shareable_constant_value(rb_iseq_t *iseq, const pm_node_t *node, const pm_node_flags_t shareability, VALUE path, LINK_ANCHOR *const ret, pm_scope_node_t *scope_node, bool top)
5711{
5712 VALUE literal = pm_compile_shareable_constant_literal(iseq, node, scope_node);
5713 if (literal != Qundef) {
5714 const pm_node_location_t location = PM_NODE_START_LOCATION(scope_node->parser, node);
5715 PUSH_INSN1(ret, location, putobject, literal);
5716 return;
5717 }
5718
5719 const pm_node_location_t location = PM_NODE_START_LOCATION(scope_node->parser, node);
5720 switch (PM_NODE_TYPE(node)) {
5721 case PM_ARRAY_NODE: {
5722 const pm_array_node_t *cast = (const pm_array_node_t *) node;
5723
5724 if (top) {
5725 PUSH_INSN1(ret, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
5726 }
5727
5728 for (size_t index = 0; index < cast->elements.size; index++) {
5729 pm_compile_shareable_constant_value(iseq, cast->elements.nodes[index], shareability, path, ret, scope_node, false);
5730 }
5731
5732 PUSH_INSN1(ret, location, newarray, INT2FIX(cast->elements.size));
5733
5734 if (top) {
5735 ID method_id = (shareability & PM_SHAREABLE_CONSTANT_NODE_FLAGS_EXPERIMENTAL_COPY) ? rb_intern("make_shareable_copy") : rb_intern("make_shareable");
5736 PUSH_SEND_WITH_FLAG(ret, location, method_id, INT2FIX(1), INT2FIX(VM_CALL_ARGS_SIMPLE));
5737 }
5738
5739 return;
5740 }
5741 case PM_HASH_NODE: {
5742 const pm_hash_node_t *cast = (const pm_hash_node_t *) node;
5743
5744 if (top) {
5745 PUSH_INSN1(ret, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
5746 }
5747
5748 pm_compile_hash_elements(iseq, (const pm_node_t *) cast, &cast->elements, shareability, path, false, ret, scope_node);
5749
5750 if (top) {
5751 ID method_id = (shareability & PM_SHAREABLE_CONSTANT_NODE_FLAGS_EXPERIMENTAL_COPY) ? rb_intern("make_shareable_copy") : rb_intern("make_shareable");
5752 PUSH_SEND_WITH_FLAG(ret, location, method_id, INT2FIX(1), INT2FIX(VM_CALL_ARGS_SIMPLE));
5753 }
5754
5755 return;
5756 }
5757 default: {
5758 DECL_ANCHOR(value_seq);
5759
5760 pm_compile_node(iseq, node, value_seq, false, scope_node);
5761 if (PM_NODE_TYPE_P(node, PM_INTERPOLATED_STRING_NODE)) {
5762 PUSH_SEND_WITH_FLAG(value_seq, location, idUMinus, INT2FIX(0), INT2FIX(VM_CALL_ARGS_SIMPLE));
5763 }
5764
5765 if (shareability & PM_SHAREABLE_CONSTANT_NODE_FLAGS_LITERAL) {
5766 PUSH_INSN1(ret, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
5767 PUSH_SEQ(ret, value_seq);
5768 if (!RB_OBJ_SHAREABLE_P(path)) {
5769 RB_OBJ_SET_SHAREABLE(path);
5770 }
5771 PUSH_INSN1(ret, location, putobject, path);
5772 PUSH_SEND_WITH_FLAG(ret, location, rb_intern("ensure_shareable"), INT2FIX(2), INT2FIX(VM_CALL_ARGS_SIMPLE));
5773 }
5774 else if (shareability & PM_SHAREABLE_CONSTANT_NODE_FLAGS_EXPERIMENTAL_COPY) {
5775 if (top) PUSH_INSN1(ret, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
5776 PUSH_SEQ(ret, value_seq);
5777 if (top) PUSH_SEND_WITH_FLAG(ret, location, rb_intern("make_shareable_copy"), INT2FIX(1), INT2FIX(VM_CALL_ARGS_SIMPLE));
5778 }
5779 else if (shareability & PM_SHAREABLE_CONSTANT_NODE_FLAGS_EXPERIMENTAL_EVERYTHING) {
5780 if (top) PUSH_INSN1(ret, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
5781 PUSH_SEQ(ret, value_seq);
5782 if (top) PUSH_SEND_WITH_FLAG(ret, location, rb_intern("make_shareable"), INT2FIX(1), INT2FIX(VM_CALL_ARGS_SIMPLE));
5783 }
5784
5785 break;
5786 }
5787 }
5788}
5789
5794static void
5795pm_compile_constant_write_node(rb_iseq_t *iseq, const pm_constant_write_node_t *node, const pm_node_flags_t shareability, const pm_node_location_t *node_location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
5796{
5797 const pm_node_location_t location = *node_location;
5798 ID name_id = pm_constant_id_lookup(scope_node, node->name);
5799
5800 if (shareability != 0) {
5801 pm_compile_shareable_constant_value(iseq, node->value, shareability, rb_id2str(name_id), ret, scope_node, true);
5802 }
5803 else {
5804 PM_COMPILE_NOT_POPPED(node->value);
5805 }
5806
5807 if (!popped) PUSH_INSN(ret, location, dup);
5808 PUSH_INSN1(ret, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_CONST_BASE));
5809
5810 VALUE operand = ID2SYM(name_id);
5811 PUSH_INSN1(ret, location, setconstant, operand);
5812}
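// So a plain constant write:
//
//     FOO = expr
//
// compiles the value (through the shareable-constant path when a pragma is
// active), dups it when the expression result is still needed, pushes the
// cbase with putspecialobject, and finishes with setconstant :FOO.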
5813
5818static void
5819pm_compile_constant_and_write_node(rb_iseq_t *iseq, const pm_constant_and_write_node_t *node, const pm_node_flags_t shareability, const pm_node_location_t *node_location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
5820{
5821 const pm_node_location_t location = *node_location;
5822
5823 VALUE name = ID2SYM(pm_constant_id_lookup(scope_node, node->name));
5824 LABEL *end_label = NEW_LABEL(location.line);
5825
5826 pm_compile_constant_read(iseq, name, &node->name_loc, location.node_id, ret, scope_node);
5827 if (!popped) PUSH_INSN(ret, location, dup);
5828
5829 PUSH_INSNL(ret, location, branchunless, end_label);
5830 if (!popped) PUSH_INSN(ret, location, pop);
5831
5832 if (shareability != 0) {
5833 pm_compile_shareable_constant_value(iseq, node->value, shareability, name, ret, scope_node, true);
5834 }
5835 else {
5836 PM_COMPILE_NOT_POPPED(node->value);
5837 }
5838
5839 if (!popped) PUSH_INSN(ret, location, dup);
5840 PUSH_INSN1(ret, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_CONST_BASE));
5841 PUSH_INSN1(ret, location, setconstant, name);
5842 PUSH_LABEL(ret, end_label);
5843}
5844
5849static void
5850pm_compile_constant_or_write_node(rb_iseq_t *iseq, const pm_constant_or_write_node_t *node, const pm_node_flags_t shareability, const pm_node_location_t *node_location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
5851{
5852 const pm_node_location_t location = *node_location;
5853 VALUE name = ID2SYM(pm_constant_id_lookup(scope_node, node->name));
5854
5855 LABEL *set_label = NEW_LABEL(location.line);
5856 LABEL *end_label = NEW_LABEL(location.line);
5857
5858 PUSH_INSN(ret, location, putnil);
5859 PUSH_INSN3(ret, location, defined, INT2FIX(DEFINED_CONST), name, Qtrue);
5860 PUSH_INSNL(ret, location, branchunless, set_label);
5861
5862 pm_compile_constant_read(iseq, name, &node->name_loc, location.node_id, ret, scope_node);
5863 if (!popped) PUSH_INSN(ret, location, dup);
5864
5865 PUSH_INSNL(ret, location, branchif, end_label);
5866 if (!popped) PUSH_INSN(ret, location, pop);
5867 PUSH_LABEL(ret, set_label);
5868
5869 if (shareability != 0) {
5870 pm_compile_shareable_constant_value(iseq, node->value, shareability, name, ret, scope_node, true);
5871 }
5872 else {
5873 PM_COMPILE_NOT_POPPED(node->value);
5874 }
5875
5876 if (!popped) PUSH_INSN(ret, location, dup);
5877 PUSH_INSN1(ret, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_CONST_BASE));
5878 PUSH_INSN1(ret, location, setconstant, name);
5879 PUSH_LABEL(ret, end_label);
5880}
5881
5886static void
5887pm_compile_constant_operator_write_node(rb_iseq_t *iseq, const pm_constant_operator_write_node_t *node, const pm_node_flags_t shareability, const pm_node_location_t *node_location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
5888{
5889 const pm_node_location_t location = *node_location;
5890
5891 VALUE name = ID2SYM(pm_constant_id_lookup(scope_node, node->name));
5892 ID method_id = pm_constant_id_lookup(scope_node, node->binary_operator);
5893
5894 pm_compile_constant_read(iseq, name, &node->name_loc, location.node_id, ret, scope_node);
5895
5896 if (shareability != 0) {
5897 pm_compile_shareable_constant_value(iseq, node->value, shareability, name, ret, scope_node, true);
5898 }
5899 else {
5900 PM_COMPILE_NOT_POPPED(node->value);
5901 }
5902
5903 PUSH_SEND_WITH_FLAG(ret, location, method_id, INT2NUM(1), INT2FIX(VM_CALL_ARGS_SIMPLE));
5904 if (!popped) PUSH_INSN(ret, location, dup);
5905
5906 PUSH_INSN1(ret, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_CONST_BASE));
5907 PUSH_INSN1(ret, location, setconstant, name);
5908}
5909
5914static VALUE
5915pm_constant_path_path(const pm_constant_path_node_t *node, const pm_scope_node_t *scope_node)
5916{
5917 VALUE parts = rb_ary_new();
5918 rb_ary_push(parts, rb_id2str(pm_constant_id_lookup(scope_node, node->name)));
5919
5920 const pm_node_t *current = node->parent;
5921 while (current != NULL && PM_NODE_TYPE_P(current, PM_CONSTANT_PATH_NODE)) {
5922 const pm_constant_path_node_t *cast = (const pm_constant_path_node_t *) current;
5923 rb_ary_unshift(parts, rb_id2str(pm_constant_id_lookup(scope_node, cast->name)));
5924 current = cast->parent;
5925 }
5926
5927 if (current == NULL) {
5928 rb_ary_unshift(parts, rb_id2str(idNULL));
5929 }
5930 else if (PM_NODE_TYPE_P(current, PM_CONSTANT_READ_NODE)) {
5931 rb_ary_unshift(parts, rb_id2str(pm_constant_id_lookup(scope_node, ((const pm_constant_read_node_t *) current)->name)));
5932 }
5933 else {
5934 rb_ary_unshift(parts, rb_str_new_cstr("..."));
5935 }
5936
5937 return rb_ary_join(parts, rb_str_new_cstr("::"));
5938}
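// For instance, `Foo::Bar = value` produces the string "Foo::Bar", and a
// dynamic parent as in `foo::Bar = value` shows up as "...::Bar". The result
// is used as the `path` argument handed to the shareable constant helpers,
// for example in ensure_shareable error messages.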
5939
5944static void
5945pm_compile_constant_path_write_node(rb_iseq_t *iseq, const pm_constant_path_write_node_t *node, const pm_node_flags_t shareability, const pm_node_location_t *node_location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
5946{
5947 const pm_node_location_t location = *node_location;
5948 const pm_constant_path_node_t *target = node->target;
5949 VALUE name = ID2SYM(pm_constant_id_lookup(scope_node, target->name));
5950
5951 if (target->parent) {
5952 PM_COMPILE_NOT_POPPED((const pm_node_t *) target->parent);
5953 }
5954 else {
5955 PUSH_INSN1(ret, location, putobject, rb_cObject);
5956 }
5957
5958 if (shareability != 0) {
5959 pm_compile_shareable_constant_value(iseq, node->value, shareability, pm_constant_path_path(node->target, scope_node), ret, scope_node, true);
5960 }
5961 else {
5962 PM_COMPILE_NOT_POPPED(node->value);
5963 }
5964
5965 if (!popped) {
5966 PUSH_INSN(ret, location, swap);
5967 PUSH_INSN1(ret, location, topn, INT2FIX(1));
5968 }
5969
5970 PUSH_INSN(ret, location, swap);
5971 PUSH_INSN1(ret, location, setconstant, name);
5972}
5973
5978static void
5979pm_compile_constant_path_and_write_node(rb_iseq_t *iseq, const pm_constant_path_and_write_node_t *node, const pm_node_flags_t shareability, const pm_node_location_t *node_location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
5980{
5981 const pm_node_location_t location = *node_location;
5982 const pm_constant_path_node_t *target = node->target;
5983
5984 VALUE name = ID2SYM(pm_constant_id_lookup(scope_node, target->name));
5985 LABEL *lfin = NEW_LABEL(location.line);
5986
5987 if (target->parent) {
5988 PM_COMPILE_NOT_POPPED(target->parent);
5989 }
5990 else {
5991 PUSH_INSN1(ret, location, putobject, rb_cObject);
5992 }
5993
5994 PUSH_INSN(ret, location, dup);
5995 PUSH_INSN1(ret, location, putobject, Qtrue);
5996 PUSH_INSN1(ret, location, getconstant, name);
5997
5998 if (!popped) PUSH_INSN(ret, location, dup);
5999 PUSH_INSNL(ret, location, branchunless, lfin);
6000
6001 if (!popped) PUSH_INSN(ret, location, pop);
6002
6003 if (shareability != 0) {
6004 pm_compile_shareable_constant_value(iseq, node->value, shareability, pm_constant_path_path(node->target, scope_node), ret, scope_node, true);
6005 }
6006 else {
6007 PM_COMPILE_NOT_POPPED(node->value);
6008 }
6009
6010 if (popped) {
6011 PUSH_INSN1(ret, location, topn, INT2FIX(1));
6012 }
6013 else {
6014 PUSH_INSN1(ret, location, dupn, INT2FIX(2));
6015 PUSH_INSN(ret, location, swap);
6016 }
6017
6018 PUSH_INSN1(ret, location, setconstant, name);
6019 PUSH_LABEL(ret, lfin);
6020
6021 if (!popped) PUSH_INSN(ret, location, swap);
6022 PUSH_INSN(ret, location, pop);
6023}
6024
6029static void
6030pm_compile_constant_path_or_write_node(rb_iseq_t *iseq, const pm_constant_path_or_write_node_t *node, const pm_node_flags_t shareability, const pm_node_location_t *node_location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
6031{
6032 const pm_node_location_t location = *node_location;
6033 const pm_constant_path_node_t *target = node->target;
6034
6035 VALUE name = ID2SYM(pm_constant_id_lookup(scope_node, target->name));
6036 LABEL *lassign = NEW_LABEL(location.line);
6037 LABEL *lfin = NEW_LABEL(location.line);
6038
6039 if (target->parent) {
6040 PM_COMPILE_NOT_POPPED(target->parent);
6041 }
6042 else {
6043 PUSH_INSN1(ret, location, putobject, rb_cObject);
6044 }
6045
6046 PUSH_INSN(ret, location, dup);
6047 PUSH_INSN3(ret, location, defined, INT2FIX(DEFINED_CONST_FROM), name, Qtrue);
6048 PUSH_INSNL(ret, location, branchunless, lassign);
6049
6050 PUSH_INSN(ret, location, dup);
6051 PUSH_INSN1(ret, location, putobject, Qtrue);
6052 PUSH_INSN1(ret, location, getconstant, name);
6053
6054 if (!popped) PUSH_INSN(ret, location, dup);
6055 PUSH_INSNL(ret, location, branchif, lfin);
6056
6057 if (!popped) PUSH_INSN(ret, location, pop);
6058 PUSH_LABEL(ret, lassign);
6059
6060 if (shareability != 0) {
6061 pm_compile_shareable_constant_value(iseq, node->value, shareability, pm_constant_path_path(node->target, scope_node), ret, scope_node, true);
6062 }
6063 else {
6064 PM_COMPILE_NOT_POPPED(node->value);
6065 }
6066
6067 if (popped) {
6068 PUSH_INSN1(ret, location, topn, INT2FIX(1));
6069 }
6070 else {
6071 PUSH_INSN1(ret, location, dupn, INT2FIX(2));
6072 PUSH_INSN(ret, location, swap);
6073 }
6074
6075 PUSH_INSN1(ret, location, setconstant, name);
6076 PUSH_LABEL(ret, lfin);
6077
6078 if (!popped) PUSH_INSN(ret, location, swap);
6079 PUSH_INSN(ret, location, pop);
6080}
6081
6086static void
6087pm_compile_constant_path_operator_write_node(rb_iseq_t *iseq, const pm_constant_path_operator_write_node_t *node, const pm_node_flags_t shareability, const pm_node_location_t *node_location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
6088{
6089 const pm_node_location_t location = *node_location;
6090 const pm_constant_path_node_t *target = node->target;
6091
6092 ID method_id = pm_constant_id_lookup(scope_node, node->binary_operator);
6093 VALUE name = ID2SYM(pm_constant_id_lookup(scope_node, target->name));
6094
6095 if (target->parent) {
6096 PM_COMPILE_NOT_POPPED(target->parent);
6097 }
6098 else {
6099 PUSH_INSN1(ret, location, putobject, rb_cObject);
6100 }
6101
6102 PUSH_INSN(ret, location, dup);
6103 PUSH_INSN1(ret, location, putobject, Qtrue);
6104 PUSH_INSN1(ret, location, getconstant, name);
6105
6106 if (shareability != 0) {
6107 pm_compile_shareable_constant_value(iseq, node->value, shareability, pm_constant_path_path(node->target, scope_node), ret, scope_node, true);
6108 }
6109 else {
6110 PM_COMPILE_NOT_POPPED(node->value);
6111 }
6112
6113 PUSH_CALL(ret, location, method_id, INT2FIX(1));
6114 PUSH_INSN(ret, location, swap);
6115
6116 if (!popped) {
6117 PUSH_INSN1(ret, location, topn, INT2FIX(1));
6118 PUSH_INSN(ret, location, swap);
6119 }
6120
6121 PUSH_INSN1(ret, location, setconstant, name);
6122}
6123
6130#define PM_CONTAINER_P(node) (PM_NODE_TYPE_P(node, PM_ARRAY_NODE) || PM_NODE_TYPE_P(node, PM_HASH_NODE) || PM_NODE_TYPE_P(node, PM_RANGE_NODE))
6131
6136static inline void
6137pm_compile_scope_node(rb_iseq_t *iseq, pm_scope_node_t *scope_node, const pm_node_location_t *node_location, LINK_ANCHOR *const ret, bool popped)
6138{
6139 const pm_node_location_t location = *node_location;
6140 struct rb_iseq_constant_body *body = ISEQ_BODY(iseq);
6141
6142 pm_constant_id_list_t *locals = &scope_node->locals;
6143 pm_parameters_node_t *parameters_node = NULL;
6144 pm_node_list_t *keywords_list = NULL;
6145 pm_node_list_t *optionals_list = NULL;
6146 pm_node_list_t *posts_list = NULL;
6147 pm_node_list_t *requireds_list = NULL;
6148 pm_node_list_t *block_locals = NULL;
6149 bool trailing_comma = false;
6150
6151 if (PM_NODE_TYPE_P(scope_node->ast_node, PM_CLASS_NODE) || PM_NODE_TYPE_P(scope_node->ast_node, PM_MODULE_NODE)) {
6152 PUSH_TRACE(ret, RUBY_EVENT_CLASS);
6153 }
6154
6155 if (scope_node->parameters != NULL) {
6156 switch (PM_NODE_TYPE(scope_node->parameters)) {
6157 case PM_BLOCK_PARAMETERS_NODE: {
6158 pm_block_parameters_node_t *cast = (pm_block_parameters_node_t *) scope_node->parameters;
6159 parameters_node = cast->parameters;
6160 block_locals = &cast->locals;
6161
6162 if (parameters_node) {
6163 if (parameters_node->rest && PM_NODE_TYPE_P(parameters_node->rest, PM_IMPLICIT_REST_NODE)) {
6164 trailing_comma = true;
6165 }
6166 }
6167 break;
6168 }
6169 case PM_PARAMETERS_NODE: {
6170 parameters_node = (pm_parameters_node_t *) scope_node->parameters;
6171 break;
6172 }
6173 case PM_NUMBERED_PARAMETERS_NODE: {
6174 uint32_t maximum = ((const pm_numbered_parameters_node_t *) scope_node->parameters)->maximum;
6175 body->param.lead_num = maximum;
6176 body->param.flags.ambiguous_param0 = maximum == 1;
6177 break;
6178 }
6179 case PM_IT_PARAMETERS_NODE:
6180 body->param.lead_num = 1;
6181 body->param.flags.ambiguous_param0 = true;
6182 break;
6183 default:
6184 rb_bug("Unexpected node type for parameters: %s", pm_node_type_to_str(PM_NODE_TYPE(scope_node->parameters)));
6185 }
6186 }
6187
6188 struct rb_iseq_param_keyword *keyword = NULL;
6189
6190 if (parameters_node) {
6191 optionals_list = &parameters_node->optionals;
6192 requireds_list = &parameters_node->requireds;
6193 keywords_list = &parameters_node->keywords;
6194 posts_list = &parameters_node->posts;
6195 }
6196 else if (scope_node->parameters && (PM_NODE_TYPE_P(scope_node->parameters, PM_NUMBERED_PARAMETERS_NODE) || PM_NODE_TYPE_P(scope_node->parameters, PM_IT_PARAMETERS_NODE))) {
6197 body->param.opt_num = 0;
6198 }
6199 else {
6200 body->param.lead_num = 0;
6201 body->param.opt_num = 0;
6202 }
6203
6204 //********STEP 1**********
6205 // Goal: calculate the table size for the locals, accounting for
6206 // hidden variables and multi target nodes
6207 size_t locals_size = locals->size;
6208
6209 // Index lookup table buffer size is only the number of the locals
6210 st_table *index_lookup_table = st_init_numtable();
6211
6212 int table_size = (int) locals_size;
6213
6214 // `for` nodes have a hidden iteration variable. We add that to the local
6215 // table size here.
6216 if (PM_NODE_TYPE_P(scope_node->ast_node, PM_FOR_NODE)) table_size++;
6217
6218 if (keywords_list && keywords_list->size) {
6219 table_size++;
6220 }
6221
6222 if (requireds_list) {
6223 for (size_t i = 0; i < requireds_list->size; i++) {
6224 // For each MultiTargetNode, we're going to have one
6225 // additional anonymous local not represented in the locals table
6226 // We want to account for this in our table size
6227 pm_node_t *required = requireds_list->nodes[i];
6228 if (PM_NODE_TYPE_P(required, PM_MULTI_TARGET_NODE)) {
6229 table_size++;
6230 }
6231 else if (PM_NODE_TYPE_P(required, PM_REQUIRED_PARAMETER_NODE)) {
6232 if (PM_NODE_FLAG_P(required, PM_PARAMETER_FLAGS_REPEATED_PARAMETER)) {
6233 table_size++;
6234 }
6235 }
6236 }
6237 }
6238
6239 // If we have the `it` implicit local variable, we need to account for
6240 // it in the local table size.
6241 if (scope_node->parameters != NULL && PM_NODE_TYPE_P(scope_node->parameters, PM_IT_PARAMETERS_NODE)) {
6242 table_size++;
6243 }
6244
6245 // Ensure there is enough room in the local table for any
6246 // parameters that have been repeated
6247 // ex: def underscore_parameters(_, _ = 1, _ = 2); _; end
6248 // ^^^^^^^^^^^^
6249 if (optionals_list && optionals_list->size) {
6250 for (size_t i = 0; i < optionals_list->size; i++) {
6251 pm_node_t * node = optionals_list->nodes[i];
6252 if (PM_NODE_FLAG_P(node, PM_PARAMETER_FLAGS_REPEATED_PARAMETER)) {
6253 table_size++;
6254 }
6255 }
6256 }
6257
6258 // If we have an anonymous "rest" node, we'll need to increase the local
6259 // table size to take it into account.
6260 // def m(foo, *, bar)
6261 // ^
6262 if (parameters_node) {
6263 if (parameters_node->rest) {
6264 if (!(PM_NODE_TYPE_P(parameters_node->rest, PM_IMPLICIT_REST_NODE))) {
6265 if (!((const pm_rest_parameter_node_t *) parameters_node->rest)->name || PM_NODE_FLAG_P(parameters_node->rest, PM_PARAMETER_FLAGS_REPEATED_PARAMETER)) {
6266 table_size++;
6267 }
6268 }
6269 }
6270
6271 // def foo(_, **_); _; end
6272 // ^^^
6273 if (parameters_node->keyword_rest) {
6274 // def foo(...); end
6275 // ^^^
6276 // When we have a `...` as the keyword_rest, it's a forwarding_parameter_node and
6277 // we need to leave space for 4 locals: *, **, &, ...
6278 if (PM_NODE_TYPE_P(parameters_node->keyword_rest, PM_FORWARDING_PARAMETER_NODE)) {
6279 // Only optimize specifically methods like this: `foo(...)`
6280 if (requireds_list->size == 0 && optionals_list->size == 0 && keywords_list->size == 0) {
6281 ISEQ_BODY(iseq)->param.flags.use_block = TRUE;
6282 ISEQ_BODY(iseq)->param.flags.forwardable = TRUE;
6283 table_size += 1;
6284 }
6285 else {
6286 table_size += 4;
6287 }
6288 }
6289 else {
6290 const pm_keyword_rest_parameter_node_t *kw_rest = (const pm_keyword_rest_parameter_node_t *) parameters_node->keyword_rest;
6291
6292 // If it's anonymous or repeated, then we need to allocate stack space
6293 if (!kw_rest->name || PM_NODE_FLAG_P(kw_rest, PM_PARAMETER_FLAGS_REPEATED_PARAMETER)) {
6294 table_size++;
6295 }
6296 }
6297 }
6298 }
6299
6300 if (posts_list) {
6301 for (size_t i = 0; i < posts_list->size; i++) {
6302 // For each MultiTargetNode, we're going to have one
6303 // additional anonymous local not represented in the locals table
6304 // We want to account for this in our table size
6305 pm_node_t *required = posts_list->nodes[i];
6306 if (PM_NODE_TYPE_P(required, PM_MULTI_TARGET_NODE) || PM_NODE_FLAG_P(required, PM_PARAMETER_FLAGS_REPEATED_PARAMETER)) {
6307 table_size++;
6308 }
6309 }
6310 }
6311
6312 if (keywords_list && keywords_list->size) {
6313 for (size_t i = 0; i < keywords_list->size; i++) {
6314 pm_node_t *keyword_parameter_node = keywords_list->nodes[i];
6315 if (PM_NODE_FLAG_P(keyword_parameter_node, PM_PARAMETER_FLAGS_REPEATED_PARAMETER)) {
6316 table_size++;
6317 }
6318 }
6319 }
6320
6321 if (parameters_node && parameters_node->block) {
6322 const pm_block_parameter_node_t *block_node = (const pm_block_parameter_node_t *) parameters_node->block;
6323
6324 if (PM_NODE_FLAG_P(block_node, PM_PARAMETER_FLAGS_REPEATED_PARAMETER) || !block_node->name) {
6325 table_size++;
6326 }
6327 }
6328
6329 // We can create local_table_for_iseq with the correct size
6330 VALUE idtmp = 0;
6331 rb_ast_id_table_t *local_table_for_iseq = ALLOCV(idtmp, sizeof(rb_ast_id_table_t) + table_size * sizeof(ID));
6332 local_table_for_iseq->size = table_size;
6333
6334 //********END OF STEP 1**********
6335
6336 //********STEP 2**********
6337 // Goal: populate iv index table as well as local table, keeping the
6338 // layout of the local table consistent with the layout of the
6339 // stack when calling the method
6340 //
6341 // Do a first pass on all of the parameters, setting their values in
6342 // the local_table_for_iseq, _except_ for Multis who get a hidden
6343 // variable in this step, and will get their names inserted in step 3
6344
6345 // local_index is a cursor that keeps track of the current
6346 // index into local_table_for_iseq. The local table is actually a list,
6347 // and the order of that list must match the order of the items pushed
6348 // on the stack. We need to take into account things pushed on the
6349 // stack that _might not have a name_ (for example array destructuring).
6350 // This index helps us know which item we're dealing with and also gives
6351 // those anonymous items temporary names (as below)
6352 int local_index = 0;
6353
6354 // Here we figure out local table indices and insert them in to the
6355 // index lookup table and local tables.
6356 //
6357 // def foo(a, (b, *c, d), e = 1, *f, g, (h, *i, j), k:, l: 1, **m, &n)
6358 // ^^^^^^^^^^^^^
6359 if (requireds_list && requireds_list->size) {
6360 for (size_t i = 0; i < requireds_list->size; i++, local_index++) {
6361 ID local;
6362
6363 // For each MultiTargetNode, we're going to have one additional
6364 // anonymous local not represented in the locals table. We want
6365 // to account for this in our table size.
6366 pm_node_t *required = requireds_list->nodes[i];
6367
6368 switch (PM_NODE_TYPE(required)) {
6369 case PM_MULTI_TARGET_NODE: {
6370 // def foo(a, (b, *c, d), e = 1, *f, g, (h, *i, j), k:, l: 1, **m, &n)
6371 // ^^^^^^^^^^
6372 local = rb_make_temporary_id(local_index);
6373 local_table_for_iseq->ids[local_index] = local;
6374 break;
6375 }
6376 case PM_REQUIRED_PARAMETER_NODE: {
6377 // def foo(a, (b, *c, d), e = 1, *f, g, (h, *i, j), k:, l: 1, **m, &n)
6378 // ^
6379 const pm_required_parameter_node_t *param = (const pm_required_parameter_node_t *) required;
6380
6381 if (PM_NODE_FLAG_P(required, PM_PARAMETER_FLAGS_REPEATED_PARAMETER)) {
6382 ID local = pm_constant_id_lookup(scope_node, param->name);
6383 local_table_for_iseq->ids[local_index] = local;
6384 }
6385 else {
6386 pm_insert_local_index(param->name, local_index, index_lookup_table, local_table_for_iseq, scope_node);
6387 }
6388
6389 break;
6390 }
6391 default:
6392 rb_bug("Unsupported node in requireds in parameters %s", pm_node_type_to_str(PM_NODE_TYPE(required)));
6393 }
6394 }
6395
6396 body->param.lead_num = (int) requireds_list->size;
6397 body->param.flags.has_lead = true;
6398 }
6399
6400 if (scope_node->parameters != NULL && PM_NODE_TYPE_P(scope_node->parameters, PM_IT_PARAMETERS_NODE)) {
6401 ID local = rb_make_temporary_id(local_index);
6402 local_table_for_iseq->ids[local_index++] = local;
6403 }
6404
6405 // def foo(a, (b, *c, d), e = 1, *f, g, (h, *i, j), k:, l: 1, **m, &n)
6406 // ^^^^^
6407 if (optionals_list && optionals_list->size) {
6408 body->param.opt_num = (int) optionals_list->size;
6409 body->param.flags.has_opt = true;
6410
6411 for (size_t i = 0; i < optionals_list->size; i++, local_index++) {
6412 pm_node_t * node = optionals_list->nodes[i];
6413 pm_constant_id_t name = ((const pm_optional_parameter_node_t *) node)->name;
6414
6415 if (PM_NODE_FLAG_P(node, PM_PARAMETER_FLAGS_REPEATED_PARAMETER)) {
6416 ID local = pm_constant_id_lookup(scope_node, name);
6417 local_table_for_iseq->ids[local_index] = local;
6418 }
6419 else {
6420 pm_insert_local_index(name, local_index, index_lookup_table, local_table_for_iseq, scope_node);
6421 }
6422 }
6423 }
6424
6425 // def foo(a, (b, *c, d), e = 1, *f, g, (h, *i, j), k:, l: 1, **m, &n)
6426 // ^^
6427 if (parameters_node && parameters_node->rest) {
6428 body->param.rest_start = local_index;
6429
6430 // If there's a trailing comma, we'll have an implicit rest node,
6431 // and we don't want it to set the rest flags on body->param or take up a local slot
6432 if (!(PM_NODE_TYPE_P(parameters_node->rest, PM_IMPLICIT_REST_NODE))) {
6433 body->param.flags.has_rest = true;
6434 RUBY_ASSERT(body->param.rest_start != -1);
6435
6436 pm_constant_id_t name = ((const pm_rest_parameter_node_t *) parameters_node->rest)->name;
6437
6438 if (name) {
6439 // def foo(a, (b, *c, d), e = 1, *f, g, (h, *i, j), k:, l: 1, **m, &n)
6440 // ^^
6441 if (PM_NODE_FLAG_P(parameters_node->rest, PM_PARAMETER_FLAGS_REPEATED_PARAMETER)) {
6442 ID local = pm_constant_id_lookup(scope_node, name);
6443 local_table_for_iseq->ids[local_index] = local;
6444 }
6445 else {
6446 pm_insert_local_index(name, local_index, index_lookup_table, local_table_for_iseq, scope_node);
6447 }
6448 }
6449 else {
6450 // def foo(a, (b, *c, d), e = 1, *, g, (h, *i, j), k:, l: 1, **m, &n)
6451 // ^
6452 body->param.flags.anon_rest = true;
6453 pm_insert_local_special(idMULT, local_index, index_lookup_table, local_table_for_iseq);
6454 }
6455
6456 local_index++;
6457 }
6458 }
6459
6460 // def foo(a, (b, *c, d), e = 1, *f, g, (h, *i, j), k:, l: 1, **m, &n)
6461 // ^^^^^^^^^^^^^
6462 if (posts_list && posts_list->size) {
6463 body->param.post_num = (int) posts_list->size;
6464 body->param.post_start = local_index;
6465 body->param.flags.has_post = true;
6466
6467 for (size_t i = 0; i < posts_list->size; i++, local_index++) {
6468 ID local;
6469
6470 // Each MultiTargetNode gets one additional anonymous local that is
6471 // not represented in the locals table; here we give it a hidden
6472 // temporary name, and its component names are filled in in step 3.
6473 const pm_node_t *post_node = posts_list->nodes[i];
6474
6475 switch (PM_NODE_TYPE(post_node)) {
6476 case PM_MULTI_TARGET_NODE: {
6477 // def foo(a, (b, *c, d), e = 1, *f, g, (h, *i, j), k:, l: 1, **m, &n)
6478 // ^^^^^^^^^^
6479 local = rb_make_temporary_id(local_index);
6480 local_table_for_iseq->ids[local_index] = local;
6481 break;
6482 }
6483 case PM_REQUIRED_PARAMETER_NODE: {
6484 // def foo(a, (b, *c, d), e = 1, *f, g, (h, *i, j), k:, l: 1, **m, &n)
6485 // ^
6486 const pm_required_parameter_node_t *param = (const pm_required_parameter_node_t *) post_node;
6487
6488 if (PM_NODE_FLAG_P(param, PM_PARAMETER_FLAGS_REPEATED_PARAMETER)) {
6489 ID local = pm_constant_id_lookup(scope_node, param->name);
6490 local_table_for_iseq->ids[local_index] = local;
6491 }
6492 else {
6493 pm_insert_local_index(param->name, local_index, index_lookup_table, local_table_for_iseq, scope_node);
6494 }
6495 break;
6496 }
6497 default:
6498 rb_bug("Unsupported node in posts in parameters %s", pm_node_type_to_str(PM_NODE_TYPE(post_node)));
6499 }
6500 }
6501 }
6502
6503 // def foo(a, (b, *c, d), e = 1, *f, g, (h, *i, j), k:, l: 1, **m, &n)
6504 // ^^^^^^^^
6505 // Keywords create an internal variable on the parse tree
6506 if (keywords_list && keywords_list->size) {
6507 keyword = ZALLOC_N(struct rb_iseq_param_keyword, 1);
6508 keyword->num = (int) keywords_list->size;
6509
6510 const VALUE default_values = rb_ary_hidden_new(1);
6511 const VALUE complex_mark = rb_str_tmp_new(0);
6512
6513 for (size_t i = 0; i < keywords_list->size; i++) {
6514 pm_node_t *keyword_parameter_node = keywords_list->nodes[i];
6515 pm_constant_id_t name;
6516
6517 // def foo(a, (b, *c, d), e = 1, *f, g, (h, *i, j), k:, l: 1, **m, &n)
6518 // ^^
6519 if (PM_NODE_TYPE_P(keyword_parameter_node, PM_REQUIRED_KEYWORD_PARAMETER_NODE)) {
6520 name = ((const pm_required_keyword_parameter_node_t *) keyword_parameter_node)->name;
6521 keyword->required_num++;
6522 ID local = pm_constant_id_lookup(scope_node, name);
6523
6524 if (PM_NODE_FLAG_P(keyword_parameter_node, PM_PARAMETER_FLAGS_REPEATED_PARAMETER)) {
6525 local_table_for_iseq->ids[local_index] = local;
6526 }
6527 else {
6528 pm_insert_local_index(name, local_index, index_lookup_table, local_table_for_iseq, scope_node);
6529 }
6530 local_index++;
6531 }
6532 }
6533
6534 for (size_t i = 0; i < keywords_list->size; i++) {
6535 pm_node_t *keyword_parameter_node = keywords_list->nodes[i];
6536 pm_constant_id_t name;
6537
6538 // def foo(a, (b, *c, d), e = 1, *f, g, (h, *i, j), k:, l: 1, **m, &n)
6539 // ^^^^
6540 if (PM_NODE_TYPE_P(keyword_parameter_node, PM_OPTIONAL_KEYWORD_PARAMETER_NODE)) {
6541 const pm_optional_keyword_parameter_node_t *cast = ((const pm_optional_keyword_parameter_node_t *) keyword_parameter_node);
6542
6543 pm_node_t *value = cast->value;
6544 name = cast->name;
6545
6546 if (PM_NODE_FLAG_P(value, PM_NODE_FLAG_STATIC_LITERAL) && !PM_CONTAINER_P(value)) {
6547 rb_ary_push(default_values, pm_static_literal_value(iseq, value, scope_node));
6548 }
6549 else {
6550 rb_ary_push(default_values, complex_mark);
6551 }
6552
6553 ID local = pm_constant_id_lookup(scope_node, name);
6554 if (PM_NODE_FLAG_P(keyword_parameter_node, PM_PARAMETER_FLAGS_REPEATED_PARAMETER)) {
6555 local_table_for_iseq->ids[local_index] = local;
6556 }
6557 else {
6558 pm_insert_local_index(name, local_index, index_lookup_table, local_table_for_iseq, scope_node);
6559 }
6560 local_index++;
6561 }
6562
6563 }
6564
6565 if (RARRAY_LEN(default_values)) {
6566 VALUE *dvs = ALLOC_N(VALUE, RARRAY_LEN(default_values));
6567
6568 for (int i = 0; i < RARRAY_LEN(default_values); i++) {
6569 VALUE dv = RARRAY_AREF(default_values, i);
6570 if (dv == complex_mark) dv = Qundef;
6571 RB_OBJ_WRITE(iseq, &dvs[i], dv);
6572 }
6573
6574 keyword->default_values = dvs;
6575 }
6576
6577 // Hidden local for keyword arguments
6578 keyword->bits_start = local_index;
6579 ID local = rb_make_temporary_id(local_index);
6580 local_table_for_iseq->ids[local_index] = local;
6581 local_index++;
6582
6583 body->param.keyword = keyword;
6584 body->param.flags.has_kw = true;
6585 }
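// For example (roughly), with
//
//     def foo(k:, l: 1, m: rand)
//
// `1` is a static literal and is stored directly in default_values, while
// `rand` is not, so it is recorded as complex_mark (which becomes Qundef
// above) and its computation is emitted later in step 5, guarded by
// checkkeyword.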
6586
6587 if (body->type == ISEQ_TYPE_BLOCK && local_index == 1 && requireds_list && requireds_list->size == 1 && !trailing_comma) {
6588 body->param.flags.ambiguous_param0 = true;
6589 }
6590
6591 if (parameters_node) {
6592 // def foo(a, (b, *c, d), e = 1, *f, g, (h, *i, j), k:, l: 1, **m, &n)
6593 // ^^^
6594 if (parameters_node->keyword_rest) {
6595 switch (PM_NODE_TYPE(parameters_node->keyword_rest)) {
6596 case PM_NO_KEYWORDS_PARAMETER_NODE: {
6597 // def foo(a, (b, *c, d), e = 1, *f, g, (h, *i, j), k:, l: 1, **nil, &n)
6598 // ^^^^^
6599 body->param.flags.accepts_no_kwarg = true;
6600 break;
6601 }
6602 case PM_KEYWORD_REST_PARAMETER_NODE: {
6603 // def foo(a, (b, *c, d), e = 1, *f, g, (h, *i, j), k:, l: 1, **m, &n)
6604 // ^^^
6605 const pm_keyword_rest_parameter_node_t *kw_rest_node = (const pm_keyword_rest_parameter_node_t *) parameters_node->keyword_rest;
6606 if (!body->param.flags.has_kw) {
6607 body->param.keyword = keyword = ZALLOC_N(struct rb_iseq_param_keyword, 1);
6608 }
6609
6610 keyword->rest_start = local_index;
6611 body->param.flags.has_kwrest = true;
6612
6613 pm_constant_id_t constant_id = kw_rest_node->name;
6614 if (constant_id) {
6615 if (PM_NODE_FLAG_P(kw_rest_node, PM_PARAMETER_FLAGS_REPEATED_PARAMETER)) {
6616 ID local = pm_constant_id_lookup(scope_node, constant_id);
6617 local_table_for_iseq->ids[local_index] = local;
6618 }
6619 else {
6620 pm_insert_local_index(constant_id, local_index, index_lookup_table, local_table_for_iseq, scope_node);
6621 }
6622 }
6623 else {
6624 body->param.flags.anon_kwrest = true;
6625 pm_insert_local_special(idPow, local_index, index_lookup_table, local_table_for_iseq);
6626 }
6627
6628 local_index++;
6629 break;
6630 }
6631 case PM_FORWARDING_PARAMETER_NODE: {
6632 // def foo(...)
6633 // ^^^
6634 if (!ISEQ_BODY(iseq)->param.flags.forwardable) {
6635 // Add the anonymous *
6636 body->param.rest_start = local_index;
6637 body->param.flags.has_rest = true;
6638 body->param.flags.anon_rest = true;
6639 pm_insert_local_special(idMULT, local_index++, index_lookup_table, local_table_for_iseq);
6640
6641 // Add the anonymous **
6642 RUBY_ASSERT(!body->param.flags.has_kw);
6643 body->param.flags.has_kw = false;
6644 body->param.flags.has_kwrest = true;
6645 body->param.flags.anon_kwrest = true;
6646 body->param.keyword = keyword = ZALLOC_N(struct rb_iseq_param_keyword, 1);
6647 keyword->rest_start = local_index;
6648 pm_insert_local_special(idPow, local_index++, index_lookup_table, local_table_for_iseq);
6649
6650 // Add the anonymous &
6651 body->param.block_start = local_index;
6652 body->param.flags.has_block = true;
6653 pm_insert_local_special(idAnd, local_index++, index_lookup_table, local_table_for_iseq);
6654 }
6655
6656 // Add the ...
6657 pm_insert_local_special(idDot3, local_index++, index_lookup_table, local_table_for_iseq);
6658 break;
6659 }
6660 default:
6661 rb_bug("node type %s not expected as keyword_rest", pm_node_type_to_str(PM_NODE_TYPE(parameters_node->keyword_rest)));
6662 }
6663 }
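// As a rough illustration of the forwarding case above, given
//
//     def foo(...)
//       bar(...)
//     end
//
// the parameters are represented by hidden locals: either a single `...`
// entry (when the iseq is marked forwardable) or anonymous `*`, `**`, and
// `&` entries plus `...`, so the forwarded call can read the packed
// arguments back without any named locals being involved.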
6664
6665 // def foo(a, (b, *c, d), e = 1, *f, g, (h, *i, j), k:, l: 1, **m, &n)
6666 // ^^
6667 if (parameters_node->block) {
6668 body->param.block_start = local_index;
6669 body->param.flags.has_block = true;
6670 iseq_set_use_block(iseq);
6671
6672 pm_constant_id_t name = ((const pm_block_parameter_node_t *) parameters_node->block)->name;
6673
6674 if (name) {
6675 if (PM_NODE_FLAG_P(parameters_node->block, PM_PARAMETER_FLAGS_REPEATED_PARAMETER)) {
6676 ID local = pm_constant_id_lookup(scope_node, name);
6677 local_table_for_iseq->ids[local_index] = local;
6678 }
6679 else {
6680 pm_insert_local_index(name, local_index, index_lookup_table, local_table_for_iseq, scope_node);
6681 }
6682 }
6683 else {
6684 pm_insert_local_special(idAnd, local_index, index_lookup_table, local_table_for_iseq);
6685 }
6686
6687 local_index++;
6688 }
6689 }
6690
6691 //********END OF STEP 2**********
6692 // The local table is now consistent with the expected
6693 // stack layout.
6694
6695 // (The ambiguous_param0 flag set above covers blocks whose parameters
6696 // contain only a single required element, which CRuby must treat as ambiguous.)
6697
6698 //********STEP 3**********
6699 // Goal: fill in the names of the parameters in MultiTargetNodes
6700 //
6701 // Go through requireds again to set the multis
6702
6703 if (requireds_list && requireds_list->size) {
6704 for (size_t i = 0; i < requireds_list->size; i++) {
6705 // For each MultiTargetNode, the hidden local was added in step 2;
6706 // here we append the names of its destructured components to the
6707 // local table.
6708 const pm_node_t *required = requireds_list->nodes[i];
6709
6710 if (PM_NODE_TYPE_P(required, PM_MULTI_TARGET_NODE)) {
6711 local_index = pm_compile_destructured_param_locals((const pm_multi_target_node_t *) required, index_lookup_table, local_table_for_iseq, scope_node, local_index);
6712 }
6713 }
6714 }
6715
6716 // Go through posts again to set the multis
6717 if (posts_list && posts_list->size) {
6718 for (size_t i = 0; i < posts_list->size; i++) {
6719 // For each MultiTargetNode, the hidden local was added in step 2;
6720 // here we append the names of its destructured components to the
6721 // local table.
6722 const pm_node_t *post = posts_list->nodes[i];
6723
6724 if (PM_NODE_TYPE_P(post, PM_MULTI_TARGET_NODE)) {
6725 local_index = pm_compile_destructured_param_locals((const pm_multi_target_node_t *) post, index_lookup_table, local_table_for_iseq, scope_node, local_index);
6726 }
6727 }
6728 }
6729
6730 // Set any anonymous locals for the for node
6731 if (PM_NODE_TYPE_P(scope_node->ast_node, PM_FOR_NODE)) {
6732 if (PM_NODE_TYPE_P(((const pm_for_node_t *) scope_node->ast_node)->index, PM_LOCAL_VARIABLE_TARGET_NODE)) {
6733 body->param.lead_num++;
6734 }
6735 else {
6736 body->param.rest_start = local_index;
6737 body->param.flags.has_rest = true;
6738 }
6739
6740 ID local = rb_make_temporary_id(local_index);
6741 local_table_for_iseq->ids[local_index] = local;
6742 local_index++;
6743 }
6744
6745 // Fill in any NumberedParameters, if they exist
6746 if (scope_node->parameters && PM_NODE_TYPE_P(scope_node->parameters, PM_NUMBERED_PARAMETERS_NODE)) {
6747 int maximum = ((const pm_numbered_parameters_node_t *) scope_node->parameters)->maximum;
6748 RUBY_ASSERT(0 < maximum && maximum <= 9);
6749 for (int i = 0; i < maximum; i++, local_index++) {
6750 const uint8_t param_name[] = { '_', '1' + i };
6751 pm_constant_id_t constant_id = pm_constant_pool_find(&scope_node->parser->constant_pool, param_name, 2);
6752 RUBY_ASSERT(constant_id && "parser should fill in any gaps in numbered parameters");
6753 pm_insert_local_index(constant_id, local_index, index_lookup_table, local_table_for_iseq, scope_node);
6754 }
6755 body->param.lead_num = maximum;
6756 body->param.flags.has_lead = true;
6757 }
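// As a rough illustration, in
//
//     [1, 2, 3].map { _2 }
//
// the block's `maximum` is 2, so `_1` and `_2` are both inserted as leading
// parameters even though only `_2` is referenced in the body.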
6758
6759 // Fill in the anonymous `it` parameter, if it exists
6760 if (scope_node->parameters && PM_NODE_TYPE_P(scope_node->parameters, PM_IT_PARAMETERS_NODE)) {
6761 body->param.lead_num = 1;
6762 body->param.flags.has_lead = true;
6763 }
6764
6765 //********END OF STEP 3**********
6766
6767 //********STEP 4**********
6768 // Goal: fill in the method body locals
6769 // To be explicit, these are the non-parameter locals
6770 // We fill in the block_locals, if they exist
6771 // lambda { |x; y| y }
6772 // ^
6773 if (block_locals && block_locals->size) {
6774 for (size_t i = 0; i < block_locals->size; i++, local_index++) {
6775 pm_constant_id_t constant_id = ((const pm_block_local_variable_node_t *) block_locals->nodes[i])->name;
6776 pm_insert_local_index(constant_id, local_index, index_lookup_table, local_table_for_iseq, scope_node);
6777 }
6778 }
6779
6780 // Fill in any locals we missed
6781 if (scope_node->locals.size) {
6782 for (size_t i = 0; i < scope_node->locals.size; i++) {
6783 pm_constant_id_t constant_id = locals->ids[i];
6784 if (constant_id) {
6785 struct pm_local_table_insert_ctx ctx;
6786 ctx.scope_node = scope_node;
6787 ctx.local_table_for_iseq = local_table_for_iseq;
6788 ctx.local_index = local_index;
6789
6790 st_update(index_lookup_table, (st_data_t)constant_id, pm_local_table_insert_func, (st_data_t)&ctx);
6791
6792 local_index = ctx.local_index;
6793 }
6794 }
6795 }
6796
6797 //********END OF STEP 4**********
6798
6799 // We set the index_lookup_table on the scope node so we can
6800 // refer to the parameters correctly
6801 if (scope_node->index_lookup_table) {
6802 st_free_table(scope_node->index_lookup_table);
6803 }
6804 scope_node->index_lookup_table = index_lookup_table;
6805 iseq_calc_param_size(iseq);
6806
6807 if (ISEQ_BODY(iseq)->param.flags.forwardable) {
6808 // We're treating `...` as a parameter so that frame
6809 // pushing won't clobber it.
6810 ISEQ_BODY(iseq)->param.size += 1;
6811 }
6812
6813 // FIXME: args?
6814 iseq_set_local_table(iseq, local_table_for_iseq, 0);
6815 iseq_set_parameters_lvar_state(iseq);
6816
6817 scope_node->local_table_for_iseq_size = local_table_for_iseq->size;
6818
6819 if (keyword != NULL) {
6820 size_t keyword_start_index = keyword->bits_start - keyword->num;
6821 keyword->table = (ID *)&ISEQ_BODY(iseq)->local_table[keyword_start_index];
6822 }
6823
6824 //********STEP 5************
6825 // Goal: compile anything that needed to be compiled
6826 if (optionals_list && optionals_list->size) {
6827 LABEL **opt_table = (LABEL **) ALLOC_N(VALUE, optionals_list->size + 1);
6828 LABEL *label;
6829
6830 // TODO: Should we make an api for NEW_LABEL where you can pass
6831 // a pointer to the label it should fill out? We already
6832 // have a list of labels allocated above so it seems wasteful
6833 // to do the copies.
6834 for (size_t i = 0; i < optionals_list->size; i++) {
6835 label = NEW_LABEL(location.line);
6836 opt_table[i] = label;
6837 PUSH_LABEL(ret, label);
6838 pm_node_t *optional_node = optionals_list->nodes[i];
6839 PM_COMPILE_NOT_POPPED(optional_node);
6840 }
6841
6842 // Set the last label
6843 label = NEW_LABEL(location.line);
6844 opt_table[optionals_list->size] = label;
6845 PUSH_LABEL(ret, label);
6846
6847 body->param.opt_table = (const VALUE *) opt_table;
6848 }
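// As a rough sketch of what the table above is for: with
//
//     def foo(e = 1, f = 2)
//
// opt_table holds three labels. Calling `foo` enters at the first label and
// evaluates both defaults, `foo(5)` enters at the second and only evaluates
// `f`'s default, and `foo(5, 6)` jumps straight past both to the last label.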
6849
6850 if (keywords_list && keywords_list->size) {
6851 size_t optional_index = 0;
6852 for (size_t i = 0; i < keywords_list->size; i++) {
6853 pm_node_t *keyword_parameter_node = keywords_list->nodes[i];
6854 pm_constant_id_t name;
6855
6856 switch (PM_NODE_TYPE(keyword_parameter_node)) {
6857 case PM_OPTIONAL_KEYWORD_PARAMETER_NODE: {
6858 // def foo(a, (b, *c, d), e = 1, *f, g, (h, *i, j), k:, l: 1, **m, &n)
6859 // ^^^^
6860 const pm_optional_keyword_parameter_node_t *cast = ((const pm_optional_keyword_parameter_node_t *) keyword_parameter_node);
6861
6862 pm_node_t *value = cast->value;
6863 name = cast->name;
6864
6865 if (!PM_NODE_FLAG_P(value, PM_NODE_FLAG_STATIC_LITERAL) || PM_CONTAINER_P(value)) {
6866 LABEL *end_label = NEW_LABEL(location.line);
6867
6868 pm_local_index_t index = pm_lookup_local_index(iseq, scope_node, name, 0);
6869 int kw_bits_idx = table_size - body->param.keyword->bits_start;
6870 PUSH_INSN2(ret, location, checkkeyword, INT2FIX(kw_bits_idx + VM_ENV_DATA_SIZE - 1), INT2FIX(optional_index));
6871 PUSH_INSNL(ret, location, branchif, end_label);
6872 PM_COMPILE(value);
6873 PUSH_SETLOCAL(ret, location, index.index, index.level);
6874 PUSH_LABEL(ret, end_label);
6875 }
6876 optional_index++;
6877 break;
6878 }
6879 case PM_REQUIRED_KEYWORD_PARAMETER_NODE:
6880 // def foo(a, (b, *c, d), e = 1, *f, g, (h, *i, j), k:, l: 1, **m, &n)
6881 // ^^
6882 break;
6883 default:
6884 rb_bug("Unexpected keyword parameter node type %s", pm_node_type_to_str(PM_NODE_TYPE(keyword_parameter_node)));
6885 }
6886 }
6887 }
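// As a rough sketch of the checkkeyword sequence above: for
//
//     def foo(l: rand)
//
// the default is not a static literal, so the method body begins with
// something like
//
//     checkkeyword <bits>, 0
//     branchif     done
//     ...compile rand...
//     setlocal     l
//   done:
//
// so `rand` only runs when the caller did not pass `l:`.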
6888
6889 if (requireds_list && requireds_list->size) {
6890 for (size_t i = 0; i < requireds_list->size; i++) {
6891 // For each MultiTargetNode, read the hidden local that holds the
6892 // incoming value and emit the writes that destructure it into its
6893 // component locals.
6894 const pm_node_t *required = requireds_list->nodes[i];
6895
6896 if (PM_NODE_TYPE_P(required, PM_MULTI_TARGET_NODE)) {
6897 PUSH_GETLOCAL(ret, location, table_size - (int)i, 0);
6898 pm_compile_destructured_param_writes(iseq, (const pm_multi_target_node_t *) required, ret, scope_node);
6899 }
6900 }
6901 }
6902
6903 if (posts_list && posts_list->size) {
6904 for (size_t i = 0; i < posts_list->size; i++) {
6905 // For each MultiTargetNode, read the hidden local that holds the
6906 // incoming value and emit the writes that destructure it into its
6907 // component locals.
6908 const pm_node_t *post = posts_list->nodes[i];
6909
6910 if (PM_NODE_TYPE_P(post, PM_MULTI_TARGET_NODE)) {
6911 PUSH_GETLOCAL(ret, location, table_size - body->param.post_start - (int) i, 0);
6912 pm_compile_destructured_param_writes(iseq, (const pm_multi_target_node_t *) post, ret, scope_node);
6913 }
6914 }
6915 }
6916
6917 switch (body->type) {
6918 case ISEQ_TYPE_PLAIN: {
6919 RUBY_ASSERT(PM_NODE_TYPE_P(scope_node->ast_node, PM_INTERPOLATED_REGULAR_EXPRESSION_NODE));
6920
6921 const pm_interpolated_regular_expression_node_t *cast = (const pm_interpolated_regular_expression_node_t *) scope_node->ast_node;
6922 pm_compile_regexp_dynamic(iseq, (const pm_node_t *) cast, &cast->parts, &location, ret, popped, scope_node);
6923
6924 break;
6925 }
6926 case ISEQ_TYPE_BLOCK: {
6927 LABEL *start = ISEQ_COMPILE_DATA(iseq)->start_label = NEW_LABEL(0);
6928 LABEL *end = ISEQ_COMPILE_DATA(iseq)->end_label = NEW_LABEL(0);
6929 const pm_node_location_t block_location = { .line = body->location.first_lineno, .node_id = scope_node->ast_node->node_id };
6930
6931 start->rescued = LABEL_RESCUE_BEG;
6932 end->rescued = LABEL_RESCUE_END;
6933
6934 // `for` nodes automatically assign each iteration's value to their
6935 // index variable. We need to handle that write here because it has
6936 // to happen in the context of the block. Note that this happens
6937 // before the B_CALL tracepoint event.
6938 if (PM_NODE_TYPE_P(scope_node->ast_node, PM_FOR_NODE)) {
6939 pm_compile_for_node_index(iseq, ((const pm_for_node_t *) scope_node->ast_node)->index, ret, scope_node);
6940 }
6941
6942 PUSH_TRACE(ret, RUBY_EVENT_B_CALL);
6943 PUSH_INSN(ret, block_location, nop);
6944 PUSH_LABEL(ret, start);
6945
6946 if (scope_node->body != NULL) {
6947 switch (PM_NODE_TYPE(scope_node->ast_node)) {
6948 case PM_POST_EXECUTION_NODE: {
6949 const pm_post_execution_node_t *cast = (const pm_post_execution_node_t *) scope_node->ast_node;
6950 PUSH_INSN1(ret, block_location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
6951
6952 // We create another ScopeNode from the statements within the PostExecutionNode
6953 pm_scope_node_t next_scope_node;
6954 pm_scope_node_init((const pm_node_t *) cast->statements, &next_scope_node, scope_node);
6955
6956 const rb_iseq_t *block = NEW_CHILD_ISEQ(&next_scope_node, make_name_for_block(body->parent_iseq), ISEQ_TYPE_BLOCK, location.line);
6957 pm_scope_node_destroy(&next_scope_node);
6958
6959 PUSH_CALL_WITH_BLOCK(ret, block_location, id_core_set_postexe, INT2FIX(0), block);
6960 break;
6961 }
6962 case PM_INTERPOLATED_REGULAR_EXPRESSION_NODE: {
6963 const pm_interpolated_regular_expression_node_t *cast = (const pm_interpolated_regular_expression_node_t *) scope_node->ast_node;
6964 pm_compile_regexp_dynamic(iseq, (const pm_node_t *) cast, &cast->parts, &location, ret, popped, scope_node);
6965 break;
6966 }
6967 default:
6968 pm_compile_node(iseq, scope_node->body, ret, popped, scope_node);
6969 break;
6970 }
6971 }
6972 else {
6973 PUSH_INSN(ret, block_location, putnil);
6974 }
6975
6976 PUSH_LABEL(ret, end);
6977 PUSH_TRACE(ret, RUBY_EVENT_B_RETURN);
6978 ISEQ_COMPILE_DATA(iseq)->last_line = body->location.code_location.end_pos.lineno;
6979
6980 /* the wide-range catch handler must be put last */
6981 PUSH_CATCH_ENTRY(CATCH_TYPE_REDO, start, end, NULL, start);
6982 PUSH_CATCH_ENTRY(CATCH_TYPE_NEXT, start, end, NULL, end);
6983 break;
6984 }
6985 case ISEQ_TYPE_ENSURE: {
6986 const pm_node_location_t statements_location = (scope_node->body != NULL ? PM_NODE_START_LOCATION(scope_node->parser, scope_node->body) : location);
6987 iseq_set_exception_local_table(iseq);
6988
6989 if (scope_node->body != NULL) {
6990 PM_COMPILE_POPPED((const pm_node_t *) scope_node->body);
6991 }
6992
6993 PUSH_GETLOCAL(ret, statements_location, 1, 0);
6994 PUSH_INSN1(ret, statements_location, throw, INT2FIX(0));
6995 return;
6996 }
6997 case ISEQ_TYPE_METHOD: {
6998 ISEQ_COMPILE_DATA(iseq)->root_node = (const void *) scope_node->body;
6999 PUSH_TRACE(ret, RUBY_EVENT_CALL);
7000
7001 if (scope_node->body) {
7002 PM_COMPILE((const pm_node_t *) scope_node->body);
7003 }
7004 else {
7005 PUSH_INSN(ret, location, putnil);
7006 }
7007
7008 ISEQ_COMPILE_DATA(iseq)->root_node = (const void *) scope_node->body;
7009 PUSH_TRACE(ret, RUBY_EVENT_RETURN);
7010
7011 ISEQ_COMPILE_DATA(iseq)->last_line = body->location.code_location.end_pos.lineno;
7012 break;
7013 }
7014 case ISEQ_TYPE_RESCUE: {
7015 iseq_set_exception_local_table(iseq);
7016 if (PM_NODE_TYPE_P(scope_node->ast_node, PM_RESCUE_MODIFIER_NODE)) {
7017 LABEL *lab = NEW_LABEL(location.line);
7018 LABEL *rescue_end = NEW_LABEL(location.line);
7019 PUSH_GETLOCAL(ret, location, LVAR_ERRINFO, 0);
7020 PUSH_INSN1(ret, location, putobject, rb_eStandardError);
7021 PUSH_INSN1(ret, location, checkmatch, INT2FIX(VM_CHECKMATCH_TYPE_RESCUE));
7022 PUSH_INSNL(ret, location, branchif, lab);
7023 PUSH_INSNL(ret, location, jump, rescue_end);
7024 PUSH_LABEL(ret, lab);
7025 PUSH_TRACE(ret, RUBY_EVENT_RESCUE);
7026 PM_COMPILE((const pm_node_t *) scope_node->body);
7027 PUSH_INSN(ret, location, leave);
7028 PUSH_LABEL(ret, rescue_end);
7029 PUSH_GETLOCAL(ret, location, LVAR_ERRINFO, 0);
7030 }
7031 else {
7032 PM_COMPILE((const pm_node_t *) scope_node->ast_node);
7033 }
7034 PUSH_INSN1(ret, location, throw, INT2FIX(0));
7035
7036 return;
7037 }
7038 default:
7039 if (scope_node->body) {
7040 PM_COMPILE((const pm_node_t *) scope_node->body);
7041 }
7042 else {
7043 PUSH_INSN(ret, location, putnil);
7044 }
7045 break;
7046 }
7047
7048 if (PM_NODE_TYPE_P(scope_node->ast_node, PM_CLASS_NODE) || PM_NODE_TYPE_P(scope_node->ast_node, PM_MODULE_NODE)) {
7049 const pm_node_location_t end_location = PM_NODE_END_LOCATION(scope_node->parser, scope_node->ast_node);
7050 PUSH_TRACE(ret, RUBY_EVENT_END);
7051 ISEQ_COMPILE_DATA(iseq)->last_line = end_location.line;
7052 }
7053
7054 if (!PM_NODE_TYPE_P(scope_node->ast_node, PM_ENSURE_NODE)) {
7055 const pm_node_location_t location = { .line = ISEQ_COMPILE_DATA(iseq)->last_line, .node_id = scope_node->ast_node->node_id };
7056 PUSH_INSN(ret, location, leave);
7057 }
7058}
7059
7060static inline void
7061pm_compile_alias_global_variable_node(rb_iseq_t *iseq, const pm_alias_global_variable_node_t *node, const pm_node_location_t *location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
7062{
7063 // alias $foo $bar
7064 // ^^^^^^^^^^^^^^^
7065 PUSH_INSN1(ret, *location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
7066
7067 {
7068 const pm_location_t *name_loc = &node->new_name->location;
7069 VALUE operand = ID2SYM(rb_intern3((const char *) name_loc->start, name_loc->end - name_loc->start, scope_node->encoding));
7070 PUSH_INSN1(ret, *location, putobject, operand);
7071 }
7072
7073 {
7074 const pm_location_t *name_loc = &node->old_name->location;
7075 VALUE operand = ID2SYM(rb_intern3((const char *) name_loc->start, name_loc->end - name_loc->start, scope_node->encoding));
7076 PUSH_INSN1(ret, *location, putobject, operand);
7077 }
7078
7079 PUSH_SEND(ret, *location, id_core_set_variable_alias, INT2FIX(2));
7080 if (popped) PUSH_INSN(ret, *location, pop);
7081}
7082
7083static inline void
7084pm_compile_alias_method_node(rb_iseq_t *iseq, const pm_alias_method_node_t *node, const pm_node_location_t *location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
7085{
7086 PUSH_INSN1(ret, *location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
7087 PUSH_INSN1(ret, *location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_CBASE));
7088 PM_COMPILE_NOT_POPPED(node->new_name);
7089 PM_COMPILE_NOT_POPPED(node->old_name);
7090
7091 PUSH_SEND(ret, *location, id_core_set_method_alias, INT2FIX(3));
7092 if (popped) PUSH_INSN(ret, *location, pop);
7093}
7094
7095static inline void
7096pm_compile_and_node(rb_iseq_t *iseq, const pm_and_node_t *node, const pm_node_location_t *location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
7097{
7098 LABEL *end_label = NEW_LABEL(location->line);
7099
7100 PM_COMPILE_NOT_POPPED(node->left);
7101 if (!popped) PUSH_INSN(ret, *location, dup);
7102 PUSH_INSNL(ret, *location, branchunless, end_label);
7103
7104 if (!popped) PUSH_INSN(ret, *location, pop);
7105 PM_COMPILE(node->right);
7106 PUSH_LABEL(ret, end_label);
7107}
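// As a rough sketch of what the function above emits for `foo && bar` when
// the result is used:
//
//     ...compile foo...
//     dup                  # keep a copy as the potential result
//     branchunless done    # short-circuit if foo is falsy
//     pop                  # discard the copy, the result is bar
//     ...compile bar...
//   done: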
7108
7109static inline void
7110pm_compile_array_node(rb_iseq_t *iseq, const pm_node_t *node, const pm_node_list_t *elements, const pm_node_location_t *location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
7111{
7112 // If every node in the array is static, then we can compile the entire
7113 // array now instead of later.
7114 if (PM_NODE_FLAG_P(node, PM_NODE_FLAG_STATIC_LITERAL)) {
7115 // We're only going to compile this node if it's not popped. If it
7116 // is popped, then we know we don't need to do anything since it's
7117 // statically known.
7118 if (!popped) {
7119 if (elements->size) {
7120 VALUE value = pm_static_literal_value(iseq, node, scope_node);
7121 RB_OBJ_SET_FROZEN_SHAREABLE(value);
7122 PUSH_INSN1(ret, *location, duparray, value);
7123 }
7124 else {
7125 PUSH_INSN1(ret, *location, newarray, INT2FIX(0));
7126 }
7127 }
7128 return;
7129 }
7130
7131 // Here since we know there are possible side-effects inside the
7132 // array contents, we're going to build it entirely at runtime.
7133 // We'll do this by pushing all of the elements onto the stack and
7134 // then combining them with newarray.
7135 //
7136 // If this array is popped, then this serves only to ensure we enact
7137 // all side-effects (like method calls) that are contained within
7138 // the array contents.
7139 //
7140 // We treat each run of non-splat elements as its own array
7141 // (emitted with newarray/pushtoarray) and then continually
7142 // concatenate the splatted values onto it with concattoarray.
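// For example (roughly), `[a, 1, 2, *rest, 3]` becomes: compile `a`, `1`,
// and `2`, newarray 3, compile `rest`, concattoarray, compile `3`, and a
// final pushtoarray 1 when the last chunk is flushed.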
7143 const int max_new_array_size = 0x100;
7144 const unsigned int min_tmp_array_size = 0x40;
7145
7146 int new_array_size = 0;
7147 bool first_chunk = true;
7148
7149 // This is an optimization wherein we keep track of whether or not
7150 // the previous element was a static literal. If it was, then we do
7151 // not attempt to check if we have a subarray that can be optimized.
7152 // If it was not, then we do check.
7153 bool static_literal = false;
7154
7155 // Either create a new array, or push to the existing array.
7156#define FLUSH_CHUNK \
7157 if (new_array_size) { \
7158 if (first_chunk) PUSH_INSN1(ret, *location, newarray, INT2FIX(new_array_size)); \
7159 else PUSH_INSN1(ret, *location, pushtoarray, INT2FIX(new_array_size)); \
7160 first_chunk = false; \
7161 new_array_size = 0; \
7162 }
7163
7164 for (size_t index = 0; index < elements->size; index++) {
7165 const pm_node_t *element = elements->nodes[index];
7166
7167 if (PM_NODE_TYPE_P(element, PM_SPLAT_NODE)) {
7168 FLUSH_CHUNK;
7169
7170 const pm_splat_node_t *splat_element = (const pm_splat_node_t *) element;
7171 if (splat_element->expression) {
7172 PM_COMPILE_NOT_POPPED(splat_element->expression);
7173 }
7174 else {
7175 pm_local_index_t index = pm_lookup_local_index(iseq, scope_node, PM_CONSTANT_MULT, 0);
7176 PUSH_GETLOCAL(ret, *location, index.index, index.level);
7177 }
7178
7179 if (first_chunk) {
7180 // If this is the first element of the array then we
7181 // need to splatarray the elements into the list.
7182 PUSH_INSN1(ret, *location, splatarray, Qtrue);
7183 first_chunk = false;
7184 }
7185 else {
7186 PUSH_INSN(ret, *location, concattoarray);
7187 }
7188
7189 static_literal = false;
7190 }
7191 else if (PM_NODE_TYPE_P(element, PM_KEYWORD_HASH_NODE)) {
7192 if (new_array_size == 0 && first_chunk) {
7193 PUSH_INSN1(ret, *location, newarray, INT2FIX(0));
7194 first_chunk = false;
7195 }
7196 else {
7197 FLUSH_CHUNK;
7198 }
7199
7200 // If we get here, then this is the last element of the
7201 // array/arguments, because it cannot be followed by
7202 // anything else without a syntax error. This looks like:
7203 //
7204 // [foo, bar, baz: qux]
7205 // ^^^^^^^^
7206 //
7207 // [foo, bar, **baz]
7208 // ^^^^^
7209 //
7210 const pm_keyword_hash_node_t *keyword_hash = (const pm_keyword_hash_node_t *) element;
7211 pm_compile_hash_elements(iseq, element, &keyword_hash->elements, 0, Qundef, false, ret, scope_node);
7212
7213 // This boolean controls the manner in which we push the
7214 // hash onto the array. If it's all keyword splats, then we
7215 // can use the very specialized pushtoarraykwsplat
7216 // instruction to check if it's empty before we push it.
7217 size_t splats = 0;
7218 while (splats < keyword_hash->elements.size && PM_NODE_TYPE_P(keyword_hash->elements.nodes[splats], PM_ASSOC_SPLAT_NODE)) splats++;
7219
7220 if (keyword_hash->elements.size == splats) {
7221 PUSH_INSN(ret, *location, pushtoarraykwsplat);
7222 }
7223 else {
7224 new_array_size++;
7225 }
7226 }
7227 else if (
7228 PM_NODE_FLAG_P(element, PM_NODE_FLAG_STATIC_LITERAL) &&
7229 !PM_CONTAINER_P(element) &&
7230 !static_literal &&
7231 ((index + min_tmp_array_size) < elements->size)
7232 ) {
7233 // If we have a static literal, then there's the potential
7234 // to group a bunch of them together with a literal array
7235 // and then concat them together.
7236 size_t right_index = index + 1;
7237 while (
7238 right_index < elements->size &&
7239 PM_NODE_FLAG_P(elements->nodes[right_index], PM_NODE_FLAG_STATIC_LITERAL) &&
7240 !PM_CONTAINER_P(elements->nodes[right_index])
7241 ) right_index++;
7242
7243 size_t tmp_array_size = right_index - index;
7244 if (tmp_array_size >= min_tmp_array_size) {
7245 VALUE tmp_array = rb_ary_hidden_new(tmp_array_size);
7246
7247 // Create the temporary array.
7248 for (; tmp_array_size; tmp_array_size--)
7249 rb_ary_push(tmp_array, pm_static_literal_value(iseq, elements->nodes[index++], scope_node));
7250
7251 index--; // about to be incremented by for loop
7252 RB_OBJ_SET_FROZEN_SHAREABLE(tmp_array);
7253
7254 // Emit the optimized code.
7255 FLUSH_CHUNK;
7256 if (first_chunk) {
7257 PUSH_INSN1(ret, *location, duparray, tmp_array);
7258 first_chunk = false;
7259 }
7260 else {
7261 PUSH_INSN1(ret, *location, putobject, tmp_array);
7262 PUSH_INSN(ret, *location, concattoarray);
7263 }
7264 }
7265 else {
7266 PM_COMPILE_NOT_POPPED(element);
7267 if (++new_array_size >= max_new_array_size) FLUSH_CHUNK;
7268 static_literal = true;
7269 }
7270 } else {
7271 PM_COMPILE_NOT_POPPED(element);
7272 if (++new_array_size >= max_new_array_size) FLUSH_CHUNK;
7273 static_literal = false;
7274 }
7275 }
7276
7277 FLUSH_CHUNK;
7278 if (popped) PUSH_INSN(ret, *location, pop);
7279
7280#undef FLUSH_CHUNK
7281}
7282
7283static inline void
7284pm_compile_break_node(rb_iseq_t *iseq, const pm_break_node_t *node, const pm_node_location_t *location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
7285{
7286 unsigned long throw_flag = 0;
7287
7288 if (ISEQ_COMPILE_DATA(iseq)->redo_label != 0 && can_add_ensure_iseq(iseq)) {
7289 /* while/until */
7290 LABEL *splabel = NEW_LABEL(0);
7291 PUSH_LABEL(ret, splabel);
7292 PUSH_ADJUST(ret, *location, ISEQ_COMPILE_DATA(iseq)->redo_label);
7293
7294 if (node->arguments != NULL) {
7295 PM_COMPILE_NOT_POPPED((const pm_node_t *) node->arguments);
7296 }
7297 else {
7298 PUSH_INSN(ret, *location, putnil);
7299 }
7300
7301 pm_add_ensure_iseq(ret, iseq, 0, scope_node);
7302 PUSH_INSNL(ret, *location, jump, ISEQ_COMPILE_DATA(iseq)->end_label);
7303 PUSH_ADJUST_RESTORE(ret, splabel);
7304 if (!popped) PUSH_INSN(ret, *location, putnil);
7305 }
7306 else {
7307 const rb_iseq_t *ip = iseq;
7308
7309 while (ip) {
7310 if (!ISEQ_COMPILE_DATA(ip)) {
7311 ip = 0;
7312 break;
7313 }
7314
7315 if (ISEQ_COMPILE_DATA(ip)->redo_label != 0) {
7316 throw_flag = VM_THROW_NO_ESCAPE_FLAG;
7317 }
7318 else if (ISEQ_BODY(ip)->type == ISEQ_TYPE_BLOCK) {
7319 throw_flag = 0;
7320 }
7321 else if (ISEQ_BODY(ip)->type == ISEQ_TYPE_EVAL) {
7322 COMPILE_ERROR(iseq, location->line, "Invalid break");
7323 return;
7324 }
7325 else {
7326 ip = ISEQ_BODY(ip)->parent_iseq;
7327 continue;
7328 }
7329
7330 /* escape from block */
7331 if (node->arguments != NULL) {
7332 PM_COMPILE_NOT_POPPED((const pm_node_t *) node->arguments);
7333 }
7334 else {
7335 PUSH_INSN(ret, *location, putnil);
7336 }
7337
7338 PUSH_INSN1(ret, *location, throw, INT2FIX(throw_flag | TAG_BREAK));
7339 if (popped) PUSH_INSN(ret, *location, pop);
7340
7341 return;
7342 }
7343
7344 COMPILE_ERROR(iseq, location->line, "Invalid break");
7345 }
7346}
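// As a rough illustration of the two paths above: inside a `while` or
// `until` loop (redo_label is set) `break` compiles down to a jump to the
// loop's end label, while inside a block such as
//
//     [1, 2, 3].each { |i| break i }
//
// it compiles to a `throw TAG_BREAK`, letting the VM unwind out of the
// block's frame.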
7347
7348static inline void
7349pm_compile_call_node(rb_iseq_t *iseq, const pm_call_node_t *node, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
7350{
7351 ID method_id = pm_constant_id_lookup(scope_node, node->name);
7352
7353 const pm_location_t *message_loc = &node->message_loc;
7354 if (message_loc->start == NULL) message_loc = &node->base.location;
7355
7356 const pm_node_location_t location = PM_LOCATION_START_LOCATION(scope_node->parser, message_loc, node->base.node_id);
7357 const char *builtin_func;
7358
7359 if (UNLIKELY(iseq_has_builtin_function_table(iseq)) && (builtin_func = pm_iseq_builtin_function_name(scope_node, node->receiver, method_id)) != NULL) {
7360 pm_compile_builtin_function_call(iseq, ret, scope_node, node, &location, popped, ISEQ_COMPILE_DATA(iseq)->current_block, builtin_func);
7361 return;
7362 }
7363
7364 LABEL *start = NEW_LABEL(location.line);
7365 if (node->block) PUSH_LABEL(ret, start);
7366
7367 switch (method_id) {
7368 case idUMinus: {
7369 if (pm_opt_str_freeze_p(iseq, node)) {
7370 VALUE value = parse_static_literal_string(iseq, scope_node, node->receiver, &((const pm_string_node_t * ) node->receiver)->unescaped);
7371 const struct rb_callinfo *callinfo = new_callinfo(iseq, idUMinus, 0, 0, NULL, FALSE);
7372 PUSH_INSN2(ret, location, opt_str_uminus, value, callinfo);
7373 if (popped) PUSH_INSN(ret, location, pop);
7374 return;
7375 }
7376 break;
7377 }
7378 case idFreeze: {
7379 if (pm_opt_str_freeze_p(iseq, node)) {
7380 VALUE value = parse_static_literal_string(iseq, scope_node, node->receiver, &((const pm_string_node_t * ) node->receiver)->unescaped);
7381 const struct rb_callinfo *callinfo = new_callinfo(iseq, idFreeze, 0, 0, NULL, FALSE);
7382 PUSH_INSN2(ret, location, opt_str_freeze, value, callinfo);
7383 if (popped) PUSH_INSN(ret, location, pop);
7384 return;
7385 }
7386 break;
7387 }
7388 }
7389
7390 if (PM_NODE_FLAG_P(node, PM_CALL_NODE_FLAGS_ATTRIBUTE_WRITE) && !popped) {
7391 PUSH_INSN(ret, location, putnil);
7392 }
7393
7394 if (node->receiver == NULL) {
7395 PUSH_INSN(ret, location, putself);
7396 }
7397 else {
7398 if (method_id == idCall && PM_NODE_TYPE_P(node->receiver, PM_LOCAL_VARIABLE_READ_NODE)) {
7399 const pm_local_variable_read_node_t *read_node_cast = (const pm_local_variable_read_node_t *) node->receiver;
7400 uint32_t node_id = node->receiver->node_id;
7401 int idx, level;
7402
7403 if (iseq_block_param_id_p(iseq, pm_constant_id_lookup(scope_node, read_node_cast->name), &idx, &level)) {
7404 ADD_ELEM(ret, (LINK_ELEMENT *) new_insn_body(iseq, location.line, node_id, BIN(getblockparamproxy), 2, INT2FIX((idx) + VM_ENV_DATA_SIZE - 1), INT2FIX(level)));
7405 }
7406 else {
7407 PM_COMPILE_NOT_POPPED(node->receiver);
7408 }
7409 }
7410 else {
7411 PM_COMPILE_NOT_POPPED(node->receiver);
7412 }
7413 }
7414
7415 pm_compile_call(iseq, node, ret, popped, scope_node, method_id, start);
7416 return;
7417}
7418
7419static inline void
7420pm_compile_call_operator_write_node(rb_iseq_t *iseq, const pm_call_operator_write_node_t *node, const pm_node_location_t *location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
7421{
7422 int flag = 0;
7423
7424 if (PM_NODE_FLAG_P(node, PM_CALL_NODE_FLAGS_IGNORE_VISIBILITY)) {
7425 flag = VM_CALL_FCALL;
7426 }
7427
7428 PM_COMPILE_NOT_POPPED(node->receiver);
7429
7430 LABEL *safe_label = NULL;
7431 if (PM_NODE_FLAG_P(node, PM_CALL_NODE_FLAGS_SAFE_NAVIGATION)) {
7432 safe_label = NEW_LABEL(location->line);
7433 PUSH_INSN(ret, *location, dup);
7434 PUSH_INSNL(ret, *location, branchnil, safe_label);
7435 }
7436
7437 PUSH_INSN(ret, *location, dup);
7438
7439 ID id_read_name = pm_constant_id_lookup(scope_node, node->read_name);
7440 PUSH_SEND_WITH_FLAG(ret, *location, id_read_name, INT2FIX(0), INT2FIX(flag));
7441
7442 PM_COMPILE_NOT_POPPED(node->value);
7443 ID id_operator = pm_constant_id_lookup(scope_node, node->binary_operator);
7444 PUSH_SEND(ret, *location, id_operator, INT2FIX(1));
7445
7446 if (!popped) {
7447 PUSH_INSN(ret, *location, swap);
7448 PUSH_INSN1(ret, *location, topn, INT2FIX(1));
7449 }
7450
7451 ID id_write_name = pm_constant_id_lookup(scope_node, node->write_name);
7452 PUSH_SEND_WITH_FLAG(ret, *location, id_write_name, INT2FIX(1), INT2FIX(flag));
7453
7454 if (safe_label != NULL && popped) PUSH_LABEL(ret, safe_label);
7455 PUSH_INSN(ret, *location, pop);
7456 if (safe_label != NULL && !popped) PUSH_LABEL(ret, safe_label);
7457}
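// As a rough sketch of what the function above emits for `recv.foo += 1`
// when the result is used:
//
//     ...compile recv...
//     dup                 # one copy of the receiver for the reader, one for the writer
//     send :foo           # read the current value
//     putobject 1
//     send :+             # apply the binary operator
//     swap; topn 1        # keep the computed value as the expression result
//     send :foo=          # write it back
//     pop
//
// With safe navigation (`recv&.foo += 1`) an extra dup/branchnil pair skips
// the whole sequence when the receiver is nil.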
7458
7475static VALUE
7476pm_compile_case_node_dispatch(rb_iseq_t *iseq, VALUE dispatch, const pm_node_t *node, LABEL *label, const pm_scope_node_t *scope_node)
7477{
7478 VALUE key = Qundef;
7479 switch (PM_NODE_TYPE(node)) {
7480 case PM_FLOAT_NODE: {
7481 key = pm_static_literal_value(iseq, node, scope_node);
7482 double intptr;
7483
7484 if (modf(RFLOAT_VALUE(key), &intptr) == 0.0) {
7485 key = (FIXABLE(intptr) ? LONG2FIX((long) intptr) : rb_dbl2big(intptr));
7486 }
7487
7488 break;
7489 }
7490 case PM_FALSE_NODE:
7491 case PM_INTEGER_NODE:
7492 case PM_NIL_NODE:
7493 case PM_SOURCE_FILE_NODE:
7494 case PM_SOURCE_LINE_NODE:
7495 case PM_SYMBOL_NODE:
7496 case PM_TRUE_NODE:
7497 key = pm_static_literal_value(iseq, node, scope_node);
7498 break;
7499 case PM_STRING_NODE: {
7500 const pm_string_node_t *cast = (const pm_string_node_t *) node;
7501 key = parse_static_literal_string(iseq, scope_node, node, &cast->unescaped);
7502 break;
7503 }
7504 default:
7505 return Qundef;
7506 }
7507
7508 if (NIL_P(rb_hash_lookup(dispatch, key))) {
7509 rb_hash_aset(dispatch, key, ((VALUE) label) | 1);
7510 }
7511 return dispatch;
7512}
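// As a rough illustration of the normalization above: in
//
//     case x
//     when 1   then :a
//     when 2.0 then :b
//     end
//
// the `2.0` condition is folded to the integer key `2` because it has no
// fractional part, so it would share a hash entry with an integer `2`
// condition, and since keys are only added when absent, the earliest
// matching `when` clause keeps its label.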
7513
7517static inline void
7518pm_compile_case_node(rb_iseq_t *iseq, const pm_case_node_t *cast, const pm_node_location_t *node_location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
7519{
7520 const pm_parser_t *parser = scope_node->parser;
7521 const pm_node_location_t location = *node_location;
7522 const pm_node_list_t *conditions = &cast->conditions;
7523
7524 // This is the anchor that we will compile the conditions of the various
7525 // `when` nodes into. If a match is found, they will need to jump into
7526 // the body_seq anchor to the correct spot.
7527 DECL_ANCHOR(cond_seq);
7528
7529 // This is the anchor that we will compile the bodies of the various
7530 // `when` nodes into. We'll make sure that the clauses that are compiled
7531 // jump into the correct spots within this anchor.
7532 DECL_ANCHOR(body_seq);
7533
7534 // This is the label where all of the when clauses will jump to if they
7535 // have matched and are done executing their bodies.
7536 LABEL *end_label = NEW_LABEL(location.line);
7537
7538 // If we have a predicate on this case statement, then it's going to
7539 // compare all of the various when clauses to the predicate. If we
7540 // don't, then it's basically an if-elsif-else chain.
7541 if (cast->predicate == NULL) {
7542 // Establish branch coverage for the case node.
7543 VALUE branches = Qfalse;
7544 rb_code_location_t case_location = { 0 };
7545 int branch_id = 0;
7546
7547 if (PM_BRANCH_COVERAGE_P(iseq)) {
7548 case_location = pm_code_location(scope_node, (const pm_node_t *) cast);
7549 branches = decl_branch_base(iseq, PTR2NUM(cast), &case_location, "case");
7550 }
7551
7552 // Loop through each clause in the case node and compile each of
7553 // the conditions within them into cond_seq. If they match, they
7554 // should jump into their respective bodies in body_seq.
7555 for (size_t clause_index = 0; clause_index < conditions->size; clause_index++) {
7556 const pm_when_node_t *clause = (const pm_when_node_t *) conditions->nodes[clause_index];
7557 const pm_node_list_t *conditions = &clause->conditions;
7558
7559 int clause_lineno = pm_node_line_number(parser, (const pm_node_t *) clause);
7560 LABEL *label = NEW_LABEL(clause_lineno);
7561 PUSH_LABEL(body_seq, label);
7562
7563 // Establish branch coverage for the when clause.
7564 if (PM_BRANCH_COVERAGE_P(iseq)) {
7565 rb_code_location_t branch_location = pm_code_location(scope_node, clause->statements != NULL ? ((const pm_node_t *) clause->statements) : ((const pm_node_t *) clause));
7566 add_trace_branch_coverage(iseq, body_seq, &branch_location, branch_location.beg_pos.column, branch_id++, "when", branches);
7567 }
7568
7569 if (clause->statements != NULL) {
7570 pm_compile_node(iseq, (const pm_node_t *) clause->statements, body_seq, popped, scope_node);
7571 }
7572 else if (!popped) {
7573 PUSH_SYNTHETIC_PUTNIL(body_seq, iseq);
7574 }
7575
7576 PUSH_INSNL(body_seq, location, jump, end_label);
7577
7578 // Compile each of the conditions for the when clause into the
7579 // cond_seq. Each one should have a unique condition and should
7580 // jump to the subsequent one if it doesn't match.
7581 for (size_t condition_index = 0; condition_index < conditions->size; condition_index++) {
7582 const pm_node_t *condition = conditions->nodes[condition_index];
7583
7584 if (PM_NODE_TYPE_P(condition, PM_SPLAT_NODE)) {
7585 pm_node_location_t cond_location = PM_NODE_START_LOCATION(parser, condition);
7586 PUSH_INSN(cond_seq, cond_location, putnil);
7587 pm_compile_node(iseq, condition, cond_seq, false, scope_node);
7588 PUSH_INSN1(cond_seq, cond_location, checkmatch, INT2FIX(VM_CHECKMATCH_TYPE_WHEN | VM_CHECKMATCH_ARRAY));
7589 PUSH_INSNL(cond_seq, cond_location, branchif, label);
7590 }
7591 else {
7592 LABEL *next_label = NEW_LABEL(pm_node_line_number(parser, condition));
7593 pm_compile_branch_condition(iseq, cond_seq, condition, label, next_label, false, scope_node);
7594 PUSH_LABEL(cond_seq, next_label);
7595 }
7596 }
7597 }
7598
7599 // Establish branch coverage for the else clause (implicit or
7600 // explicit).
7601 if (PM_BRANCH_COVERAGE_P(iseq)) {
7602 rb_code_location_t branch_location;
7603
7604 if (cast->else_clause == NULL) {
7605 branch_location = case_location;
7606 } else if (cast->else_clause->statements == NULL) {
7607 branch_location = pm_code_location(scope_node, (const pm_node_t *) cast->else_clause);
7608 } else {
7609 branch_location = pm_code_location(scope_node, (const pm_node_t *) cast->else_clause->statements);
7610 }
7611
7612 add_trace_branch_coverage(iseq, cond_seq, &branch_location, branch_location.beg_pos.column, branch_id, "else", branches);
7613 }
7614
7615 // Compile the else clause if there is one.
7616 if (cast->else_clause != NULL) {
7617 pm_compile_node(iseq, (const pm_node_t *) cast->else_clause, cond_seq, popped, scope_node);
7618 }
7619 else if (!popped) {
7620 PUSH_SYNTHETIC_PUTNIL(cond_seq, iseq);
7621 }
7622
7623 // Finally, jump to the end label if none of the other conditions
7624 // have matched.
7625 PUSH_INSNL(cond_seq, location, jump, end_label);
7626 PUSH_SEQ(ret, cond_seq);
7627 }
7628 else {
7629 // Establish branch coverage for the case node.
7630 VALUE branches = Qfalse;
7631 rb_code_location_t case_location = { 0 };
7632 int branch_id = 0;
7633
7634 if (PM_BRANCH_COVERAGE_P(iseq)) {
7635 case_location = pm_code_location(scope_node, (const pm_node_t *) cast);
7636 branches = decl_branch_base(iseq, PTR2NUM(cast), &case_location, "case");
7637 }
7638
7639 // This is the label where everything will fall into if none of the
7640 // conditions matched.
7641 LABEL *else_label = NEW_LABEL(location.line);
7642
7643 // It's possible for us to speed up the case node by using a
7644 // dispatch hash. This is a hash that maps the conditions of the
7645 // various when clauses to the labels of their bodies. If we can
7646 // compile the conditions into a hash key, then we can use a hash
7647 // lookup to jump directly to the correct when clause body.
7648 VALUE dispatch = Qundef;
7649 if (ISEQ_COMPILE_DATA(iseq)->option->specialized_instruction) {
7650 dispatch = rb_hash_new();
7651 RHASH_TBL_RAW(dispatch)->type = &cdhash_type;
7652 }
7653
7654 // We're going to loop through each of the conditions in the case
7655 // node and compile each of their contents into both the cond_seq
7656 // and the body_seq. Each condition will use its own label to jump
7657 // from its conditions into its body.
7658 //
7659 // Note that none of the code in the loop below should be adding
7660 // anything to ret, as we're going to lay out the entire set of case
7661 // node instructions later.
7662 for (size_t clause_index = 0; clause_index < conditions->size; clause_index++) {
7663 const pm_when_node_t *clause = (const pm_when_node_t *) conditions->nodes[clause_index];
7664 pm_node_location_t clause_location = PM_NODE_START_LOCATION(parser, (const pm_node_t *) clause);
7665
7666 const pm_node_list_t *conditions = &clause->conditions;
7667 LABEL *label = NEW_LABEL(clause_location.line);
7668
7669 // Compile each of the conditions for the when clause into the
7670 // cond_seq. Each one should have a unique comparison that then
7671 // jumps into the body if it matches.
7672 for (size_t condition_index = 0; condition_index < conditions->size; condition_index++) {
7673 const pm_node_t *condition = conditions->nodes[condition_index];
7674 const pm_node_location_t condition_location = PM_NODE_START_LOCATION(parser, condition);
7675
7676 // If we haven't already abandoned the optimization, then
7677 // we're going to try to compile the condition into the
7678 // dispatch hash.
7679 if (dispatch != Qundef) {
7680 dispatch = pm_compile_case_node_dispatch(iseq, dispatch, condition, label, scope_node);
7681 }
7682
7683 if (PM_NODE_TYPE_P(condition, PM_SPLAT_NODE)) {
7684 PUSH_INSN(cond_seq, condition_location, dup);
7685 pm_compile_node(iseq, condition, cond_seq, false, scope_node);
7686 PUSH_INSN1(cond_seq, condition_location, checkmatch, INT2FIX(VM_CHECKMATCH_TYPE_CASE | VM_CHECKMATCH_ARRAY));
7687 }
7688 else {
7689 if (PM_NODE_TYPE_P(condition, PM_STRING_NODE)) {
7690 const pm_string_node_t *string = (const pm_string_node_t *) condition;
7691 VALUE value = parse_static_literal_string(iseq, scope_node, condition, &string->unescaped);
7692 PUSH_INSN1(cond_seq, condition_location, putobject, value);
7693 }
7694 else {
7695 pm_compile_node(iseq, condition, cond_seq, false, scope_node);
7696 }
7697
7698 PUSH_INSN1(cond_seq, condition_location, topn, INT2FIX(1));
7699 PUSH_SEND_WITH_FLAG(cond_seq, condition_location, idEqq, INT2NUM(1), INT2FIX(VM_CALL_FCALL | VM_CALL_ARGS_SIMPLE));
7700 }
7701
7702 PUSH_INSNL(cond_seq, condition_location, branchif, label);
7703 }
7704
7705 // Now, add the label to the body and compile the body of the
7706 // when clause. This involves popping the predicate, compiling
7707 // the statements to be executed, and then compiling a jump to
7708 // the end of the case node.
7709 PUSH_LABEL(body_seq, label);
7710 PUSH_INSN(body_seq, clause_location, pop);
7711
7712 // Establish branch coverage for the when clause.
7713 if (PM_BRANCH_COVERAGE_P(iseq)) {
7714 rb_code_location_t branch_location = pm_code_location(scope_node, clause->statements != NULL ? ((const pm_node_t *) clause->statements) : ((const pm_node_t *) clause));
7715 add_trace_branch_coverage(iseq, body_seq, &branch_location, branch_location.beg_pos.column, branch_id++, "when", branches);
7716 }
7717
7718 if (clause->statements != NULL) {
7719 pm_compile_node(iseq, (const pm_node_t *) clause->statements, body_seq, popped, scope_node);
7720 }
7721 else if (!popped) {
7722 PUSH_SYNTHETIC_PUTNIL(body_seq, iseq);
7723 }
7724
7725 PUSH_INSNL(body_seq, clause_location, jump, end_label);
7726 }
7727
7728 // Now that we have compiled the conditions and the bodies of the
7729 // various when clauses, we can compile the predicate, lay out the
7730 // conditions, compile the fallback else clause if there is one, and
7731 // finally put in the bodies of the when clauses.
7732 PM_COMPILE_NOT_POPPED(cast->predicate);
7733
7734 // If we have a dispatch hash, then we'll use it here to create the
7735 // optimization.
7736 if (dispatch != Qundef) {
7737 PUSH_INSN(ret, location, dup);
7738 RB_OBJ_SET_SHAREABLE(dispatch); // It is intentional that the hash is shareable but not frozen, because compile.c modifies it. This Hash instance is not otherwise accessible, so it is safe to leave it unfrozen.
7739 PUSH_INSN2(ret, location, opt_case_dispatch, dispatch, else_label);
7740 LABEL_REF(else_label);
7741 }
7742
7743 PUSH_SEQ(ret, cond_seq);
7744
7745 // Compile either the explicit else clause or an implicit else
7746 // clause.
7747 PUSH_LABEL(ret, else_label);
7748
7749 if (cast->else_clause != NULL) {
7750 pm_node_location_t else_location = PM_NODE_START_LOCATION(parser, cast->else_clause->statements != NULL ? ((const pm_node_t *) cast->else_clause->statements) : ((const pm_node_t *) cast->else_clause));
7751 PUSH_INSN(ret, else_location, pop);
7752
7753 // Establish branch coverage for the else clause.
7754 if (PM_BRANCH_COVERAGE_P(iseq)) {
7755 rb_code_location_t branch_location = pm_code_location(scope_node, cast->else_clause->statements != NULL ? ((const pm_node_t *) cast->else_clause->statements) : ((const pm_node_t *) cast->else_clause));
7756 add_trace_branch_coverage(iseq, ret, &branch_location, branch_location.beg_pos.column, branch_id, "else", branches);
7757 }
7758
7759 PM_COMPILE((const pm_node_t *) cast->else_clause);
7760 PUSH_INSNL(ret, else_location, jump, end_label);
7761 }
7762 else {
7763 PUSH_INSN(ret, location, pop);
7764
7765 // Establish branch coverage for the implicit else clause.
7766 if (PM_BRANCH_COVERAGE_P(iseq)) {
7767 add_trace_branch_coverage(iseq, ret, &case_location, case_location.beg_pos.column, branch_id, "else", branches);
7768 }
7769
7770 if (!popped) PUSH_INSN(ret, location, putnil);
7771 PUSH_INSNL(ret, location, jump, end_label);
7772 }
7773 }
7774
7775 PUSH_SEQ(ret, body_seq);
7776 PUSH_LABEL(ret, end_label);
7777}
7778
7779static inline void
7780pm_compile_case_match_node(rb_iseq_t *iseq, const pm_case_match_node_t *node, const pm_node_location_t *location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
7781{
7782 // This is the anchor that we will compile the bodies of the various
7783 // `in` nodes into. We'll make sure that the patterns that are compiled
7784 // jump into the correct spots within this anchor.
7785 DECL_ANCHOR(body_seq);
7786
7787 // This is the anchor that we will compile the patterns of the various
7788 // `in` nodes into. If a match is found, they will need to jump into the
7789 // body_seq anchor to the correct spot.
7790 DECL_ANCHOR(cond_seq);
7791
7792 // This label is used to indicate the end of the entire node. It is
7793 // jumped to after the entire stack is cleaned up.
7794 LABEL *end_label = NEW_LABEL(location->line);
7795
7796 // This label is used as the fallback for the case match. If no match is
7797 // found, then we jump to this label. This is either an `else` clause or
7798 // an error handler.
7799 LABEL *else_label = NEW_LABEL(location->line);
7800
7801 // We're going to use this to uniquely identify each branch so that we
7802 // can track coverage information.
7803 rb_code_location_t case_location = { 0 };
7804 VALUE branches = Qfalse;
7805 int branch_id = 0;
7806
7807 if (PM_BRANCH_COVERAGE_P(iseq)) {
7808 case_location = pm_code_location(scope_node, (const pm_node_t *) node);
7809 branches = decl_branch_base(iseq, PTR2NUM(node), &case_location, "case");
7810 }
7811
7812 // If there is only one pattern, then the behavior changes a bit. It
7813 // effectively gets treated as a match required node (this is how it is
7814 // represented in the other parser).
7815 bool in_single_pattern = node->else_clause == NULL && node->conditions.size == 1;
7816
7817 // First, we're going to push a bunch of stuff onto the stack that is
7818 // going to serve as our scratch space.
7819 if (in_single_pattern) {
7820 PUSH_INSN(ret, *location, putnil); // key error key
7821 PUSH_INSN(ret, *location, putnil); // key error matchee
7822 PUSH_INSN1(ret, *location, putobject, Qfalse); // key error?
7823 PUSH_INSN(ret, *location, putnil); // error string
7824 }
7825
7826 // Now we're going to compile the value to match against.
7827 PUSH_INSN(ret, *location, putnil); // deconstruct cache
7828 PM_COMPILE_NOT_POPPED(node->predicate);
7829
7830 // Next, we'll loop through every in clause and compile its body into
7831 // the body_seq anchor and its pattern into the cond_seq anchor. We'll
7832 // make sure the pattern knows how to jump correctly into the body if it
7833 // finds a match.
7834 for (size_t index = 0; index < node->conditions.size; index++) {
7835 const pm_node_t *condition = node->conditions.nodes[index];
7836 RUBY_ASSERT(PM_NODE_TYPE_P(condition, PM_IN_NODE));
7837
7838 const pm_in_node_t *in_node = (const pm_in_node_t *) condition;
7839 const pm_node_location_t in_location = PM_NODE_START_LOCATION(scope_node->parser, in_node);
7840 const pm_node_location_t pattern_location = PM_NODE_START_LOCATION(scope_node->parser, in_node->pattern);
7841
7842 if (branch_id) {
7843 PUSH_INSN(body_seq, in_location, putnil);
7844 }
7845
7846 LABEL *body_label = NEW_LABEL(in_location.line);
7847 PUSH_LABEL(body_seq, body_label);
7848 PUSH_INSN1(body_seq, in_location, adjuststack, INT2FIX(in_single_pattern ? 6 : 2));
7849
7850 // Establish branch coverage for the in clause.
7851 if (PM_BRANCH_COVERAGE_P(iseq)) {
7852 rb_code_location_t branch_location = pm_code_location(scope_node, in_node->statements != NULL ? ((const pm_node_t *) in_node->statements) : ((const pm_node_t *) in_node));
7853 add_trace_branch_coverage(iseq, body_seq, &branch_location, branch_location.beg_pos.column, branch_id++, "in", branches);
7854 }
7855
7856 if (in_node->statements != NULL) {
7857 PM_COMPILE_INTO_ANCHOR(body_seq, (const pm_node_t *) in_node->statements);
7858 }
7859 else if (!popped) {
7860 PUSH_SYNTHETIC_PUTNIL(body_seq, iseq);
7861 }
7862
7863 PUSH_INSNL(body_seq, in_location, jump, end_label);
7864 LABEL *next_pattern_label = NEW_LABEL(pattern_location.line);
7865
7866 PUSH_INSN(cond_seq, pattern_location, dup);
7867 pm_compile_pattern(iseq, scope_node, in_node->pattern, cond_seq, body_label, next_pattern_label, in_single_pattern, true, 2);
7868 PUSH_LABEL(cond_seq, next_pattern_label);
7869 LABEL_UNREMOVABLE(next_pattern_label);
7870 }
7871
7872 if (node->else_clause != NULL) {
7873 // If we have an `else` clause, then this becomes our fallback (and
7874 // there is no need to compile in code to potentially raise an
7875 // error).
7876 const pm_else_node_t *else_node = node->else_clause;
7877
7878 PUSH_LABEL(cond_seq, else_label);
7879 PUSH_INSN(cond_seq, *location, pop);
7880 PUSH_INSN(cond_seq, *location, pop);
7881
7882 // Establish branch coverage for the else clause.
7883 if (PM_BRANCH_COVERAGE_P(iseq)) {
7884 rb_code_location_t branch_location = pm_code_location(scope_node, else_node->statements != NULL ? ((const pm_node_t *) else_node->statements) : ((const pm_node_t *) else_node));
7885 add_trace_branch_coverage(iseq, cond_seq, &branch_location, branch_location.beg_pos.column, branch_id, "else", branches);
7886 }
7887
7888 PM_COMPILE_INTO_ANCHOR(cond_seq, (const pm_node_t *) else_node);
7889 PUSH_INSNL(cond_seq, *location, jump, end_label);
7890 PUSH_INSN(cond_seq, *location, putnil);
7891 if (popped) PUSH_INSN(cond_seq, *location, putnil);
7892 }
7893 else {
7894 // Otherwise, if we do not have an `else` clause, we will compile in
7895 // the code to handle raising an appropriate error.
7896 PUSH_LABEL(cond_seq, else_label);
7897
7898 // Establish branch coverage for the implicit else clause.
7899 add_trace_branch_coverage(iseq, cond_seq, &case_location, case_location.beg_pos.column, branch_id, "else", branches);
7900
7901 if (in_single_pattern) {
7902 pm_compile_pattern_error_handler(iseq, scope_node, (const pm_node_t *) node, cond_seq, end_label, popped);
7903 }
7904 else {
7905 PUSH_INSN1(cond_seq, *location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
7906 PUSH_INSN1(cond_seq, *location, putobject, rb_eNoMatchingPatternError);
7907 PUSH_INSN1(cond_seq, *location, topn, INT2FIX(2));
7908 PUSH_SEND(cond_seq, *location, id_core_raise, INT2FIX(2));
7909
7910 PUSH_INSN1(cond_seq, *location, adjuststack, INT2FIX(3));
7911 if (!popped) PUSH_INSN(cond_seq, *location, putnil);
7912 PUSH_INSNL(cond_seq, *location, jump, end_label);
7913 PUSH_INSN1(cond_seq, *location, dupn, INT2FIX(1));
7914 if (popped) PUSH_INSN(cond_seq, *location, putnil);
7915 }
7916 }
7917
7918 // At the end of all of this compilation, we will add the code for the
7919 // conditions first, then the various bodies, then mark the end of the
7920 // entire sequence with the end label.
7921 PUSH_SEQ(ret, cond_seq);
7922 PUSH_SEQ(ret, body_seq);
7923 PUSH_LABEL(ret, end_label);
7924}
7925
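// Compiles a zsuper call, that is `super` written without an argument list,
// which implicitly forwards the parameters of the current method. For an
// illustrative example:
//
//     def foo(a, b = 1, *rest, key:, **opts, &blk)
//       super
//     end
//
// The code below re-reads each kind of parameter (lead, opt, rest, post, and
// keywords) out of the local table and passes them along to invokesuper.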
7926static inline void
7927pm_compile_forwarding_super_node(rb_iseq_t *iseq, const pm_forwarding_super_node_t *node, const pm_node_location_t *location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
7928{
7929 const rb_iseq_t *block = NULL;
7930 const rb_iseq_t *previous_block = NULL;
7931 LABEL *retry_label = NULL;
7932 LABEL *retry_end_l = NULL;
7933
7934 if (node->block != NULL) {
7935 previous_block = ISEQ_COMPILE_DATA(iseq)->current_block;
7936 ISEQ_COMPILE_DATA(iseq)->current_block = NULL;
7937
7938 retry_label = NEW_LABEL(location->line);
7939 retry_end_l = NEW_LABEL(location->line);
7940
7941 PUSH_LABEL(ret, retry_label);
7942 }
7943 else {
7944 iseq_set_use_block(ISEQ_BODY(iseq)->local_iseq);
7945 }
7946
7947 PUSH_INSN(ret, *location, putself);
7948 int flag = VM_CALL_ZSUPER | VM_CALL_SUPER | VM_CALL_FCALL;
7949
7950 if (node->block != NULL) {
7951 pm_scope_node_t next_scope_node;
7952 pm_scope_node_init((const pm_node_t *) node->block, &next_scope_node, scope_node);
7953
7954 ISEQ_COMPILE_DATA(iseq)->current_block = block = NEW_CHILD_ISEQ(&next_scope_node, make_name_for_block(iseq), ISEQ_TYPE_BLOCK, location->line);
7955 pm_scope_node_destroy(&next_scope_node);
7956 RB_OBJ_WRITTEN(iseq, Qundef, (VALUE) block);
7957 }
7958
7959 DECL_ANCHOR(args);
7960
7961 struct rb_iseq_constant_body *const body = ISEQ_BODY(iseq);
7962 const rb_iseq_t *local_iseq = body->local_iseq;
7963 const struct rb_iseq_constant_body *const local_body = ISEQ_BODY(local_iseq);
7964
7965 int argc = 0;
7966 int depth = get_lvar_level(iseq);
7967
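    // When the enclosing method accepts `...` (for example `def foo(...)` with
    // a bare `super` in its body), the packed arguments are forwarded through
    // a single local and invokesuperforward is emitted instead.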
7968 if (ISEQ_BODY(ISEQ_BODY(iseq)->local_iseq)->param.flags.forwardable) {
7969 flag |= VM_CALL_FORWARDING;
7970 pm_local_index_t mult_local = pm_lookup_local_index(iseq, scope_node, PM_CONSTANT_DOT3, 0);
7971 PUSH_GETLOCAL(ret, *location, mult_local.index, mult_local.level);
7972
7973 const struct rb_callinfo *callinfo = new_callinfo(iseq, 0, 0, flag, NULL, block != NULL);
7974 PUSH_INSN2(ret, *location, invokesuperforward, callinfo, block);
7975
7976 if (popped) PUSH_INSN(ret, *location, pop);
7977 if (node->block) {
7978 ISEQ_COMPILE_DATA(iseq)->current_block = previous_block;
7979 }
7980 return;
7981 }
7982
7983 if (local_body->param.flags.has_lead) {
7984 /* required arguments */
7985 for (int i = 0; i < local_body->param.lead_num; i++) {
7986 int idx = local_body->local_table_size - i;
7987 PUSH_GETLOCAL(args, *location, idx, depth);
7988 }
7989 argc += local_body->param.lead_num;
7990 }
7991
7992 if (local_body->param.flags.has_opt) {
7993 /* optional arguments */
7994 for (int j = 0; j < local_body->param.opt_num; j++) {
7995 int idx = local_body->local_table_size - (argc + j);
7996 PUSH_GETLOCAL(args, *location, idx, depth);
7997 }
7998 argc += local_body->param.opt_num;
7999 }
8000
8001 if (local_body->param.flags.has_rest) {
8002 /* rest argument */
8003 int idx = local_body->local_table_size - local_body->param.rest_start;
8004 PUSH_GETLOCAL(args, *location, idx, depth);
8005 PUSH_INSN1(args, *location, splatarray, Qfalse);
8006
8007 argc = local_body->param.rest_start + 1;
8008 flag |= VM_CALL_ARGS_SPLAT;
8009 }
8010
8011 if (local_body->param.flags.has_post) {
8012 /* post arguments */
8013 int post_len = local_body->param.post_num;
8014 int post_start = local_body->param.post_start;
8015
8016 int j = 0;
8017 for (; j < post_len; j++) {
8018 int idx = local_body->local_table_size - (post_start + j);
8019 PUSH_GETLOCAL(args, *location, idx, depth);
8020 }
8021
8022 if (local_body->param.flags.has_rest) {
8023 // argc remains unchanged from rest branch
8024 PUSH_INSN1(args, *location, newarray, INT2FIX(j));
8025 PUSH_INSN(args, *location, concatarray);
8026 }
8027 else {
8028 argc = post_len + post_start;
8029 }
8030 }
8031
8032 const struct rb_iseq_param_keyword *const local_keyword = local_body->param.keyword;
8033 if (local_body->param.flags.has_kw) {
8034 int local_size = local_body->local_table_size;
8035 argc++;
8036
8037 PUSH_INSN1(args, *location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
8038
8039 if (local_body->param.flags.has_kwrest) {
8040 int idx = local_body->local_table_size - local_keyword->rest_start;
8041 PUSH_GETLOCAL(args, *location, idx, depth);
8042 RUBY_ASSERT(local_keyword->num > 0);
8043 PUSH_SEND(args, *location, rb_intern("dup"), INT2FIX(0));
8044 }
8045 else {
8046 PUSH_INSN1(args, *location, newhash, INT2FIX(0));
8047 }
8048 int i = 0;
8049 for (; i < local_keyword->num; ++i) {
8050 ID id = local_keyword->table[i];
8051 int idx = local_size - get_local_var_idx(local_iseq, id);
8052
8053 {
8054 VALUE operand = ID2SYM(id);
8055 PUSH_INSN1(args, *location, putobject, operand);
8056 }
8057
8058 PUSH_GETLOCAL(args, *location, idx, depth);
8059 }
8060
8061 PUSH_SEND(args, *location, id_core_hash_merge_ptr, INT2FIX(i * 2 + 1));
8062 flag |= VM_CALL_KW_SPLAT| VM_CALL_KW_SPLAT_MUT;
8063 }
8064 else if (local_body->param.flags.has_kwrest) {
8065 int idx = local_body->local_table_size - local_keyword->rest_start;
8066 PUSH_GETLOCAL(args, *location, idx, depth);
8067 argc++;
8068 flag |= VM_CALL_KW_SPLAT;
8069 }
8070
8071 PUSH_SEQ(ret, args);
8072
8073 {
8074 const struct rb_callinfo *callinfo = new_callinfo(iseq, 0, argc, flag, NULL, block != NULL);
8075 PUSH_INSN2(ret, *location, invokesuper, callinfo, block);
8076 }
8077
8078 if (node->block != NULL) {
8079 pm_compile_retry_end_label(iseq, ret, retry_end_l);
8080 PUSH_CATCH_ENTRY(CATCH_TYPE_BREAK, retry_label, retry_end_l, block, retry_end_l);
8081 ISEQ_COMPILE_DATA(iseq)->current_block = previous_block;
8082 }
8083
8084 if (popped) PUSH_INSN(ret, *location, pop);
8085}
8086
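// Compiles a match required node, i.e. the rightward pattern match that raises
// when the pattern does not match. An illustrative example:
//
//     value => [Integer => x]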
8087static inline void
8088pm_compile_match_required_node(rb_iseq_t *iseq, const pm_match_required_node_t *node, const pm_node_location_t *location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
8089{
8090 LABEL *matched_label = NEW_LABEL(location->line);
8091 LABEL *unmatched_label = NEW_LABEL(location->line);
8092 LABEL *done_label = NEW_LABEL(location->line);
8093
8094 // First, we're going to push a bunch of stuff onto the stack that is
8095 // going to serve as our scratch space.
8096 PUSH_INSN(ret, *location, putnil); // key error key
8097 PUSH_INSN(ret, *location, putnil); // key error matchee
8098 PUSH_INSN1(ret, *location, putobject, Qfalse); // key error?
8099 PUSH_INSN(ret, *location, putnil); // error string
8100 PUSH_INSN(ret, *location, putnil); // deconstruct cache
8101
8102 // Next we're going to compile the value expression such that it's on
8103 // the stack.
8104 PM_COMPILE_NOT_POPPED(node->value);
8105
8106 // Here we'll dup it so that it can be used for comparison, but also be
8107 // used for error handling.
8108 PUSH_INSN(ret, *location, dup);
8109
8110 // Next we'll compile the pattern. We indicate to the pm_compile_pattern
8111 // function that this is the only pattern that will be matched against
8112 // through the in_single_pattern parameter. We also indicate that the
8113 // value to compare against is 2 slots from the top of the stack (the
8114 // base_index parameter).
8115 pm_compile_pattern(iseq, scope_node, node->pattern, ret, matched_label, unmatched_label, true, true, 2);
8116
8117 // If the pattern did not match the value, then we're going to compile
8118 // in our error handler code. This will determine which error to raise
8119 // and raise it.
8120 PUSH_LABEL(ret, unmatched_label);
8121 pm_compile_pattern_error_handler(iseq, scope_node, (const pm_node_t *) node, ret, done_label, popped);
8122
8123 // If the pattern did match, we'll clean up the values we've pushed onto
8124 // the stack and then push nil onto the stack if it's not popped.
8125 PUSH_LABEL(ret, matched_label);
8126 PUSH_INSN1(ret, *location, adjuststack, INT2FIX(6));
8127 if (!popped) PUSH_INSN(ret, *location, putnil);
8128 PUSH_INSNL(ret, *location, jump, done_label);
8129
8130 PUSH_LABEL(ret, done_label);
8131}
8132
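// Compiles a match write node: a regular expression match using the `=~`
// operator whose named captures are written into local variables. An
// illustrative example:
//
//     /(?<number>\d+)/ =~ line
//
// writes the local variable `number` (or nil when the match fails).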
8133static inline void
8134pm_compile_match_write_node(rb_iseq_t *iseq, const pm_match_write_node_t *node, const pm_node_location_t *location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
8135{
8136 LABEL *fail_label = NEW_LABEL(location->line);
8137 LABEL *end_label = NEW_LABEL(location->line);
8138
8139 // First, we'll compile the call so that all of its instructions are
8140 // present. Then we'll compile all of the local variable targets.
8141 PM_COMPILE_NOT_POPPED((const pm_node_t *) node->call);
8142
8143 // Now, check if the match was successful. If it was, then we'll
8144 // continue on and assign local variables. Otherwise we'll skip over the
8145 // assignment code.
8146 {
8147 VALUE operand = rb_id2sym(idBACKREF);
8148 PUSH_INSN1(ret, *location, getglobal, operand);
8149 }
8150
8151 PUSH_INSN(ret, *location, dup);
8152 PUSH_INSNL(ret, *location, branchunless, fail_label);
8153
8154 // If there's only a single local variable target, we can skip some of
8155 // the bookkeeping, so we'll put a special branch here.
8156 size_t targets_count = node->targets.size;
8157
8158 if (targets_count == 1) {
8159 const pm_node_t *target = node->targets.nodes[0];
8160 RUBY_ASSERT(PM_NODE_TYPE_P(target, PM_LOCAL_VARIABLE_TARGET_NODE));
8161
8162 const pm_local_variable_target_node_t *local_target = (const pm_local_variable_target_node_t *) target;
8163 pm_local_index_t index = pm_lookup_local_index(iseq, scope_node, local_target->name, local_target->depth);
8164
8165 {
8166 VALUE operand = rb_id2sym(pm_constant_id_lookup(scope_node, local_target->name));
8167 PUSH_INSN1(ret, *location, putobject, operand);
8168 }
8169
8170 PUSH_SEND(ret, *location, idAREF, INT2FIX(1));
8171 PUSH_LABEL(ret, fail_label);
8172 PUSH_SETLOCAL(ret, *location, index.index, index.level);
8173 if (popped) PUSH_INSN(ret, *location, pop);
8174 return;
8175 }
8176
8177 DECL_ANCHOR(fail_anchor);
8178
8179 // Otherwise there is more than one local variable target, so we'll need
8180 // to do some bookkeeping.
8181 for (size_t targets_index = 0; targets_index < targets_count; targets_index++) {
8182 const pm_node_t *target = node->targets.nodes[targets_index];
8183 RUBY_ASSERT(PM_NODE_TYPE_P(target, PM_LOCAL_VARIABLE_TARGET_NODE));
8184
8185 const pm_local_variable_target_node_t *local_target = (const pm_local_variable_target_node_t *) target;
8186 pm_local_index_t index = pm_lookup_local_index(iseq, scope_node, local_target->name, local_target->depth);
8187
8188 if (((size_t) targets_index) < (targets_count - 1)) {
8189 PUSH_INSN(ret, *location, dup);
8190 }
8191
8192 {
8193 VALUE operand = rb_id2sym(pm_constant_id_lookup(scope_node, local_target->name));
8194 PUSH_INSN1(ret, *location, putobject, operand);
8195 }
8196
8197 PUSH_SEND(ret, *location, idAREF, INT2FIX(1));
8198 PUSH_SETLOCAL(ret, *location, index.index, index.level);
8199
8200 PUSH_INSN(fail_anchor, *location, putnil);
8201 PUSH_SETLOCAL(fail_anchor, *location, index.index, index.level);
8202 }
8203
8204 // Since we matched successfully, now we'll jump to the end.
8205 PUSH_INSNL(ret, *location, jump, end_label);
8206
8207 // In the case that the match failed, we'll loop through each local
8208 // variable target and set all of them to `nil`.
8209 PUSH_LABEL(ret, fail_label);
8210 PUSH_INSN(ret, *location, pop);
8211 PUSH_SEQ(ret, fail_anchor);
8212
8213 // Finally, we can push the end label for either case.
8214 PUSH_LABEL(ret, end_label);
8215 if (popped) PUSH_INSN(ret, *location, pop);
8216}
8217
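// Compiles a `next` expression, e.g. `next` or `next value`. Depending on the
// enclosing iseq this is either a direct jump (when the loop is compiled into
// the current iseq) or a throw instruction tagged with TAG_NEXT so that the VM
// unwinds to the enclosing block or loop.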
8218static inline void
8219pm_compile_next_node(rb_iseq_t *iseq, const pm_next_node_t *node, const pm_node_location_t *location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
8220{
8221 if (ISEQ_COMPILE_DATA(iseq)->redo_label != 0 && can_add_ensure_iseq(iseq)) {
8222 LABEL *splabel = NEW_LABEL(0);
8223 PUSH_LABEL(ret, splabel);
8224
8225 if (node->arguments) {
8226 PM_COMPILE_NOT_POPPED((const pm_node_t *) node->arguments);
8227 }
8228 else {
8229 PUSH_INSN(ret, *location, putnil);
8230 }
8231 pm_add_ensure_iseq(ret, iseq, 0, scope_node);
8232
8233 PUSH_ADJUST(ret, *location, ISEQ_COMPILE_DATA(iseq)->redo_label);
8234 PUSH_INSNL(ret, *location, jump, ISEQ_COMPILE_DATA(iseq)->start_label);
8235
8236 PUSH_ADJUST_RESTORE(ret, splabel);
8237 if (!popped) PUSH_INSN(ret, *location, putnil);
8238 }
8239 else if (ISEQ_COMPILE_DATA(iseq)->end_label && can_add_ensure_iseq(iseq)) {
8240 LABEL *splabel = NEW_LABEL(0);
8241
8242 PUSH_LABEL(ret, splabel);
8243 PUSH_ADJUST(ret, *location, ISEQ_COMPILE_DATA(iseq)->start_label);
8244
8245 if (node->arguments != NULL) {
8246 PM_COMPILE_NOT_POPPED((const pm_node_t *) node->arguments);
8247 }
8248 else {
8249 PUSH_INSN(ret, *location, putnil);
8250 }
8251
8252 pm_add_ensure_iseq(ret, iseq, 0, scope_node);
8253 PUSH_INSNL(ret, *location, jump, ISEQ_COMPILE_DATA(iseq)->end_label);
8254 PUSH_ADJUST_RESTORE(ret, splabel);
8255 splabel->unremovable = FALSE;
8256
8257 if (!popped) PUSH_INSN(ret, *location, putnil);
8258 }
8259 else {
8260 const rb_iseq_t *ip = iseq;
8261 unsigned long throw_flag = 0;
8262
8263 while (ip) {
8264 if (!ISEQ_COMPILE_DATA(ip)) {
8265 ip = 0;
8266 break;
8267 }
8268
8269 throw_flag = VM_THROW_NO_ESCAPE_FLAG;
8270 if (ISEQ_COMPILE_DATA(ip)->redo_label != 0) {
8271 /* while loop */
8272 break;
8273 }
8274 else if (ISEQ_BODY(ip)->type == ISEQ_TYPE_BLOCK) {
8275 break;
8276 }
8277 else if (ISEQ_BODY(ip)->type == ISEQ_TYPE_EVAL) {
8278 COMPILE_ERROR(iseq, location->line, "Invalid next");
8279 return;
8280 }
8281
8282 ip = ISEQ_BODY(ip)->parent_iseq;
8283 }
8284
8285 if (ip != 0) {
8286 if (node->arguments) {
8287 PM_COMPILE_NOT_POPPED((const pm_node_t *) node->arguments);
8288 }
8289 else {
8290 PUSH_INSN(ret, *location, putnil);
8291 }
8292
8293 PUSH_INSN1(ret, *location, throw, INT2FIX(throw_flag | TAG_NEXT));
8294 if (popped) PUSH_INSN(ret, *location, pop);
8295 }
8296 else {
8297 COMPILE_ERROR(iseq, location->line, "Invalid next");
8298 }
8299 }
8300}
8301
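// Compiles a `redo` expression, e.g. `loop { redo if condition }` (where
// `condition` is only an illustrative placeholder). As with `next`, this is
// either a direct jump back to the start of the loop or block being compiled,
// or a throw instruction tagged with TAG_REDO.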
8302static inline void
8303pm_compile_redo_node(rb_iseq_t *iseq, const pm_node_location_t *location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
8304{
8305 if (ISEQ_COMPILE_DATA(iseq)->redo_label && can_add_ensure_iseq(iseq)) {
8306 LABEL *splabel = NEW_LABEL(0);
8307
8308 PUSH_LABEL(ret, splabel);
8309 PUSH_ADJUST(ret, *location, ISEQ_COMPILE_DATA(iseq)->redo_label);
8310 pm_add_ensure_iseq(ret, iseq, 0, scope_node);
8311
8312 PUSH_INSNL(ret, *location, jump, ISEQ_COMPILE_DATA(iseq)->redo_label);
8313 PUSH_ADJUST_RESTORE(ret, splabel);
8314 if (!popped) PUSH_INSN(ret, *location, putnil);
8315 }
8316 else if (ISEQ_BODY(iseq)->type != ISEQ_TYPE_EVAL && ISEQ_COMPILE_DATA(iseq)->start_label && can_add_ensure_iseq(iseq)) {
8317 LABEL *splabel = NEW_LABEL(0);
8318
8319 PUSH_LABEL(ret, splabel);
8320 pm_add_ensure_iseq(ret, iseq, 0, scope_node);
8321 PUSH_ADJUST(ret, *location, ISEQ_COMPILE_DATA(iseq)->start_label);
8322
8323 PUSH_INSNL(ret, *location, jump, ISEQ_COMPILE_DATA(iseq)->start_label);
8324 PUSH_ADJUST_RESTORE(ret, splabel);
8325 if (!popped) PUSH_INSN(ret, *location, putnil);
8326 }
8327 else {
8328 const rb_iseq_t *ip = iseq;
8329
8330 while (ip) {
8331 if (!ISEQ_COMPILE_DATA(ip)) {
8332 ip = 0;
8333 break;
8334 }
8335
8336 if (ISEQ_COMPILE_DATA(ip)->redo_label != 0) {
8337 break;
8338 }
8339 else if (ISEQ_BODY(ip)->type == ISEQ_TYPE_BLOCK) {
8340 break;
8341 }
8342 else if (ISEQ_BODY(ip)->type == ISEQ_TYPE_EVAL) {
8343 COMPILE_ERROR(iseq, location->line, "Invalid redo");
8344 return;
8345 }
8346
8347 ip = ISEQ_BODY(ip)->parent_iseq;
8348 }
8349
8350 if (ip != 0) {
8351 PUSH_INSN(ret, *location, putnil);
8352 PUSH_INSN1(ret, *location, throw, INT2FIX(VM_THROW_NO_ESCAPE_FLAG | TAG_REDO));
8353 if (popped) PUSH_INSN(ret, *location, pop);
8354 }
8355 else {
8356 COMPILE_ERROR(iseq, location->line, "Invalid redo");
8357 }
8358 }
8359}
8360
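// Compiles a rescue clause. An illustrative example:
//
//     begin
//       body
//     rescue TypeError, ArgumentError => error
//       handler
//     end
//
// Each listed exception class is tested against the raised error with
// checkmatch (defaulting to StandardError when no classes are given), and the
// optional reference (`=> error` above) is written before the statements run.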
8361static inline void
8362pm_compile_rescue_node(rb_iseq_t *iseq, const pm_rescue_node_t *node, const pm_node_location_t *location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
8363{
8364 iseq_set_exception_local_table(iseq);
8365
8366 // First, establish the labels that we need to be able to jump to within
8367 // this compilation block.
8368 LABEL *exception_match_label = NEW_LABEL(location->line);
8369 LABEL *rescue_end_label = NEW_LABEL(location->line);
8370
8371 // Next, compile each of the exceptions that we're going to be
8372 // handling. For each one, we'll add instructions to check if the
8373 // exception matches the raised one, and if it does then jump to the
8374 // exception_match_label label. Otherwise it will fall through to the
8375 // subsequent check. If there are no exceptions, we'll only check
8376 // StandardError.
8377 const pm_node_list_t *exceptions = &node->exceptions;
8378
8379 if (exceptions->size > 0) {
8380 for (size_t index = 0; index < exceptions->size; index++) {
8381 PUSH_GETLOCAL(ret, *location, LVAR_ERRINFO, 0);
8382 PM_COMPILE(exceptions->nodes[index]);
8383 int checkmatch_flags = VM_CHECKMATCH_TYPE_RESCUE;
8384 if (PM_NODE_TYPE_P(exceptions->nodes[index], PM_SPLAT_NODE)) {
8385 checkmatch_flags |= VM_CHECKMATCH_ARRAY;
8386 }
8387 PUSH_INSN1(ret, *location, checkmatch, INT2FIX(checkmatch_flags));
8388 PUSH_INSNL(ret, *location, branchif, exception_match_label);
8389 }
8390 }
8391 else {
8392 PUSH_GETLOCAL(ret, *location, LVAR_ERRINFO, 0);
8393 PUSH_INSN1(ret, *location, putobject, rb_eStandardError);
8394 PUSH_INSN1(ret, *location, checkmatch, INT2FIX(VM_CHECKMATCH_TYPE_RESCUE));
8395 PUSH_INSNL(ret, *location, branchif, exception_match_label);
8396 }
8397
8398 // If none of the exceptions that we are matching against matched, then
8399 // we'll jump straight to the rescue_end_label label.
8400 PUSH_INSNL(ret, *location, jump, rescue_end_label);
8401
8402 // Here we have the exception_match_label, which is where the
8403 // control-flow goes in the case that one of the exceptions matched.
8404 // Here we will compile the instructions to handle the exception.
8405 PUSH_LABEL(ret, exception_match_label);
8406 PUSH_TRACE(ret, RUBY_EVENT_RESCUE);
8407
8408 // If we have a reference to the exception, then we'll compile the write
8409 // into the instruction sequence. This can look quite different
8410 // depending on the kind of write being performed.
8411 if (node->reference) {
8412 DECL_ANCHOR(writes);
8413 DECL_ANCHOR(cleanup);
8414
8415 pm_compile_target_node(iseq, node->reference, ret, writes, cleanup, scope_node, NULL);
8416 PUSH_GETLOCAL(ret, *location, LVAR_ERRINFO, 0);
8417
8418 PUSH_SEQ(ret, writes);
8419 PUSH_SEQ(ret, cleanup);
8420 }
8421
8422 // If we have statements to execute, we'll compile them here. Otherwise
8423 // we'll push nil onto the stack.
8424 if (node->statements != NULL) {
8425 // We'll temporarily remove the end_label location from the iseq
8426 // when compiling the statements so that next/redo statements
8427 // inside the body will throw to the correct place instead of
8428 // jumping straight to the end of this iseq
8429 LABEL *prev_end = ISEQ_COMPILE_DATA(iseq)->end_label;
8430 ISEQ_COMPILE_DATA(iseq)->end_label = NULL;
8431
8432 PM_COMPILE((const pm_node_t *) node->statements);
8433
8434 // Now restore the end_label
8435 ISEQ_COMPILE_DATA(iseq)->end_label = prev_end;
8436 }
8437 else {
8438 PUSH_INSN(ret, *location, putnil);
8439 }
8440
8441 PUSH_INSN(ret, *location, leave);
8442
8443 // Here we'll insert the rescue_end_label label, which is jumped to if
8444 // none of the exceptions matched. Control-flow will then either jump to
8445 // the next rescue clause or fall through to the subsequent instruction,
8446 // which returns the raised error.
8447 PUSH_LABEL(ret, rescue_end_label);
8448 if (node->subsequent != NULL) {
8449 PM_COMPILE((const pm_node_t *) node->subsequent);
8450 }
8451 else {
8452 PUSH_GETLOCAL(ret, *location, 1, 0);
8453 }
8454}
8455
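// Compiles a `return` expression, e.g. `return` or `return value`. Inside a
// method body this becomes a leave instruction; in other contexts (such as
// blocks) it becomes a throw instruction tagged with TAG_RETURN so that the VM
// can unwind to the enclosing method frame.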
8456static inline void
8457pm_compile_return_node(rb_iseq_t *iseq, const pm_return_node_t *node, const pm_node_location_t *location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
8458{
8459 const pm_arguments_node_t *arguments = node->arguments;
8460 enum rb_iseq_type type = ISEQ_BODY(iseq)->type;
8461 LABEL *splabel = 0;
8462
8463 const rb_iseq_t *parent_iseq = iseq;
8464 enum rb_iseq_type parent_type = ISEQ_BODY(parent_iseq)->type;
8465 while (parent_type == ISEQ_TYPE_RESCUE || parent_type == ISEQ_TYPE_ENSURE) {
8466 if (!(parent_iseq = ISEQ_BODY(parent_iseq)->parent_iseq)) break;
8467 parent_type = ISEQ_BODY(parent_iseq)->type;
8468 }
8469
8470 switch (parent_type) {
8471 case ISEQ_TYPE_TOP:
8472 case ISEQ_TYPE_MAIN:
8473 if (arguments) {
8474 rb_warn("argument of top-level return is ignored");
8475 }
8476 if (parent_iseq == iseq) {
8477 type = ISEQ_TYPE_METHOD;
8478 }
8479 break;
8480 default:
8481 break;
8482 }
8483
8484 if (type == ISEQ_TYPE_METHOD) {
8485 splabel = NEW_LABEL(0);
8486 PUSH_LABEL(ret, splabel);
8487 PUSH_ADJUST(ret, *location, 0);
8488 }
8489
8490 if (arguments != NULL) {
8491 PM_COMPILE_NOT_POPPED((const pm_node_t *) arguments);
8492 }
8493 else {
8494 PUSH_INSN(ret, *location, putnil);
8495 }
8496
8497 if (type == ISEQ_TYPE_METHOD && can_add_ensure_iseq(iseq)) {
8498 pm_add_ensure_iseq(ret, iseq, 1, scope_node);
8499 PUSH_TRACE(ret, RUBY_EVENT_RETURN);
8500 PUSH_INSN(ret, *location, leave);
8501 PUSH_ADJUST_RESTORE(ret, splabel);
8502 if (!popped) PUSH_INSN(ret, *location, putnil);
8503 }
8504 else {
8505 PUSH_INSN1(ret, *location, throw, INT2FIX(TAG_RETURN));
8506 if (popped) PUSH_INSN(ret, *location, pop);
8507 }
8508}
8509
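// Compiles a super call that has an explicit argument list, e.g. `super(foo)`
// or `super(foo) { |x| x }` (illustrative). Bare `super`, which forwards the
// parameters of the current method, is handled by
// pm_compile_forwarding_super_node above.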
8510static inline void
8511pm_compile_super_node(rb_iseq_t *iseq, const pm_super_node_t *node, const pm_node_location_t *location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
8512{
8513 DECL_ANCHOR(args);
8514
8515 LABEL *retry_label = NEW_LABEL(location->line);
8516 LABEL *retry_end_l = NEW_LABEL(location->line);
8517
8518 const rb_iseq_t *previous_block = ISEQ_COMPILE_DATA(iseq)->current_block;
8519 const rb_iseq_t *current_block;
8520 ISEQ_COMPILE_DATA(iseq)->current_block = current_block = NULL;
8521
8522 PUSH_LABEL(ret, retry_label);
8523 PUSH_INSN(ret, *location, putself);
8524
8525 int flags = 0;
8526 struct rb_callinfo_kwarg *keywords = NULL;
8527 int argc = pm_setup_args(node->arguments, node->block, &flags, &keywords, iseq, ret, scope_node, location);
8528 bool is_forwardable = (node->arguments != NULL) && PM_NODE_FLAG_P(node->arguments, PM_ARGUMENTS_NODE_FLAGS_CONTAINS_FORWARDING);
8529 flags |= VM_CALL_SUPER | VM_CALL_FCALL;
8530
8531 if (node->block && PM_NODE_TYPE_P(node->block, PM_BLOCK_NODE)) {
8532 pm_scope_node_t next_scope_node;
8533 pm_scope_node_init(node->block, &next_scope_node, scope_node);
8534
8535 ISEQ_COMPILE_DATA(iseq)->current_block = current_block = NEW_CHILD_ISEQ(&next_scope_node, make_name_for_block(iseq), ISEQ_TYPE_BLOCK, location->line);
8536 pm_scope_node_destroy(&next_scope_node);
8537 }
8538
8539 if (!node->block) {
8540 iseq_set_use_block(ISEQ_BODY(iseq)->local_iseq);
8541 }
8542
8543 if ((flags & VM_CALL_ARGS_BLOCKARG) && (flags & VM_CALL_KW_SPLAT) && !(flags & VM_CALL_KW_SPLAT_MUT)) {
8544 PUSH_INSN(args, *location, splatkw);
8545 }
8546
8547 PUSH_SEQ(ret, args);
8548 if (is_forwardable && ISEQ_BODY(ISEQ_BODY(iseq)->local_iseq)->param.flags.forwardable) {
8549 flags |= VM_CALL_FORWARDING;
8550
8551 {
8552 const struct rb_callinfo *callinfo = new_callinfo(iseq, 0, argc, flags, keywords, current_block != NULL);
8553 PUSH_INSN2(ret, *location, invokesuperforward, callinfo, current_block);
8554 }
8555 }
8556 else {
8557 {
8558 const struct rb_callinfo *callinfo = new_callinfo(iseq, 0, argc, flags, keywords, current_block != NULL);
8559 PUSH_INSN2(ret, *location, invokesuper, callinfo, current_block);
8560 }
8561
8562 }
8563
8564 pm_compile_retry_end_label(iseq, ret, retry_end_l);
8565
8566 if (popped) PUSH_INSN(ret, *location, pop);
8567 ISEQ_COMPILE_DATA(iseq)->current_block = previous_block;
8568 PUSH_CATCH_ENTRY(CATCH_TYPE_BREAK, retry_label, retry_end_l, current_block, retry_end_l);
8569}
8570
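// Compiles a `yield` expression, e.g. `yield` or `yield 1, 2` (illustrative).
// Yield is only valid inside a method body, so compiling one at the top level
// or directly inside a class body raises the "Invalid yield" error below.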
8571static inline void
8572pm_compile_yield_node(rb_iseq_t *iseq, const pm_yield_node_t *node, const pm_node_location_t *location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
8573{
8574 switch (ISEQ_BODY(ISEQ_BODY(iseq)->local_iseq)->type) {
8575 case ISEQ_TYPE_TOP:
8576 case ISEQ_TYPE_MAIN:
8577 case ISEQ_TYPE_CLASS:
8578 COMPILE_ERROR(iseq, location->line, "Invalid yield");
8579 return;
8580 default: /* valid */;
8581 }
8582
8583 int argc = 0;
8584 int flags = 0;
8585 struct rb_callinfo_kwarg *keywords = NULL;
8586
8587 if (node->arguments) {
8588 argc = pm_setup_args(node->arguments, NULL, &flags, &keywords, iseq, ret, scope_node, location);
8589 }
8590
8591 const struct rb_callinfo *callinfo = new_callinfo(iseq, 0, argc, flags, keywords, FALSE);
8592 PUSH_INSN1(ret, *location, invokeblock, callinfo);
8593
8594 iseq_set_use_block(ISEQ_BODY(iseq)->local_iseq);
8595 if (popped) PUSH_INSN(ret, *location, pop);
8596
8597 int level = 0;
8598 for (const rb_iseq_t *tmp_iseq = iseq; tmp_iseq != ISEQ_BODY(iseq)->local_iseq; level++) {
8599 tmp_iseq = ISEQ_BODY(tmp_iseq)->parent_iseq;
8600 }
8601
8602 if (level > 0) access_outer_variables(iseq, level, rb_intern("yield"), true);
8603}
8604
8605/*
8606 * Compiles a prism node into the given linked list of instructions, ret.
8607 * This is the main dispatch over every prism node type.
8608 *
8609 * iseq -       The current instruction sequence object (used for locals)
8610 * node -       The prism node to compile
8611 * ret -        The linked list of instructions to append onto
8612 * popped -     True if compiled code should not push the result onto the stack
8613 * scope_node - Stores parser and local information for the current scope
8614 */
8615static void
8616pm_compile_node(rb_iseq_t *iseq, const pm_node_t *node, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
8617{
8618 const pm_parser_t *parser = scope_node->parser;
8619 const pm_node_location_t location = PM_NODE_START_LOCATION(parser, node);
8620 int lineno = (int) location.line;
8621
8622 if (PM_NODE_TYPE_P(node, PM_BEGIN_NODE) && (((const pm_begin_node_t *) node)->statements == NULL) && (((const pm_begin_node_t *) node)->rescue_clause != NULL)) {
8623 // If this node is a begin node and it has empty statements and also
8624 // has a rescue clause, then the other parser considers it as
8625 // starting on the same line as the rescue, as opposed to the
8626 // location of the begin keyword. We replicate that behavior here.
8627 lineno = (int) PM_NODE_START_LINE_COLUMN(parser, ((const pm_begin_node_t *) node)->rescue_clause).line;
8628 }
8629
8630 if (PM_NODE_FLAG_P(node, PM_NODE_FLAG_NEWLINE) && ISEQ_COMPILE_DATA(iseq)->last_line != lineno) {
8631 // If this node has the newline flag set and it is on a new line
8632 // from the previous nodes that have been compiled for this ISEQ,
8633 // then we need to emit a newline event.
8634 int event = RUBY_EVENT_LINE;
8635
8636 ISEQ_COMPILE_DATA(iseq)->last_line = lineno;
8637 if (lineno > 0 && ISEQ_COVERAGE(iseq) && ISEQ_LINE_COVERAGE(iseq)) {
8638 event |= RUBY_EVENT_COVERAGE_LINE;
8639 }
8640 PUSH_TRACE(ret, event);
8641 }
8642
8643 switch (PM_NODE_TYPE(node)) {
8644 case PM_ALIAS_GLOBAL_VARIABLE_NODE:
8645 // alias $foo $bar
8646 // ^^^^^^^^^^^^^^^
8647 pm_compile_alias_global_variable_node(iseq, (const pm_alias_global_variable_node_t *) node, &location, ret, popped, scope_node);
8648 return;
8649 case PM_ALIAS_METHOD_NODE:
8650 // alias foo bar
8651 // ^^^^^^^^^^^^^
8652 pm_compile_alias_method_node(iseq, (const pm_alias_method_node_t *) node, &location, ret, popped, scope_node);
8653 return;
8654 case PM_AND_NODE:
8655 // a and b
8656 // ^^^^^^^
8657 pm_compile_and_node(iseq, (const pm_and_node_t *) node, &location, ret, popped, scope_node);
8658 return;
8659 case PM_ARGUMENTS_NODE: {
8660 // break foo
8661 // ^^^
8662 //
8663 // These are ArgumentsNodes that are not compiled directly by their
8664 // parent call nodes, as happens with NextNodes, ReturnNodes, and
8665 // BreakNodes. Like an ArrayNode, they can produce an array.
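    //
    // For example (illustrative), `return 1, 2` wraps its two arguments into a
    // single array, whereas `break foo` compiles `foo` directly.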
8666 const pm_arguments_node_t *cast = (const pm_arguments_node_t *) node;
8667 const pm_node_list_t *elements = &cast->arguments;
8668
8669 if (elements->size == 1) {
8670 // If we are only returning a single element through one of the jump
8671 // nodes, then we will only compile that node directly.
8672 PM_COMPILE(elements->nodes[0]);
8673 }
8674 else {
8675 pm_compile_array_node(iseq, (const pm_node_t *) cast, elements, &location, ret, popped, scope_node);
8676 }
8677 return;
8678 }
8679 case PM_ARRAY_NODE: {
8680 // [foo, bar, baz]
8681 // ^^^^^^^^^^^^^^^
8682 const pm_array_node_t *cast = (const pm_array_node_t *) node;
8683 pm_compile_array_node(iseq, (const pm_node_t *) cast, &cast->elements, &location, ret, popped, scope_node);
8684 return;
8685 }
8686 case PM_ASSOC_NODE: {
8687 // { foo: 1 }
8688 // ^^^^^^
8689 //
8690 // foo(bar: 1)
8691 // ^^^^^^
8692 const pm_assoc_node_t *cast = (const pm_assoc_node_t *) node;
8693
8694 PM_COMPILE(cast->key);
8695 PM_COMPILE(cast->value);
8696
8697 return;
8698 }
8699 case PM_ASSOC_SPLAT_NODE: {
8700 // { **foo }
8701 // ^^^^^
8702 //
8703 // def foo(**); bar(**); end
8704 // ^^
8705 const pm_assoc_splat_node_t *cast = (const pm_assoc_splat_node_t *) node;
8706
8707 if (cast->value != NULL) {
8708 PM_COMPILE(cast->value);
8709 }
8710 else if (!popped) {
8711 pm_local_index_t index = pm_lookup_local_index(iseq, scope_node, PM_CONSTANT_POW, 0);
8712 PUSH_GETLOCAL(ret, location, index.index, index.level);
8713 }
8714
8715 return;
8716 }
8717 case PM_BACK_REFERENCE_READ_NODE: {
8718 // $+
8719 // ^^
8720 if (!popped) {
8721 const pm_back_reference_read_node_t *cast = (const pm_back_reference_read_node_t *) node;
8722 VALUE backref = pm_compile_back_reference_ref(cast);
8723
8724 PUSH_INSN2(ret, location, getspecial, INT2FIX(1), backref);
8725 }
8726 return;
8727 }
8728 case PM_BEGIN_NODE: {
8729 // begin end
8730 // ^^^^^^^^^
8731 const pm_begin_node_t *cast = (const pm_begin_node_t *) node;
8732
8733 if (cast->ensure_clause) {
8734 // Compiling the ensure clause will compile the rescue clause (if
8735 // there is one), which will compile the begin statements.
8736 pm_compile_ensure(iseq, cast, &location, ret, popped, scope_node);
8737 }
8738 else if (cast->rescue_clause) {
8739 // Compiling rescue will compile begin statements (if applicable).
8740 pm_compile_rescue(iseq, cast, &location, ret, popped, scope_node);
8741 }
8742 else {
8743 // If there is neither an ensure nor a rescue clause, then just
8744 // compile the statements.
8745 if (cast->statements != NULL) {
8746 PM_COMPILE((const pm_node_t *) cast->statements);
8747 }
8748 else if (!popped) {
8749 PUSH_SYNTHETIC_PUTNIL(ret, iseq);
8750 }
8751 }
8752 return;
8753 }
8754 case PM_BLOCK_ARGUMENT_NODE: {
8755 // foo(&bar)
8756 // ^^^^
8757 const pm_block_argument_node_t *cast = (const pm_block_argument_node_t *) node;
8758
8759 if (cast->expression != NULL) {
8760 PM_COMPILE(cast->expression);
8761 }
8762 else {
8763 // If there's no expression, this must be block forwarding.
8764 pm_local_index_t local_index = pm_lookup_local_index(iseq, scope_node, PM_CONSTANT_AND, 0);
8765 PUSH_INSN2(ret, location, getblockparamproxy, INT2FIX(local_index.index + VM_ENV_DATA_SIZE - 1), INT2FIX(local_index.level));
8766 }
8767 return;
8768 }
8769 case PM_BREAK_NODE:
8770 // break
8771 // ^^^^^
8772 //
8773 // break foo
8774 // ^^^^^^^^^
8775 pm_compile_break_node(iseq, (const pm_break_node_t *) node, &location, ret, popped, scope_node);
8776 return;
8777 case PM_CALL_NODE:
8778 // foo
8779 // ^^^
8780 //
8781 // foo.bar
8782 // ^^^^^^^
8783 //
8784 // foo.bar() {}
8785 // ^^^^^^^^^^^^
8786 pm_compile_call_node(iseq, (const pm_call_node_t *) node, ret, popped, scope_node);
8787 return;
8788 case PM_CALL_AND_WRITE_NODE: {
8789 // foo.bar &&= baz
8790 // ^^^^^^^^^^^^^^^
8791 const pm_call_and_write_node_t *cast = (const pm_call_and_write_node_t *) node;
8792 pm_compile_call_and_or_write_node(iseq, true, cast->receiver, cast->value, cast->write_name, cast->read_name, PM_NODE_FLAG_P(cast, PM_CALL_NODE_FLAGS_SAFE_NAVIGATION), &location, ret, popped, scope_node);
8793 return;
8794 }
8795 case PM_CALL_OR_WRITE_NODE: {
8796 // foo.bar ||= baz
8797 // ^^^^^^^^^^^^^^^
8798 const pm_call_or_write_node_t *cast = (const pm_call_or_write_node_t *) node;
8799 pm_compile_call_and_or_write_node(iseq, false, cast->receiver, cast->value, cast->write_name, cast->read_name, PM_NODE_FLAG_P(cast, PM_CALL_NODE_FLAGS_SAFE_NAVIGATION), &location, ret, popped, scope_node);
8800 return;
8801 }
8802 case PM_CALL_OPERATOR_WRITE_NODE:
8803 // foo.bar += baz
8804 // ^^^^^^^^^^^^^^^
8805 //
8806 // Call operator writes occur when you have a call node on the left-hand
8807 // side of a write operator that is not `=`. As an example,
8808 // `foo.bar *= 1`. This breaks down to caching the receiver on the
8809 // stack and then performing three method calls, one to read the value,
8810 // one to compute the result, and one to write the result back to the
8811 // receiver.
8812 pm_compile_call_operator_write_node(iseq, (const pm_call_operator_write_node_t *) node, &location, ret, popped, scope_node);
8813 return;
8814 case PM_CASE_NODE:
8815 // case foo; when bar; end
8816 // ^^^^^^^^^^^^^^^^^^^^^^^
8817 pm_compile_case_node(iseq, (const pm_case_node_t *) node, &location, ret, popped, scope_node);
8818 return;
8819 case PM_CASE_MATCH_NODE:
8820 // case foo; in bar; end
8821 // ^^^^^^^^^^^^^^^^^^^^^
8822 //
8823 // If you use the `case` keyword to create a case match node, the value
8824 // is checked against each of the `in` clauses until one of them
8825 // matches. If none matches, it can optionally fall back to an `else`
8826 // clause. If no `else` clause is present either, it will raise an
8827 // appropriate error.
8828 pm_compile_case_match_node(iseq, (const pm_case_match_node_t *) node, &location, ret, popped, scope_node);
8829 return;
8830 case PM_CLASS_NODE: {
8831 // class Foo; end
8832 // ^^^^^^^^^^^^^^
8833 const pm_class_node_t *cast = (const pm_class_node_t *) node;
8834
8835 ID class_id = pm_constant_id_lookup(scope_node, cast->name);
8836 VALUE class_name = rb_str_freeze(rb_sprintf("<class:%"PRIsVALUE">", rb_id2str(class_id)));
8837
8838 pm_scope_node_t next_scope_node;
8839 pm_scope_node_init((const pm_node_t *) cast, &next_scope_node, scope_node);
8840
8841 const rb_iseq_t *class_iseq = NEW_CHILD_ISEQ(&next_scope_node, class_name, ISEQ_TYPE_CLASS, location.line);
8842 pm_scope_node_destroy(&next_scope_node);
8843
8844 // TODO: Once we merge constant path nodes correctly, fix this flag
8845 const int flags = VM_DEFINECLASS_TYPE_CLASS |
8846 (cast->superclass ? VM_DEFINECLASS_FLAG_HAS_SUPERCLASS : 0) |
8847 pm_compile_class_path(iseq, cast->constant_path, &location, ret, false, scope_node);
8848
8849 if (cast->superclass) {
8850 PM_COMPILE_NOT_POPPED(cast->superclass);
8851 }
8852 else {
8853 PUSH_INSN(ret, location, putnil);
8854 }
8855
8856 {
8857 VALUE operand = ID2SYM(class_id);
8858 PUSH_INSN3(ret, location, defineclass, operand, class_iseq, INT2FIX(flags));
8859 }
8860 RB_OBJ_WRITTEN(iseq, Qundef, (VALUE)class_iseq);
8861
8862 if (popped) PUSH_INSN(ret, location, pop);
8863 return;
8864 }
8865 case PM_CLASS_VARIABLE_AND_WRITE_NODE: {
8866 // @@foo &&= bar
8867 // ^^^^^^^^^^^^^
8868 const pm_class_variable_and_write_node_t *cast = (const pm_class_variable_and_write_node_t *) node;
8869 LABEL *end_label = NEW_LABEL(location.line);
8870
8871 ID name_id = pm_constant_id_lookup(scope_node, cast->name);
8872 VALUE name = ID2SYM(name_id);
8873
8874 PUSH_INSN2(ret, location, getclassvariable, name, get_cvar_ic_value(iseq, name_id));
8875 if (!popped) PUSH_INSN(ret, location, dup);
8876
8877 PUSH_INSNL(ret, location, branchunless, end_label);
8878 if (!popped) PUSH_INSN(ret, location, pop);
8879
8880 PM_COMPILE_NOT_POPPED(cast->value);
8881 if (!popped) PUSH_INSN(ret, location, dup);
8882
8883 PUSH_INSN2(ret, location, setclassvariable, name, get_cvar_ic_value(iseq, name_id));
8884 PUSH_LABEL(ret, end_label);
8885
8886 return;
8887 }
8888 case PM_CLASS_VARIABLE_OPERATOR_WRITE_NODE: {
8889 // @@foo += bar
8890 // ^^^^^^^^^^^^
8891 const pm_class_variable_operator_write_node_t *cast = (const pm_class_variable_operator_write_node_t *) node;
8892
8893 ID name_id = pm_constant_id_lookup(scope_node, cast->name);
8894 VALUE name = ID2SYM(name_id);
8895
8896 PUSH_INSN2(ret, location, getclassvariable, name, get_cvar_ic_value(iseq, name_id));
8897 PM_COMPILE_NOT_POPPED(cast->value);
8898
8899 ID method_id = pm_constant_id_lookup(scope_node, cast->binary_operator);
8900 int flags = VM_CALL_ARGS_SIMPLE;
8901 PUSH_SEND_WITH_FLAG(ret, location, method_id, INT2NUM(1), INT2FIX(flags));
8902
8903 if (!popped) PUSH_INSN(ret, location, dup);
8904 PUSH_INSN2(ret, location, setclassvariable, name, get_cvar_ic_value(iseq, name_id));
8905
8906 return;
8907 }
8908 case PM_CLASS_VARIABLE_OR_WRITE_NODE: {
8909 // @@foo ||= bar
8910 // ^^^^^^^^^^^^^
8911 const pm_class_variable_or_write_node_t *cast = (const pm_class_variable_or_write_node_t *) node;
8912 LABEL *end_label = NEW_LABEL(location.line);
8913 LABEL *start_label = NEW_LABEL(location.line);
8914
8915 ID name_id = pm_constant_id_lookup(scope_node, cast->name);
8916 VALUE name = ID2SYM(name_id);
8917
8918 PUSH_INSN(ret, location, putnil);
8919 PUSH_INSN3(ret, location, defined, INT2FIX(DEFINED_CVAR), name, Qtrue);
8920 PUSH_INSNL(ret, location, branchunless, start_label);
8921
8922 PUSH_INSN2(ret, location, getclassvariable, name, get_cvar_ic_value(iseq, name_id));
8923 if (!popped) PUSH_INSN(ret, location, dup);
8924
8925 PUSH_INSNL(ret, location, branchif, end_label);
8926 if (!popped) PUSH_INSN(ret, location, pop);
8927
8928 PUSH_LABEL(ret, start_label);
8929 PM_COMPILE_NOT_POPPED(cast->value);
8930 if (!popped) PUSH_INSN(ret, location, dup);
8931
8932 PUSH_INSN2(ret, location, setclassvariable, name, get_cvar_ic_value(iseq, name_id));
8933 PUSH_LABEL(ret, end_label);
8934
8935 return;
8936 }
8937 case PM_CLASS_VARIABLE_READ_NODE: {
8938 // @@foo
8939 // ^^^^^
8940 if (!popped) {
8941 const pm_class_variable_read_node_t *cast = (const pm_class_variable_read_node_t *) node;
8942 ID name = pm_constant_id_lookup(scope_node, cast->name);
8943 PUSH_INSN2(ret, location, getclassvariable, ID2SYM(name), get_cvar_ic_value(iseq, name));
8944 }
8945 return;
8946 }
8947 case PM_CLASS_VARIABLE_WRITE_NODE: {
8948 // @@foo = 1
8949 // ^^^^^^^^^
8950 const pm_class_variable_write_node_t *cast = (const pm_class_variable_write_node_t *) node;
8951 PM_COMPILE_NOT_POPPED(cast->value);
8952 if (!popped) PUSH_INSN(ret, location, dup);
8953
8954 ID name = pm_constant_id_lookup(scope_node, cast->name);
8955 PUSH_INSN2(ret, location, setclassvariable, ID2SYM(name), get_cvar_ic_value(iseq, name));
8956
8957 return;
8958 }
8959 case PM_CONSTANT_PATH_NODE: {
8960 // Foo::Bar
8961 // ^^^^^^^^
8962 VALUE parts;
8963
8964 if (ISEQ_COMPILE_DATA(iseq)->option->inline_const_cache && ((parts = pm_constant_path_parts(node, scope_node)) != Qnil)) {
8965 ISEQ_BODY(iseq)->ic_size++;
8966 RB_OBJ_SET_SHAREABLE(parts);
8967 PUSH_INSN1(ret, location, opt_getconstant_path, parts);
8968 }
8969 else {
8970 DECL_ANCHOR(prefix);
8971 DECL_ANCHOR(body);
8972
8973 pm_compile_constant_path(iseq, node, prefix, body, popped, scope_node);
8974 if (LIST_INSN_SIZE_ZERO(prefix)) {
8975 PUSH_INSN(ret, location, putnil);
8976 }
8977 else {
8978 PUSH_SEQ(ret, prefix);
8979 }
8980
8981 PUSH_SEQ(ret, body);
8982 }
8983
8984 if (popped) PUSH_INSN(ret, location, pop);
8985 return;
8986 }
8987 case PM_CONSTANT_PATH_AND_WRITE_NODE: {
8988 // Foo::Bar &&= baz
8989 // ^^^^^^^^^^^^^^^^
8990 const pm_constant_path_and_write_node_t *cast = (const pm_constant_path_and_write_node_t *) node;
8991 pm_compile_constant_path_and_write_node(iseq, cast, 0, &location, ret, popped, scope_node);
8992 return;
8993 }
8994 case PM_CONSTANT_PATH_OR_WRITE_NODE: {
8995 // Foo::Bar ||= baz
8996 // ^^^^^^^^^^^^^^^^
8997 const pm_constant_path_or_write_node_t *cast = (const pm_constant_path_or_write_node_t *) node;
8998 pm_compile_constant_path_or_write_node(iseq, cast, 0, &location, ret, popped, scope_node);
8999 return;
9000 }
9001 case PM_CONSTANT_PATH_OPERATOR_WRITE_NODE: {
9002 // Foo::Bar += baz
9003 // ^^^^^^^^^^^^^^^
9004 const pm_constant_path_operator_write_node_t *cast = (const pm_constant_path_operator_write_node_t *) node;
9005 pm_compile_constant_path_operator_write_node(iseq, cast, 0, &location, ret, popped, scope_node);
9006 return;
9007 }
9008 case PM_CONSTANT_PATH_WRITE_NODE: {
9009 // Foo::Bar = 1
9010 // ^^^^^^^^^^^^
9011 const pm_constant_path_write_node_t *cast = (const pm_constant_path_write_node_t *) node;
9012 pm_compile_constant_path_write_node(iseq, cast, 0, &location, ret, popped, scope_node);
9013 return;
9014 }
9015 case PM_CONSTANT_READ_NODE: {
9016 // Foo
9017 // ^^^
9018 const pm_constant_read_node_t *cast = (const pm_constant_read_node_t *) node;
9019 VALUE name = ID2SYM(pm_constant_id_lookup(scope_node, cast->name));
9020
9021 pm_compile_constant_read(iseq, name, &cast->base.location, location.node_id, ret, scope_node);
9022 if (popped) PUSH_INSN(ret, location, pop);
9023
9024 return;
9025 }
9026 case PM_CONSTANT_AND_WRITE_NODE: {
9027 // Foo &&= bar
9028 // ^^^^^^^^^^^
9029 const pm_constant_and_write_node_t *cast = (const pm_constant_and_write_node_t *) node;
9030 pm_compile_constant_and_write_node(iseq, cast, 0, &location, ret, popped, scope_node);
9031 return;
9032 }
9033 case PM_CONSTANT_OR_WRITE_NODE: {
9034 // Foo ||= bar
9035 // ^^^^^^^^^^^
9036 const pm_constant_or_write_node_t *cast = (const pm_constant_or_write_node_t *) node;
9037 pm_compile_constant_or_write_node(iseq, cast, 0, &location, ret, popped, scope_node);
9038 return;
9039 }
9040 case PM_CONSTANT_OPERATOR_WRITE_NODE: {
9041 // Foo += bar
9042 // ^^^^^^^^^^
9043 const pm_constant_operator_write_node_t *cast = (const pm_constant_operator_write_node_t *) node;
9044 pm_compile_constant_operator_write_node(iseq, cast, 0, &location, ret, popped, scope_node);
9045 return;
9046 }
9047 case PM_CONSTANT_WRITE_NODE: {
9048 // Foo = 1
9049 // ^^^^^^^
9050 const pm_constant_write_node_t *cast = (const pm_constant_write_node_t *) node;
9051 pm_compile_constant_write_node(iseq, cast, 0, &location, ret, popped, scope_node);
9052 return;
9053 }
9054 case PM_DEF_NODE: {
9055 // def foo; end
9056 // ^^^^^^^^^^^^
9057 //
9058 // def self.foo; end
9059 // ^^^^^^^^^^^^^^^^^
9060 const pm_def_node_t *cast = (const pm_def_node_t *) node;
9061 ID method_name = pm_constant_id_lookup(scope_node, cast->name);
9062
9063 pm_scope_node_t next_scope_node;
9064 pm_scope_node_init((const pm_node_t *) cast, &next_scope_node, scope_node);
9065
9066 rb_iseq_t *method_iseq = NEW_ISEQ(&next_scope_node, rb_id2str(method_name), ISEQ_TYPE_METHOD, location.line);
9067 pm_scope_node_destroy(&next_scope_node);
9068
9069 if (cast->receiver) {
9070 PM_COMPILE_NOT_POPPED(cast->receiver);
9071 PUSH_INSN2(ret, location, definesmethod, ID2SYM(method_name), method_iseq);
9072 }
9073 else {
9074 PUSH_INSN2(ret, location, definemethod, ID2SYM(method_name), method_iseq);
9075 }
9076 RB_OBJ_WRITTEN(iseq, Qundef, (VALUE) method_iseq);
9077
9078 if (!popped) {
9079 PUSH_INSN1(ret, location, putobject, ID2SYM(method_name));
9080 }
9081
9082 return;
9083 }
9084 case PM_DEFINED_NODE: {
9085 // defined?(a)
9086 // ^^^^^^^^^^^
9087 const pm_defined_node_t *cast = (const pm_defined_node_t *) node;
9088 pm_compile_defined_expr(iseq, cast->value, &location, ret, popped, scope_node, false);
9089 return;
9090 }
9091 case PM_EMBEDDED_STATEMENTS_NODE: {
9092 // "foo #{bar}"
9093 // ^^^^^^
9094 const pm_embedded_statements_node_t *cast = (const pm_embedded_statements_node_t *) node;
9095
9096 if (cast->statements != NULL) {
9097 PM_COMPILE((const pm_node_t *) (cast->statements));
9098 }
9099 else {
9100 PUSH_SYNTHETIC_PUTNIL(ret, iseq);
9101 }
9102
9103 if (popped) PUSH_INSN(ret, location, pop);
9104 return;
9105 }
9106 case PM_EMBEDDED_VARIABLE_NODE: {
9107 // "foo #@bar"
9108 // ^^^^^
9109 const pm_embedded_variable_node_t *cast = (const pm_embedded_variable_node_t *) node;
9110 PM_COMPILE(cast->variable);
9111 return;
9112 }
9113 case PM_FALSE_NODE: {
9114 // false
9115 // ^^^^^
9116 if (!popped) {
9117 PUSH_INSN1(ret, location, putobject, Qfalse);
9118 }
9119 return;
9120 }
9121 case PM_ENSURE_NODE: {
9122 const pm_ensure_node_t *cast = (const pm_ensure_node_t *) node;
9123
9124 if (cast->statements != NULL) {
9125 PM_COMPILE((const pm_node_t *) cast->statements);
9126 }
9127
9128 return;
9129 }
9130 case PM_ELSE_NODE: {
9131 // if foo then bar else baz end
9132 // ^^^^^^^^^^^^
9133 const pm_else_node_t *cast = (const pm_else_node_t *) node;
9134
9135 if (cast->statements != NULL) {
9136 PM_COMPILE((const pm_node_t *) cast->statements);
9137 }
9138 else if (!popped) {
9139 PUSH_SYNTHETIC_PUTNIL(ret, iseq);
9140 }
9141
9142 return;
9143 }
9144 case PM_FLIP_FLOP_NODE: {
9145 // if foo .. bar; end
9146 // ^^^^^^^^^^
9147 const pm_flip_flop_node_t *cast = (const pm_flip_flop_node_t *) node;
9148
9149 LABEL *final_label = NEW_LABEL(location.line);
9150 LABEL *then_label = NEW_LABEL(location.line);
9151 LABEL *else_label = NEW_LABEL(location.line);
9152
9153 pm_compile_flip_flop(cast, else_label, then_label, iseq, location.line, ret, popped, scope_node);
9154
9155 PUSH_LABEL(ret, then_label);
9156 PUSH_INSN1(ret, location, putobject, Qtrue);
9157 PUSH_INSNL(ret, location, jump, final_label);
9158 PUSH_LABEL(ret, else_label);
9159 PUSH_INSN1(ret, location, putobject, Qfalse);
9160 PUSH_LABEL(ret, final_label);
9161
9162 return;
9163 }
9164 case PM_FLOAT_NODE: {
9165 // 1.0
9166 // ^^^
9167 if (!popped) {
9168 VALUE operand = parse_float((const pm_float_node_t *) node);
9169 PUSH_INSN1(ret, location, putobject, operand);
9170 }
9171 return;
9172 }
9173 case PM_FOR_NODE: {
9174 // for foo in bar do end
9175 // ^^^^^^^^^^^^^^^^^^^^^
9176 const pm_for_node_t *cast = (const pm_for_node_t *) node;
9177
9178 LABEL *retry_label = NEW_LABEL(location.line);
9179 LABEL *retry_end_l = NEW_LABEL(location.line);
9180
9181 // First, compile the collection that we're going to be iterating over.
9182 PUSH_LABEL(ret, retry_label);
9183 PM_COMPILE_NOT_POPPED(cast->collection);
9184
9185 // Next, create the new scope that is going to contain the block that
9186 // will be passed to the each method.
9187 pm_scope_node_t next_scope_node;
9188 pm_scope_node_init((const pm_node_t *) cast, &next_scope_node, scope_node);
9189
9190 const rb_iseq_t *child_iseq = NEW_CHILD_ISEQ(&next_scope_node, make_name_for_block(iseq), ISEQ_TYPE_BLOCK, location.line);
9191 pm_scope_node_destroy(&next_scope_node);
9192
9193 const rb_iseq_t *prev_block = ISEQ_COMPILE_DATA(iseq)->current_block;
9194 ISEQ_COMPILE_DATA(iseq)->current_block = child_iseq;
9195
9196 // Now, create the method call to each that will be used to iterate over
9197 // the collection, and pass the newly created iseq as the block.
9198 PUSH_SEND_WITH_BLOCK(ret, location, idEach, INT2FIX(0), child_iseq);
9199 pm_compile_retry_end_label(iseq, ret, retry_end_l);
9200
9201 if (popped) PUSH_INSN(ret, location, pop);
9202 ISEQ_COMPILE_DATA(iseq)->current_block = prev_block;
9203 PUSH_CATCH_ENTRY(CATCH_TYPE_BREAK, retry_label, retry_end_l, child_iseq, retry_end_l);
9204 return;
9205 }
9206 case PM_FORWARDING_ARGUMENTS_NODE:
9207 rb_bug("Cannot compile a ForwardingArgumentsNode directly\n");
9208 return;
9209 case PM_FORWARDING_SUPER_NODE:
9210 // super
9211 // ^^^^^
9212 //
9213 // super {}
9214 // ^^^^^^^^
9215 pm_compile_forwarding_super_node(iseq, (const pm_forwarding_super_node_t *) node, &location, ret, popped, scope_node);
9216 return;
9217 case PM_GLOBAL_VARIABLE_AND_WRITE_NODE: {
9218 // $foo &&= bar
9219 // ^^^^^^^^^^^^
9220 const pm_global_variable_and_write_node_t *cast = (const pm_global_variable_and_write_node_t *) node;
9221 LABEL *end_label = NEW_LABEL(location.line);
9222
9223 VALUE name = ID2SYM(pm_constant_id_lookup(scope_node, cast->name));
9224 PUSH_INSN1(ret, location, getglobal, name);
9225 if (!popped) PUSH_INSN(ret, location, dup);
9226
9227 PUSH_INSNL(ret, location, branchunless, end_label);
9228 if (!popped) PUSH_INSN(ret, location, pop);
9229
9230 PM_COMPILE_NOT_POPPED(cast->value);
9231 if (!popped) PUSH_INSN(ret, location, dup);
9232
9233 PUSH_INSN1(ret, location, setglobal, name);
9234 PUSH_LABEL(ret, end_label);
9235
9236 return;
9237 }
9238 case PM_GLOBAL_VARIABLE_OPERATOR_WRITE_NODE: {
9239 // $foo += bar
9240 // ^^^^^^^^^^^
9241 const pm_global_variable_operator_write_node_t *cast = (const pm_global_variable_operator_write_node_t *) node;
9242
9243 VALUE name = ID2SYM(pm_constant_id_lookup(scope_node, cast->name));
9244 PUSH_INSN1(ret, location, getglobal, name);
9245 PM_COMPILE_NOT_POPPED(cast->value);
9246
9247 ID method_id = pm_constant_id_lookup(scope_node, cast->binary_operator);
9248 int flags = VM_CALL_ARGS_SIMPLE;
9249 PUSH_SEND_WITH_FLAG(ret, location, method_id, INT2NUM(1), INT2FIX(flags));
9250
9251 if (!popped) PUSH_INSN(ret, location, dup);
9252 PUSH_INSN1(ret, location, setglobal, name);
9253
9254 return;
9255 }
9256 case PM_GLOBAL_VARIABLE_OR_WRITE_NODE: {
9257 // $foo ||= bar
9258 // ^^^^^^^^^^^^
9259 const pm_global_variable_or_write_node_t *cast = (const pm_global_variable_or_write_node_t *) node;
9260 LABEL *set_label = NEW_LABEL(location.line);
9261 LABEL *end_label = NEW_LABEL(location.line);
9262
9263 PUSH_INSN(ret, location, putnil);
9264 VALUE name = ID2SYM(pm_constant_id_lookup(scope_node, cast->name));
9265
9266 PUSH_INSN3(ret, location, defined, INT2FIX(DEFINED_GVAR), name, Qtrue);
9267 PUSH_INSNL(ret, location, branchunless, set_label);
9268
9269 PUSH_INSN1(ret, location, getglobal, name);
9270 if (!popped) PUSH_INSN(ret, location, dup);
9271
9272 PUSH_INSNL(ret, location, branchif, end_label);
9273 if (!popped) PUSH_INSN(ret, location, pop);
9274
9275 PUSH_LABEL(ret, set_label);
9276 PM_COMPILE_NOT_POPPED(cast->value);
9277 if (!popped) PUSH_INSN(ret, location, dup);
9278
9279 PUSH_INSN1(ret, location, setglobal, name);
9280 PUSH_LABEL(ret, end_label);
9281
9282 return;
9283 }
9284 case PM_GLOBAL_VARIABLE_READ_NODE: {
9285 // $foo
9286 // ^^^^
9287 const pm_global_variable_read_node_t *cast = (const pm_global_variable_read_node_t *) node;
9288 VALUE name = ID2SYM(pm_constant_id_lookup(scope_node, cast->name));
9289
9290 PUSH_INSN1(ret, location, getglobal, name);
9291 if (popped) PUSH_INSN(ret, location, pop);
9292
9293 return;
9294 }
9295 case PM_GLOBAL_VARIABLE_WRITE_NODE: {
9296 // $foo = 1
9297 // ^^^^^^^^
9298 const pm_global_variable_write_node_t *cast = (const pm_global_variable_write_node_t *) node;
9299 PM_COMPILE_NOT_POPPED(cast->value);
9300 if (!popped) PUSH_INSN(ret, location, dup);
9301
9302 ID name = pm_constant_id_lookup(scope_node, cast->name);
9303 PUSH_INSN1(ret, location, setglobal, ID2SYM(name));
9304
9305 return;
9306 }
9307 case PM_HASH_NODE: {
9308 // {}
9309 // ^^
9310 //
9311 // If every node in the hash is static, then we can compile the entire
9312 // hash now instead of later.
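    // For example (illustrative), `{ foo: 1, bar: 2 }` contains only static
    // literals, so it compiles to a single duphash of a precomputed hash
    // instead of being built at runtime.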
9313 if (PM_NODE_FLAG_P(node, PM_NODE_FLAG_STATIC_LITERAL)) {
9314 // We're only going to compile this node if it's not popped. If it
9315 // is popped, then we know we don't need to do anything since it's
9316 // statically known.
9317 if (!popped) {
9318 const pm_hash_node_t *cast = (const pm_hash_node_t *) node;
9319
9320 if (cast->elements.size == 0) {
9321 PUSH_INSN1(ret, location, newhash, INT2FIX(0));
9322 }
9323 else {
9324 VALUE value = pm_static_literal_value(iseq, node, scope_node);
9325 PUSH_INSN1(ret, location, duphash, value);
9326 RB_OBJ_WRITTEN(iseq, Qundef, value);
9327 }
9328 }
9329 }
9330 else {
9331 // Here since we know there are possible side-effects inside the
9332 // hash contents, we're going to build it entirely at runtime. We'll
9333 // do this by pushing all of the key-value pairs onto the stack and
9334 // then combining them with newhash.
9335 //
9336 // If this hash is popped, then this serves only to ensure we enact
9337 // all side-effects (like method calls) that are contained within
9338 // the hash contents.
9339 const pm_hash_node_t *cast = (const pm_hash_node_t *) node;
9340 const pm_node_list_t *elements = &cast->elements;
9341
9342 if (popped) {
9343 // If this hash is popped, then we can iterate through each
9344 // element and compile it. The result of each compilation will
9345 // only include the side effects of the element itself.
9346 for (size_t index = 0; index < elements->size; index++) {
9347 PM_COMPILE_POPPED(elements->nodes[index]);
9348 }
9349 }
9350 else {
9351 pm_compile_hash_elements(iseq, node, elements, 0, Qundef, false, ret, scope_node);
9352 }
9353 }
9354
9355 return;
9356 }
9357 case PM_IF_NODE: {
9358 // if foo then bar end
9359 // ^^^^^^^^^^^^^^^^^^^
9360 //
9361 // bar if foo
9362 // ^^^^^^^^^^
9363 //
9364 // foo ? bar : baz
9365 // ^^^^^^^^^^^^^^^
9366 const pm_if_node_t *cast = (const pm_if_node_t *) node;
9367 pm_compile_conditional(iseq, &location, PM_IF_NODE, (const pm_node_t *) cast, cast->statements, cast->subsequent, cast->predicate, ret, popped, scope_node);
9368 return;
9369 }
9370 case PM_IMAGINARY_NODE: {
9371 // 1i
9372 // ^^
9373 if (!popped) {
9374 VALUE operand = parse_imaginary((const pm_imaginary_node_t *) node);
9375 PUSH_INSN1(ret, location, putobject, operand);
9376 }
9377 return;
9378 }
9379 case PM_IMPLICIT_NODE: {
9380 // Implicit nodes mark places in the syntax tree where explicit syntax
9381 // was omitted, but implied. For example,
9382 //
9383 // { foo: }
9384 //
9385 // In this case a method call/local variable read is implied by virtue
9386 // of the missing value. To compile these nodes, we simply compile the
9387 // value that is implied, which is helpfully supplied by the parser.
9388 const pm_implicit_node_t *cast = (const pm_implicit_node_t *) node;
9389 PM_COMPILE(cast->value);
9390 return;
9391 }
9392 case PM_IN_NODE: {
9393 // In nodes are handled by the case match node directly, so we should
9394 // never end up hitting them through this path.
9395 rb_bug("Should not ever enter an in node directly");
9396 return;
9397 }
9398 case PM_INDEX_OPERATOR_WRITE_NODE: {
9399 // foo[bar] += baz
9400 // ^^^^^^^^^^^^^^^
9401 const pm_index_operator_write_node_t *cast = (const pm_index_operator_write_node_t *) node;
9402 pm_compile_index_operator_write_node(iseq, cast, &location, ret, popped, scope_node);
9403 return;
9404 }
9405 case PM_INDEX_AND_WRITE_NODE: {
9406 // foo[bar] &&= baz
9407 // ^^^^^^^^^^^^^^^^
9408 const pm_index_and_write_node_t *cast = (const pm_index_and_write_node_t *) node;
9409 pm_compile_index_control_flow_write_node(iseq, node, cast->receiver, cast->arguments, cast->block, cast->value, &location, ret, popped, scope_node);
9410 return;
9411 }
9412 case PM_INDEX_OR_WRITE_NODE: {
9413 // foo[bar] ||= baz
9414 // ^^^^^^^^^^^^^^^^
9415 const pm_index_or_write_node_t *cast = (const pm_index_or_write_node_t *) node;
9416 pm_compile_index_control_flow_write_node(iseq, node, cast->receiver, cast->arguments, cast->block, cast->value, &location, ret, popped, scope_node);
9417 return;
9418 }
9419 case PM_INSTANCE_VARIABLE_AND_WRITE_NODE: {
9420 // @foo &&= bar
9421 // ^^^^^^^^^^^^
9422 const pm_instance_variable_and_write_node_t *cast = (const pm_instance_variable_and_write_node_t *) node;
9423 LABEL *end_label = NEW_LABEL(location.line);
9424
9425 ID name_id = pm_constant_id_lookup(scope_node, cast->name);
9426 VALUE name = ID2SYM(name_id);
9427
9428 PUSH_INSN2(ret, location, getinstancevariable, name, get_ivar_ic_value(iseq, name_id));
9429 if (!popped) PUSH_INSN(ret, location, dup);
9430
9431 PUSH_INSNL(ret, location, branchunless, end_label);
9432 if (!popped) PUSH_INSN(ret, location, pop);
9433
9434 PM_COMPILE_NOT_POPPED(cast->value);
9435 if (!popped) PUSH_INSN(ret, location, dup);
9436
9437 PUSH_INSN2(ret, location, setinstancevariable, name, get_ivar_ic_value(iseq, name_id));
9438 PUSH_LABEL(ret, end_label);
9439
9440 return;
9441 }
9442 case PM_INSTANCE_VARIABLE_OPERATOR_WRITE_NODE: {
9443 // @foo += bar
9444 // ^^^^^^^^^^^
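// For illustration, `@foo += bar` is emitted below roughly as:
//
//   getinstancevariable :@foo, <ic>
//   <compile bar>
//   send :+, 1
//   dup                  (only when the result is used)
//   setinstancevariable :@foo, <ic>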
9445 const pm_instance_variable_operator_write_node_t *cast = (const pm_instance_variable_operator_write_node_t *) node;
9446
9447 ID name_id = pm_constant_id_lookup(scope_node, cast->name);
9448 VALUE name = ID2SYM(name_id);
9449
9450 PUSH_INSN2(ret, location, getinstancevariable, name, get_ivar_ic_value(iseq, name_id));
9451 PM_COMPILE_NOT_POPPED(cast->value);
9452
9453 ID method_id = pm_constant_id_lookup(scope_node, cast->binary_operator);
9454 int flags = VM_CALL_ARGS_SIMPLE;
9455 PUSH_SEND_WITH_FLAG(ret, location, method_id, INT2NUM(1), INT2FIX(flags));
9456
9457 if (!popped) PUSH_INSN(ret, location, dup);
9458 PUSH_INSN2(ret, location, setinstancevariable, name, get_ivar_ic_value(iseq, name_id));
9459
9460 return;
9461 }
9462 case PM_INSTANCE_VARIABLE_OR_WRITE_NODE: {
9463 // @foo ||= bar
9464 // ^^^^^^^^^^^^
9465 const pm_instance_variable_or_write_node_t *cast = (const pm_instance_variable_or_write_node_t *) node;
9466 LABEL *end_label = NEW_LABEL(location.line);
9467
9468 ID name_id = pm_constant_id_lookup(scope_node, cast->name);
9469 VALUE name = ID2SYM(name_id);
9470
9471 PUSH_INSN2(ret, location, getinstancevariable, name, get_ivar_ic_value(iseq, name_id));
9472 if (!popped) PUSH_INSN(ret, location, dup);
9473
9474 PUSH_INSNL(ret, location, branchif, end_label);
9475 if (!popped) PUSH_INSN(ret, location, pop);
9476
9477 PM_COMPILE_NOT_POPPED(cast->value);
9478 if (!popped) PUSH_INSN(ret, location, dup);
9479
9480 PUSH_INSN2(ret, location, setinstancevariable, name, get_ivar_ic_value(iseq, name_id));
9481 PUSH_LABEL(ret, end_label);
9482
9483 return;
9484 }
9485 case PM_INSTANCE_VARIABLE_READ_NODE: {
9486 // @foo
9487 // ^^^^
9488 if (!popped) {
9489 const pm_instance_variable_read_node_t *cast = (const pm_instance_variable_read_node_t *) node;
9490 ID name = pm_constant_id_lookup(scope_node, cast->name);
9491 PUSH_INSN2(ret, location, getinstancevariable, ID2SYM(name), get_ivar_ic_value(iseq, name));
9492 }
9493 return;
9494 }
9495 case PM_INSTANCE_VARIABLE_WRITE_NODE: {
9496 // @foo = 1
9497 // ^^^^^^^^
9498 const pm_instance_variable_write_node_t *cast = (const pm_instance_variable_write_node_t *) node;
9499 PM_COMPILE_NOT_POPPED(cast->value);
9500 if (!popped) PUSH_INSN(ret, location, dup);
9501
9502 ID name = pm_constant_id_lookup(scope_node, cast->name);
9503 PUSH_INSN2(ret, location, setinstancevariable, ID2SYM(name), get_ivar_ic_value(iseq, name));
9504
9505 return;
9506 }
9507 case PM_INTEGER_NODE: {
9508 // 1
9509 // ^
9510 if (!popped) {
9511 VALUE operand = parse_integer((const pm_integer_node_t *) node);
9512 PUSH_INSN1(ret, location, putobject, operand);
9513 }
9514 return;
9515 }
9516 case PM_INTERPOLATED_MATCH_LAST_LINE_NODE: {
9517 // if /foo #{bar}/ then end
9518 // ^^^^^^^^^^^^
9519 if (PM_NODE_FLAG_P(node, PM_NODE_FLAG_STATIC_LITERAL)) {
9520 if (!popped) {
9521 VALUE regexp = pm_static_literal_value(iseq, node, scope_node);
9522 PUSH_INSN1(ret, location, putobject, regexp);
9523 }
9524 }
9525 else {
9526 pm_compile_regexp_dynamic(iseq, node, &((const pm_interpolated_match_last_line_node_t *) node)->parts, &location, ret, popped, scope_node);
9527 }
9528
9529 PUSH_INSN1(ret, location, getglobal, rb_id2sym(idLASTLINE));
9530 PUSH_SEND(ret, location, idEqTilde, INT2NUM(1));
9531 if (popped) PUSH_INSN(ret, location, pop);
9532
9533 return;
9534 }
9535 case PM_INTERPOLATED_REGULAR_EXPRESSION_NODE: {
9536 // /foo #{bar}/
9537 // ^^^^^^^^^^^^
9538 if (PM_NODE_FLAG_P(node, PM_REGULAR_EXPRESSION_FLAGS_ONCE)) {
9539 const rb_iseq_t *prevblock = ISEQ_COMPILE_DATA(iseq)->current_block;
9540 const rb_iseq_t *block_iseq = NULL;
9541 int ise_index = ISEQ_BODY(iseq)->ise_size++;
9542
9543 pm_scope_node_t next_scope_node;
9544 pm_scope_node_init(node, &next_scope_node, scope_node);
9545
9546 block_iseq = NEW_CHILD_ISEQ(&next_scope_node, make_name_for_block(iseq), ISEQ_TYPE_PLAIN, location.line);
9547 pm_scope_node_destroy(&next_scope_node);
9548
9549 ISEQ_COMPILE_DATA(iseq)->current_block = block_iseq;
9550 PUSH_INSN2(ret, location, once, block_iseq, INT2FIX(ise_index));
9551 ISEQ_COMPILE_DATA(iseq)->current_block = prevblock;
9552
9553 if (popped) PUSH_INSN(ret, location, pop);
9554 return;
9555 }
9556
9557 if (PM_NODE_FLAG_P(node, PM_NODE_FLAG_STATIC_LITERAL)) {
9558 if (!popped) {
9559 VALUE regexp = pm_static_literal_value(iseq, node, scope_node);
9560 PUSH_INSN1(ret, location, putobject, regexp);
9561 }
9562 }
9563 else {
9564 pm_compile_regexp_dynamic(iseq, node, &((const pm_interpolated_regular_expression_node_t *) node)->parts, &location, ret, popped, scope_node);
9565 if (popped) PUSH_INSN(ret, location, pop);
9566 }
9567
9568 return;
9569 }
9570 case PM_INTERPOLATED_STRING_NODE: {
9571 // "foo #{bar}"
9572 // ^^^^^^^^^^^^
9573 if (PM_NODE_FLAG_P(node, PM_NODE_FLAG_STATIC_LITERAL)) {
9574 if (!popped) {
9575 VALUE string = pm_static_literal_value(iseq, node, scope_node);
9576
9577 if (PM_NODE_FLAG_P(node, PM_INTERPOLATED_STRING_NODE_FLAGS_FROZEN)) {
9578 PUSH_INSN1(ret, location, putobject, string);
9579 }
9580 else if (PM_NODE_FLAG_P(node, PM_INTERPOLATED_STRING_NODE_FLAGS_MUTABLE)) {
9581 PUSH_INSN1(ret, location, putstring, string);
9582 }
9583 else {
9584 PUSH_INSN1(ret, location, putchilledstring, string);
9585 }
9586 }
9587 }
9588 else {
9589 const pm_interpolated_string_node_t *cast = (const pm_interpolated_string_node_t *) node;
9590 int length = pm_interpolated_node_compile(iseq, &cast->parts, &location, ret, popped, scope_node, NULL, NULL, PM_NODE_FLAG_P(cast, PM_INTERPOLATED_STRING_NODE_FLAGS_MUTABLE), PM_NODE_FLAG_P(cast, PM_INTERPOLATED_STRING_NODE_FLAGS_FROZEN));
9591 if (length > 1) PUSH_INSN1(ret, location, concatstrings, INT2FIX(length));
9592 if (popped) PUSH_INSN(ret, location, pop);
9593 }
9594
9595 return;
9596 }
9597 case PM_INTERPOLATED_SYMBOL_NODE: {
9598 // :"foo #{bar}"
9599 // ^^^^^^^^^^^^^
9600 const pm_interpolated_symbol_node_t *cast = (const pm_interpolated_symbol_node_t *) node;
9601 int length = pm_interpolated_node_compile(iseq, &cast->parts, &location, ret, popped, scope_node, NULL, NULL, false, false);
9602
9603 if (length > 1) {
9604 PUSH_INSN1(ret, location, concatstrings, INT2FIX(length));
9605 }
9606
9607 if (!popped) {
9608 PUSH_INSN(ret, location, intern);
9609 }
9610 else {
9611 PUSH_INSN(ret, location, pop);
9612 }
9613
9614 return;
9615 }
9616 case PM_INTERPOLATED_X_STRING_NODE: {
9617 // `foo #{bar}`
9618 // ^^^^^^^^^^^^
9619 const pm_interpolated_x_string_node_t *cast = (const pm_interpolated_x_string_node_t *) node;
9620
9621 PUSH_INSN(ret, location, putself);
9622
9623 int length = pm_interpolated_node_compile(iseq, &cast->parts, &location, ret, false, scope_node, NULL, NULL, false, false);
9624 if (length > 1) PUSH_INSN1(ret, location, concatstrings, INT2FIX(length));
9625
9626 PUSH_SEND_WITH_FLAG(ret, location, idBackquote, INT2NUM(1), INT2FIX(VM_CALL_FCALL | VM_CALL_ARGS_SIMPLE));
9627 if (popped) PUSH_INSN(ret, location, pop);
9628
9629 return;
9630 }
9631 case PM_IT_LOCAL_VARIABLE_READ_NODE: {
9632 // -> { it }
9633 // ^^
9634 if (!popped) {
9635 pm_scope_node_t *current_scope_node = scope_node;
9636 int level = 0;
9637
9638 while (current_scope_node) {
9639 if (current_scope_node->parameters && PM_NODE_TYPE_P(current_scope_node->parameters, PM_IT_PARAMETERS_NODE)) {
9640 PUSH_GETLOCAL(ret, location, current_scope_node->local_table_for_iseq_size, level);
9641 return;
9642 }
9643
9644 current_scope_node = current_scope_node->previous;
9645 level++;
9646 }
9647 rb_bug("Local `it` does not exist");
9648 }
9649
9650 return;
9651 }
9652 case PM_KEYWORD_HASH_NODE: {
9653 // foo(bar: baz)
9654 // ^^^^^^^^
9655 const pm_keyword_hash_node_t *cast = (const pm_keyword_hash_node_t *) node;
9656 const pm_node_list_t *elements = &cast->elements;
9657
9658 const pm_node_t *element;
9659 PM_NODE_LIST_FOREACH(elements, index, element) {
9660 PM_COMPILE(element);
9661 }
9662
9663 if (!popped) PUSH_INSN1(ret, location, newhash, INT2FIX(elements->size * 2));
9664 return;
9665 }
9666 case PM_LAMBDA_NODE: {
9667 // -> {}
9668 // ^^^^^
9669 const pm_lambda_node_t *cast = (const pm_lambda_node_t *) node;
9670
9671 pm_scope_node_t next_scope_node;
9672 pm_scope_node_init(node, &next_scope_node, scope_node);
9673
9674 int opening_lineno = pm_location_line_number(parser, &cast->opening_loc);
9675 const rb_iseq_t *block = NEW_CHILD_ISEQ(&next_scope_node, make_name_for_block(iseq), ISEQ_TYPE_BLOCK, opening_lineno);
9676 pm_scope_node_destroy(&next_scope_node);
9677
9678 VALUE argc = INT2FIX(0);
9679 PUSH_INSN1(ret, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
9680 PUSH_CALL_WITH_BLOCK(ret, location, idLambda, argc, block);
9681 RB_OBJ_WRITTEN(iseq, Qundef, (VALUE) block);
9682
9683 if (popped) PUSH_INSN(ret, location, pop);
9684 return;
9685 }
9686 case PM_LOCAL_VARIABLE_AND_WRITE_NODE: {
9687 // foo &&= bar
9688 // ^^^^^^^^^^^
9689 const pm_local_variable_and_write_node_t *cast = (const pm_local_variable_and_write_node_t *) node;
9690 LABEL *end_label = NEW_LABEL(location.line);
9691
9692 pm_local_index_t local_index = pm_lookup_local_index(iseq, scope_node, cast->name, cast->depth);
9693 PUSH_GETLOCAL(ret, location, local_index.index, local_index.level);
9694 if (!popped) PUSH_INSN(ret, location, dup);
9695
9696 PUSH_INSNL(ret, location, branchunless, end_label);
9697 if (!popped) PUSH_INSN(ret, location, pop);
9698
9699 PM_COMPILE_NOT_POPPED(cast->value);
9700 if (!popped) PUSH_INSN(ret, location, dup);
9701
9702 PUSH_SETLOCAL(ret, location, local_index.index, local_index.level);
9703 PUSH_LABEL(ret, end_label);
9704
9705 return;
9706 }
9707 case PM_LOCAL_VARIABLE_OPERATOR_WRITE_NODE: {
9708 // foo += bar
9709 // ^^^^^^^^^^
9710 const pm_local_variable_operator_write_node_t *cast = (const pm_local_variable_operator_write_node_t *) node;
9711
9712 pm_local_index_t local_index = pm_lookup_local_index(iseq, scope_node, cast->name, cast->depth);
9713 PUSH_GETLOCAL(ret, location, local_index.index, local_index.level);
9714
9715 PM_COMPILE_NOT_POPPED(cast->value);
9716
9717 ID method_id = pm_constant_id_lookup(scope_node, cast->binary_operator);
9718 PUSH_SEND_WITH_FLAG(ret, location, method_id, INT2NUM(1), INT2FIX(VM_CALL_ARGS_SIMPLE));
9719
9720 if (!popped) PUSH_INSN(ret, location, dup);
9721 PUSH_SETLOCAL(ret, location, local_index.index, local_index.level);
9722
9723 return;
9724 }
9725 case PM_LOCAL_VARIABLE_OR_WRITE_NODE: {
9726 // foo ||= bar
9727 // ^^^^^^^^^^^
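// Note: the putobject true / branchunless pair below mirrors the
// defined?-style check emitted for other ||= targets. A local variable is
// always considered defined, so the branch is never taken and control
// falls through to read the current value of the local.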
9728 const pm_local_variable_or_write_node_t *cast = (const pm_local_variable_or_write_node_t *) node;
9729
9730 LABEL *set_label = NEW_LABEL(location.line);
9731 LABEL *end_label = NEW_LABEL(location.line);
9732
9733 PUSH_INSN1(ret, location, putobject, Qtrue);
9734 PUSH_INSNL(ret, location, branchunless, set_label);
9735
9736 pm_local_index_t local_index = pm_lookup_local_index(iseq, scope_node, cast->name, cast->depth);
9737 PUSH_GETLOCAL(ret, location, local_index.index, local_index.level);
9738 if (!popped) PUSH_INSN(ret, location, dup);
9739
9740 PUSH_INSNL(ret, location, branchif, end_label);
9741 if (!popped) PUSH_INSN(ret, location, pop);
9742
9743 PUSH_LABEL(ret, set_label);
9744 PM_COMPILE_NOT_POPPED(cast->value);
9745 if (!popped) PUSH_INSN(ret, location, dup);
9746
9747 PUSH_SETLOCAL(ret, location, local_index.index, local_index.level);
9748 PUSH_LABEL(ret, end_label);
9749
9750 return;
9751 }
9752 case PM_LOCAL_VARIABLE_READ_NODE: {
9753 // foo
9754 // ^^^
9755 if (!popped) {
9756 const pm_local_variable_read_node_t *cast = (const pm_local_variable_read_node_t *) node;
9757 pm_local_index_t index = pm_lookup_local_index(iseq, scope_node, cast->name, cast->depth);
9758 PUSH_GETLOCAL(ret, location, index.index, index.level);
9759 }
9760
9761 return;
9762 }
9763 case PM_LOCAL_VARIABLE_WRITE_NODE: {
9764 // foo = 1
9765 // ^^^^^^^
9766 const pm_local_variable_write_node_t *cast = (const pm_local_variable_write_node_t *) node;
9767 PM_COMPILE_NOT_POPPED(cast->value);
9768 if (!popped) PUSH_INSN(ret, location, dup);
9769
9770 pm_local_index_t index = pm_lookup_local_index(iseq, scope_node, cast->name, cast->depth);
9771 PUSH_SETLOCAL(ret, location, index.index, index.level);
9772 return;
9773 }
9774 case PM_MATCH_LAST_LINE_NODE: {
9775 // if /foo/ then end
9776 // ^^^^^
9777 VALUE regexp = pm_static_literal_value(iseq, node, scope_node);
9778
9779 PUSH_INSN1(ret, location, putobject, regexp);
9780 PUSH_INSN2(ret, location, getspecial, INT2FIX(0), INT2FIX(0));
9781 PUSH_SEND(ret, location, idEqTilde, INT2NUM(1));
9782 if (popped) PUSH_INSN(ret, location, pop);
9783
9784 return;
9785 }
9786 case PM_MATCH_PREDICATE_NODE: {
9787 // foo in bar
9788 // ^^^^^^^^^^
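// For illustration, `foo in bar` evaluates to true or false without
// raising: a nil slot is reserved to cache any #deconstruct result, the
// value is compiled and duplicated, and the pattern match branches to
// either the matched or unmatched label below, which push true or false
// respectively.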
9789 const pm_match_predicate_node_t *cast = (const pm_match_predicate_node_t *) node;
9790
9791 // First, allocate some stack space for the cached return value of any
9792 // calls to #deconstruct.
9793 PUSH_INSN(ret, location, putnil);
9794
9795 // Next, compile the expression that we're going to match against.
9796 PM_COMPILE_NOT_POPPED(cast->value);
9797 PUSH_INSN(ret, location, dup);
9798
9799 // Now compile the pattern that is going to be used to match against the
9800 // expression.
9801 LABEL *matched_label = NEW_LABEL(location.line);
9802 LABEL *unmatched_label = NEW_LABEL(location.line);
9803 LABEL *done_label = NEW_LABEL(location.line);
9804 pm_compile_pattern(iseq, scope_node, cast->pattern, ret, matched_label, unmatched_label, false, true, 2);
9805
9806 // If the pattern did not match, then compile the necessary instructions
9807 // to handle pushing false onto the stack, then jump to the end.
9808 PUSH_LABEL(ret, unmatched_label);
9809 PUSH_INSN(ret, location, pop);
9810 PUSH_INSN(ret, location, pop);
9811
9812 if (!popped) PUSH_INSN1(ret, location, putobject, Qfalse);
9813 PUSH_INSNL(ret, location, jump, done_label);
9814 PUSH_INSN(ret, location, putnil);
9815
9816 // If the pattern did match, then compile the necessary instructions to
9817 // handle pushing true onto the stack, then jump to the end.
9818 PUSH_LABEL(ret, matched_label);
9819 PUSH_INSN1(ret, location, adjuststack, INT2FIX(2));
9820 if (!popped) PUSH_INSN1(ret, location, putobject, Qtrue);
9821 PUSH_INSNL(ret, location, jump, done_label);
9822
9823 PUSH_LABEL(ret, done_label);
9824 return;
9825 }
9826 case PM_MATCH_REQUIRED_NODE:
9827 // foo => bar
9828 // ^^^^^^^^^^
9829 //
9830 // A match required node represents pattern matching against a single
9831 // pattern using the => operator. For example,
9832 //
9833 // foo => bar
9834 //
9835 // This is somewhat analogous to compiling a case match statement with a
9836 // single pattern. In both cases, if the pattern fails it should
9837 // immediately raise an error.
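// For example, `42 => Integer` matches and evaluates to nil, while
// `42 => String` raises NoMatchingPatternError at runtime.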
9838 pm_compile_match_required_node(iseq, (const pm_match_required_node_t *) node, &location, ret, popped, scope_node);
9839 return;
9840 case PM_MATCH_WRITE_NODE:
9841 // /(?<foo>foo)/ =~ bar
9842 // ^^^^^^^^^^^^^^^^^^^^
9843 //
9844 // Match write nodes are specialized call nodes that have a regular
9845 // expression with valid named capture groups on the left, the =~
9846 // operator, and some value on the right. The nodes themselves simply
9847 // wrap the call with the local variable targets that will be written
9848 // when the call is executed.
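// For example, `/(?<foo>\d+)/ =~ "42"` performs the match and then writes
// the named capture into the local variable `foo` ("42" here), or nil when
// the match fails.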
9849 pm_compile_match_write_node(iseq, (const pm_match_write_node_t *) node, &location, ret, popped, scope_node);
9850 return;
9851 case PM_MISSING_NODE:
9852 rb_bug("A pm_missing_node_t should not exist in prism's AST.");
9853 return;
9854 case PM_MODULE_NODE: {
9855 // module Foo; end
9856 // ^^^^^^^^^^^^^^^
9857 const pm_module_node_t *cast = (const pm_module_node_t *) node;
9858
9859 ID module_id = pm_constant_id_lookup(scope_node, cast->name);
9860 VALUE module_name = rb_str_freeze(rb_sprintf("<module:%"PRIsVALUE">", rb_id2str(module_id)));
9861
9862 pm_scope_node_t next_scope_node;
9863 pm_scope_node_init((const pm_node_t *) cast, &next_scope_node, scope_node);
9864
9865 const rb_iseq_t *module_iseq = NEW_CHILD_ISEQ(&next_scope_node, module_name, ISEQ_TYPE_CLASS, location.line);
9866 pm_scope_node_destroy(&next_scope_node);
9867
9868 const int flags = VM_DEFINECLASS_TYPE_MODULE | pm_compile_class_path(iseq, cast->constant_path, &location, ret, false, scope_node);
9869 PUSH_INSN(ret, location, putnil);
9870 PUSH_INSN3(ret, location, defineclass, ID2SYM(module_id), module_iseq, INT2FIX(flags));
9871 RB_OBJ_WRITTEN(iseq, Qundef, (VALUE) module_iseq);
9872
9873 if (popped) PUSH_INSN(ret, location, pop);
9874 return;
9875 }
9876 case PM_REQUIRED_PARAMETER_NODE: {
9877 // def foo(bar); end
9878 // ^^^
9879 const pm_required_parameter_node_t *cast = (const pm_required_parameter_node_t *) node;
9880 pm_local_index_t index = pm_lookup_local_index(iseq, scope_node, cast->name, 0);
9881
9882 PUSH_SETLOCAL(ret, location, index.index, index.level);
9883 return;
9884 }
9885 case PM_MULTI_WRITE_NODE: {
9886 // foo, bar = baz
9887 // ^^^^^^^^^^^^^^
9888 //
9889 // A multi write node represents writing to multiple targets using an =
9890 // operator. Importantly, these nodes are only parsed when the left-hand
9891 // side of the operator has multiple targets. A right-hand side with
9892 // multiple values is represented as an implicit array
9893 // instead.
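// For illustration, `foo, bar = baz` (not popped) is emitted below roughly
// as:
//
//   <compile baz>
//   dup
//   expandarray 2, 0
//   setlocal foo
//   setlocal bar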
9894 const pm_multi_write_node_t *cast = (const pm_multi_write_node_t *) node;
9895
9896 DECL_ANCHOR(writes);
9897 DECL_ANCHOR(cleanup);
9898
9899 pm_multi_target_state_t state = { 0 };
9900 state.position = popped ? 0 : 1;
9901 pm_compile_multi_target_node(iseq, node, ret, writes, cleanup, scope_node, &state);
9902
9903 PM_COMPILE_NOT_POPPED(cast->value);
9904 if (!popped) PUSH_INSN(ret, location, dup);
9905
9906 PUSH_SEQ(ret, writes);
9907 if (!popped && state.stack_size >= 1) {
9908 // Make sure the value on the right-hand side of the = operator is
9909 // being returned before we pop the parent expressions.
9910 PUSH_INSN1(ret, location, setn, INT2FIX(state.stack_size));
9911 }
9912
9913 // Now, we need to go back and modify the topn instructions in order to
9914 // ensure they can correctly retrieve the parent expressions.
9915 pm_multi_target_state_update(&state);
9916
9917 PUSH_SEQ(ret, cleanup);
9918 return;
9919 }
9920 case PM_NEXT_NODE:
9921 // next
9922 // ^^^^
9923 //
9924 // next foo
9925 // ^^^^^^^^
9926 pm_compile_next_node(iseq, (const pm_next_node_t *) node, &location, ret, popped, scope_node);
9927 return;
9928 case PM_NIL_NODE: {
9929 // nil
9930 // ^^^
9931 if (!popped) {
9932 PUSH_INSN(ret, location, putnil);
9933 }
9934
9935 return;
9936 }
9937 case PM_NO_KEYWORDS_PARAMETER_NODE: {
9938 // def foo(**nil); end
9939 // ^^^^^
9940 ISEQ_BODY(iseq)->param.flags.accepts_no_kwarg = TRUE;
9941 return;
9942 }
9943 case PM_NUMBERED_REFERENCE_READ_NODE: {
9944 // $1
9945 // ^^
9946 if (!popped) {
9947 const pm_numbered_reference_read_node_t *cast = (const pm_numbered_reference_read_node_t *) node;
9948
9949 if (cast->number != 0) {
9950 VALUE ref = pm_compile_numbered_reference_ref(cast);
9951 PUSH_INSN2(ret, location, getspecial, INT2FIX(1), ref);
9952 }
9953 else {
9954 PUSH_INSN(ret, location, putnil);
9955 }
9956 }
9957
9958 return;
9959 }
9960 case PM_OR_NODE: {
9961 // a or b
9962 // ^^^^^^
9963 const pm_or_node_t *cast = (const pm_or_node_t *) node;
9964
9965 LABEL *end_label = NEW_LABEL(location.line);
9966 PM_COMPILE_NOT_POPPED(cast->left);
9967
9968 if (!popped) PUSH_INSN(ret, location, dup);
9969 PUSH_INSNL(ret, location, branchif, end_label);
9970
9971 if (!popped) PUSH_INSN(ret, location, pop);
9972 PM_COMPILE(cast->right);
9973 PUSH_LABEL(ret, end_label);
9974
9975 return;
9976 }
9977 case PM_OPTIONAL_PARAMETER_NODE: {
9978 // def foo(bar = 1); end
9979 // ^^^^^^^
9980 const pm_optional_parameter_node_t *cast = (const pm_optional_parameter_node_t *) node;
9981 PM_COMPILE_NOT_POPPED(cast->value);
9982
9983 pm_local_index_t index = pm_lookup_local_index(iseq, scope_node, cast->name, 0);
9984 PUSH_SETLOCAL(ret, location, index.index, index.level);
9985
9986 return;
9987 }
9988 case PM_PARENTHESES_NODE: {
9989 // ()
9990 // ^^
9991 //
9992 // (1)
9993 // ^^^
9994 const pm_parentheses_node_t *cast = (const pm_parentheses_node_t *) node;
9995
9996 if (cast->body != NULL) {
9997 PM_COMPILE(cast->body);
9998 }
9999 else if (!popped) {
10000 PUSH_INSN(ret, location, putnil);
10001 }
10002
10003 return;
10004 }
10005 case PM_PRE_EXECUTION_NODE: {
10006 // BEGIN {}
10007 // ^^^^^^^^
10008 const pm_pre_execution_node_t *cast = (const pm_pre_execution_node_t *) node;
10009
10010 LINK_ANCHOR *outer_pre = scope_node->pre_execution_anchor;
10011 RUBY_ASSERT(outer_pre != NULL);
10012
10013 // BEGIN{} nodes can be nested, so here we're going to do the same thing
10014 // that we did for the top-level compilation where we create two
10015 // anchors and then join them in the correct order into the resulting
10016 // anchor.
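// In other words, a BEGIN{} nested inside this one appends its statements
// to the outer "pre" anchor ahead of this block's own (popped) statements,
// and the outer anchor is restored afterwards so that any later BEGIN{}
// blocks are collected correctly.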
10017 DECL_ANCHOR(inner_pre);
10018 scope_node->pre_execution_anchor = inner_pre;
10019
10020 DECL_ANCHOR(inner_body);
10021
10022 if (cast->statements != NULL) {
10023 const pm_node_list_t *body = &cast->statements->body;
10024
10025 for (size_t index = 0; index < body->size; index++) {
10026 pm_compile_node(iseq, body->nodes[index], inner_body, true, scope_node);
10027 }
10028 }
10029
10030 if (!popped) {
10031 PUSH_INSN(inner_body, location, putnil);
10032 }
10033
10034 // Now that everything has been compiled, join both anchors together
10035 // into the correct outer pre execution anchor, and reset the value so
10036 // that subsequent BEGIN{} nodes can be compiled correctly.
10037 PUSH_SEQ(outer_pre, inner_pre);
10038 PUSH_SEQ(outer_pre, inner_body);
10039 scope_node->pre_execution_anchor = outer_pre;
10040
10041 return;
10042 }
10043 case PM_POST_EXECUTION_NODE: {
10044 // END {}
10045 // ^^^^^^
10046 const rb_iseq_t *child_iseq;
10047 const rb_iseq_t *prevblock = ISEQ_COMPILE_DATA(iseq)->current_block;
10048
10049 pm_scope_node_t next_scope_node;
10050 pm_scope_node_init(node, &next_scope_node, scope_node);
10051 child_iseq = NEW_CHILD_ISEQ(&next_scope_node, make_name_for_block(iseq), ISEQ_TYPE_BLOCK, lineno);
10052 pm_scope_node_destroy(&next_scope_node);
10053
10054 ISEQ_COMPILE_DATA(iseq)->current_block = child_iseq;
10055
10056 int is_index = ISEQ_BODY(iseq)->ise_size++;
10057 PUSH_INSN2(ret, location, once, child_iseq, INT2FIX(is_index));
10058 RB_OBJ_WRITTEN(iseq, Qundef, (VALUE) child_iseq);
10059 if (popped) PUSH_INSN(ret, location, pop);
10060
10061 ISEQ_COMPILE_DATA(iseq)->current_block = prevblock;
10062
10063 return;
10064 }
10065 case PM_RANGE_NODE: {
10066 // 0..5
10067 // ^^^^
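// When both bounds are optimizable literals (integer literals, nil, or
// omitted), the Range is built at compile time, marked shareable, and
// embedded with a single putobject (e.g. `0..5` becomes `putobject 0..5`).
// Otherwise both bounds are compiled and newrange builds it at runtime.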
10068 const pm_range_node_t *cast = (const pm_range_node_t *) node;
10069 bool exclude_end = PM_NODE_FLAG_P(cast, PM_RANGE_FLAGS_EXCLUDE_END);
10070
10071 if (pm_optimizable_range_item_p(cast->left) && pm_optimizable_range_item_p(cast->right)) {
10072 if (!popped) {
10073 const pm_node_t *left = cast->left;
10074 const pm_node_t *right = cast->right;
10075
10076 VALUE val = rb_range_new(
10077 (left && PM_NODE_TYPE_P(left, PM_INTEGER_NODE)) ? parse_integer((const pm_integer_node_t *) left) : Qnil,
10078 (right && PM_NODE_TYPE_P(right, PM_INTEGER_NODE)) ? parse_integer((const pm_integer_node_t *) right) : Qnil,
10079 exclude_end
10080 );
10081
10082 RB_OBJ_SET_SHAREABLE(val);
10083 PUSH_INSN1(ret, location, putobject, val);
10084 }
10085 }
10086 else {
10087 if (cast->left != NULL) {
10088 PM_COMPILE(cast->left);
10089 }
10090 else if (!popped) {
10091 PUSH_INSN(ret, location, putnil);
10092 }
10093
10094 if (cast->right != NULL) {
10095 PM_COMPILE(cast->right);
10096 }
10097 else if (!popped) {
10098 PUSH_INSN(ret, location, putnil);
10099 }
10100
10101 if (!popped) {
10102 PUSH_INSN1(ret, location, newrange, INT2FIX(exclude_end ? 1 : 0));
10103 }
10104 }
10105 return;
10106 }
10107 case PM_RATIONAL_NODE: {
10108 // 1r
10109 // ^^
10110 if (!popped) {
10111 PUSH_INSN1(ret, location, putobject, parse_rational((const pm_rational_node_t *) node));
10112 }
10113 return;
10114 }
10115 case PM_REDO_NODE:
10116 // redo
10117 // ^^^^
10118 pm_compile_redo_node(iseq, &location, ret, popped, scope_node);
10119 return;
10120 case PM_REGULAR_EXPRESSION_NODE: {
10121 // /foo/
10122 // ^^^^^
10123 if (!popped) {
10124 VALUE regexp = pm_static_literal_value(iseq, node, scope_node);
10125 PUSH_INSN1(ret, location, putobject, regexp);
10126 }
10127 return;
10128 }
10129 case PM_RESCUE_NODE:
10130 // begin; rescue; end
10131 // ^^^^^^^
10132 pm_compile_rescue_node(iseq, (const pm_rescue_node_t *) node, &location, ret, popped, scope_node);
10133 return;
10134 case PM_RESCUE_MODIFIER_NODE: {
10135 // foo rescue bar
10136 // ^^^^^^^^^^^^^^
10137 const pm_rescue_modifier_node_t *cast = (const pm_rescue_modifier_node_t *) node;
10138
10139 pm_scope_node_t rescue_scope_node;
10140 pm_scope_node_init((const pm_node_t *) cast, &rescue_scope_node, scope_node);
10141
10142 rb_iseq_t *rescue_iseq = NEW_CHILD_ISEQ(
10143 &rescue_scope_node,
10144 rb_str_concat(rb_str_new2("rescue in "), ISEQ_BODY(iseq)->location.label),
10145 ISEQ_TYPE_RESCUE,
10146 pm_node_line_number(parser, cast->rescue_expression)
10147 );
10148
10149 pm_scope_node_destroy(&rescue_scope_node);
10150
10151 LABEL *lstart = NEW_LABEL(location.line);
10152 LABEL *lend = NEW_LABEL(location.line);
10153 LABEL *lcont = NEW_LABEL(location.line);
10154
10155 lstart->rescued = LABEL_RESCUE_BEG;
10156 lend->rescued = LABEL_RESCUE_END;
10157
10158 PUSH_LABEL(ret, lstart);
10159 PM_COMPILE_NOT_POPPED(cast->expression);
10160 PUSH_LABEL(ret, lend);
10161
10162 PUSH_INSN(ret, location, nop);
10163 PUSH_LABEL(ret, lcont);
10164 if (popped) PUSH_INSN(ret, location, pop);
10165
10166 PUSH_CATCH_ENTRY(CATCH_TYPE_RESCUE, lstart, lend, rescue_iseq, lcont);
10167 PUSH_CATCH_ENTRY(CATCH_TYPE_RETRY, lend, lcont, NULL, lstart);
10168 return;
10169 }
10170 case PM_RETURN_NODE:
10171 // return
10172 // ^^^^^^
10173 //
10174 // return 1
10175 // ^^^^^^^^
10176 pm_compile_return_node(iseq, (const pm_return_node_t *) node, &location, ret, popped, scope_node);
10177 return;
10178 case PM_RETRY_NODE: {
10179 // retry
10180 // ^^^^^
10181 if (ISEQ_BODY(iseq)->type == ISEQ_TYPE_RESCUE) {
10182 PUSH_INSN(ret, location, putnil);
10183 PUSH_INSN1(ret, location, throw, INT2FIX(TAG_RETRY));
10184 if (popped) PUSH_INSN(ret, location, pop);
10185 }
10186 else {
10187 COMPILE_ERROR(iseq, location.line, "Invalid retry");
10188 return;
10189 }
10190 return;
10191 }
10192 case PM_SCOPE_NODE:
10193 pm_compile_scope_node(iseq, (pm_scope_node_t *) node, &location, ret, popped);
10194 return;
10195 case PM_SELF_NODE: {
10196 // self
10197 // ^^^^
10198 if (!popped) {
10199 PUSH_INSN(ret, location, putself);
10200 }
10201 return;
10202 }
10203 case PM_SHAREABLE_CONSTANT_NODE: {
10204 // A constant write whose value may be marked as Ractor-shareable,
10205 // depending on the current lexical context.
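// For example, under a `# shareable_constant_value: literal` magic comment,
// `FOO = [1, 2]` is wrapped in a shareable constant node so that the
// assigned value is made Ractor-shareable before the constant write.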
10206 const pm_shareable_constant_node_t *cast = (const pm_shareable_constant_node_t *) node;
10207 pm_node_flags_t shareability = (cast->base.flags & (PM_SHAREABLE_CONSTANT_NODE_FLAGS_LITERAL | PM_SHAREABLE_CONSTANT_NODE_FLAGS_EXPERIMENTAL_EVERYTHING | PM_SHAREABLE_CONSTANT_NODE_FLAGS_EXPERIMENTAL_COPY));
10208
10209 switch (PM_NODE_TYPE(cast->write)) {
10210 case PM_CONSTANT_WRITE_NODE:
10211 pm_compile_constant_write_node(iseq, (const pm_constant_write_node_t *) cast->write, shareability, &location, ret, popped, scope_node);
10212 break;
10213 case PM_CONSTANT_AND_WRITE_NODE:
10214 pm_compile_constant_and_write_node(iseq, (const pm_constant_and_write_node_t *) cast->write, shareability, &location, ret, popped, scope_node);
10215 break;
10216 case PM_CONSTANT_OR_WRITE_NODE:
10217 pm_compile_constant_or_write_node(iseq, (const pm_constant_or_write_node_t *) cast->write, shareability, &location, ret, popped, scope_node);
10218 break;
10219 case PM_CONSTANT_OPERATOR_WRITE_NODE:
10220 pm_compile_constant_operator_write_node(iseq, (const pm_constant_operator_write_node_t *) cast->write, shareability, &location, ret, popped, scope_node);
10221 break;
10222 case PM_CONSTANT_PATH_WRITE_NODE:
10223 pm_compile_constant_path_write_node(iseq, (const pm_constant_path_write_node_t *) cast->write, shareability, &location, ret, popped, scope_node);
10224 break;
10225 case PM_CONSTANT_PATH_AND_WRITE_NODE:
10226 pm_compile_constant_path_and_write_node(iseq, (const pm_constant_path_and_write_node_t *) cast->write, shareability, &location, ret, popped, scope_node);
10227 break;
10228 case PM_CONSTANT_PATH_OR_WRITE_NODE:
10229 pm_compile_constant_path_or_write_node(iseq, (const pm_constant_path_or_write_node_t *) cast->write, shareability, &location, ret, popped, scope_node);
10230 break;
10231 case PM_CONSTANT_PATH_OPERATOR_WRITE_NODE:
10232 pm_compile_constant_path_operator_write_node(iseq, (const pm_constant_path_operator_write_node_t *) cast->write, shareability, &location, ret, popped, scope_node);
10233 break;
10234 default:
10235 rb_bug("Unexpected node type for shareable constant write: %s", pm_node_type_to_str(PM_NODE_TYPE(cast->write)));
10236 break;
10237 }
10238
10239 return;
10240 }
10241 case PM_SINGLETON_CLASS_NODE: {
10242 // class << self; end
10243 // ^^^^^^^^^^^^^^^^^^
10244 const pm_singleton_class_node_t *cast = (const pm_singleton_class_node_t *) node;
10245
10246 pm_scope_node_t next_scope_node;
10247 pm_scope_node_init((const pm_node_t *) cast, &next_scope_node, scope_node);
10248 const rb_iseq_t *child_iseq = NEW_ISEQ(&next_scope_node, rb_fstring_lit("singleton class"), ISEQ_TYPE_CLASS, location.line);
10249 pm_scope_node_destroy(&next_scope_node);
10250
10251 PM_COMPILE_NOT_POPPED(cast->expression);
10252 PUSH_INSN(ret, location, putnil);
10253
10254 ID singletonclass;
10255 CONST_ID(singletonclass, "singletonclass");
10256 PUSH_INSN3(ret, location, defineclass, ID2SYM(singletonclass), child_iseq, INT2FIX(VM_DEFINECLASS_TYPE_SINGLETON_CLASS));
10257
10258 if (popped) PUSH_INSN(ret, location, pop);
10259 RB_OBJ_WRITTEN(iseq, Qundef, (VALUE) child_iseq);
10260
10261 return;
10262 }
10263 case PM_SOURCE_ENCODING_NODE: {
10264 // __ENCODING__
10265 // ^^^^^^^^^^^^
10266 if (!popped) {
10267 VALUE value = pm_static_literal_value(iseq, node, scope_node);
10268 PUSH_INSN1(ret, location, putobject, value);
10269 }
10270 return;
10271 }
10272 case PM_SOURCE_FILE_NODE: {
10273 // __FILE__
10274 // ^^^^^^^^
10275 if (!popped) {
10276 const pm_source_file_node_t *cast = (const pm_source_file_node_t *) node;
10277 VALUE string = pm_source_file_value(cast, scope_node);
10278
10279 if (PM_NODE_FLAG_P(cast, PM_STRING_FLAGS_FROZEN)) {
10280 PUSH_INSN1(ret, location, putobject, string);
10281 }
10282 else if (PM_NODE_FLAG_P(cast, PM_STRING_FLAGS_MUTABLE)) {
10283 PUSH_INSN1(ret, location, putstring, string);
10284 }
10285 else {
10286 PUSH_INSN1(ret, location, putchilledstring, string);
10287 }
10288 }
10289 return;
10290 }
10291 case PM_SOURCE_LINE_NODE: {
10292 // __LINE__
10293 // ^^^^^^^^
10294 if (!popped) {
10295 VALUE value = pm_static_literal_value(iseq, node, scope_node);
10296 PUSH_INSN1(ret, location, putobject, value);
10297 }
10298 return;
10299 }
10300 case PM_SPLAT_NODE: {
10301 // foo(*bar)
10302 // ^^^^
10303 const pm_splat_node_t *cast = (const pm_splat_node_t *) node;
10304 if (cast->expression) {
10305 PM_COMPILE(cast->expression);
10306 }
10307
10308 if (!popped) {
10309 PUSH_INSN1(ret, location, splatarray, Qtrue);
10310 }
10311 return;
10312 }
10313 case PM_STATEMENTS_NODE: {
10314 // A list of statements.
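// Every statement except the last is compiled popped, since only its side
// effects matter; the final statement is compiled with the surrounding
// popped flag so that it can provide the value of the whole list.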
10315 const pm_statements_node_t *cast = (const pm_statements_node_t *) node;
10316 const pm_node_list_t *body = &cast->body;
10317
10318 if (body->size > 0) {
10319 for (size_t index = 0; index < body->size - 1; index++) {
10320 PM_COMPILE_POPPED(body->nodes[index]);
10321 }
10322 PM_COMPILE(body->nodes[body->size - 1]);
10323 }
10324 else {
10325 PUSH_INSN(ret, location, putnil);
10326 }
10327 return;
10328 }
10329 case PM_STRING_NODE: {
10330 // "foo"
10331 // ^^^^^
10332 if (!popped) {
10333 const pm_string_node_t *cast = (const pm_string_node_t *) node;
10334 VALUE value = parse_static_literal_string(iseq, scope_node, node, &cast->unescaped);
10335
10336 if (PM_NODE_FLAG_P(node, PM_STRING_FLAGS_FROZEN)) {
10337 PUSH_INSN1(ret, location, putobject, value);
10338 }
10339 else if (PM_NODE_FLAG_P(node, PM_STRING_FLAGS_MUTABLE)) {
10340 PUSH_INSN1(ret, location, putstring, value);
10341 }
10342 else {
10343 PUSH_INSN1(ret, location, putchilledstring, value);
10344 }
10345 }
10346 return;
10347 }
10348 case PM_SUPER_NODE:
10349 // super()
10350 // super(foo)
10351 // super(...)
10352 pm_compile_super_node(iseq, (const pm_super_node_t *) node, &location, ret, popped, scope_node);
10353 return;
10354 case PM_SYMBOL_NODE: {
10355 // :foo
10356 // ^^^^
10357 if (!popped) {
10358 VALUE value = pm_static_literal_value(iseq, node, scope_node);
10359 PUSH_INSN1(ret, location, putobject, value);
10360 }
10361 return;
10362 }
10363 case PM_TRUE_NODE: {
10364 // true
10365 // ^^^^
10366 if (!popped) {
10367 PUSH_INSN1(ret, location, putobject, Qtrue);
10368 }
10369 return;
10370 }
10371 case PM_UNDEF_NODE: {
10372 // undef foo
10373 // ^^^^^^^^^
10374 const pm_undef_node_t *cast = (const pm_undef_node_t *) node;
10375 const pm_node_list_t *names = &cast->names;
10376
10377 for (size_t index = 0; index < names->size; index++) {
10378 PUSH_INSN1(ret, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
10379 PUSH_INSN1(ret, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_CBASE));
10380
10381 PM_COMPILE_NOT_POPPED(names->nodes[index]);
10382 PUSH_SEND(ret, location, id_core_undef_method, INT2NUM(2));
10383
10384 if (index < names->size - 1) {
10385 PUSH_INSN(ret, location, pop);
10386 }
10387 }
10388
10389 if (popped) PUSH_INSN(ret, location, pop);
10390 return;
10391 }
10392 case PM_UNLESS_NODE: {
10393 // unless foo; bar end
10394 // ^^^^^^^^^^^^^^^^^^^
10395 //
10396 // bar unless foo
10397 // ^^^^^^^^^^^^^^
10398 const pm_unless_node_t *cast = (const pm_unless_node_t *) node;
10399 const pm_statements_node_t *statements = NULL;
10400 if (cast->else_clause != NULL) {
10401 statements = ((const pm_else_node_t *) cast->else_clause)->statements;
10402 }
10403
10404 pm_compile_conditional(iseq, &location, PM_UNLESS_NODE, (const pm_node_t *) cast, statements, (const pm_node_t *) cast->statements, cast->predicate, ret, popped, scope_node);
10405 return;
10406 }
10407 case PM_UNTIL_NODE: {
10408 // until foo; bar end
10409 // ^^^^^^^^^^^^^^^^^
10410 //
10411 // bar until foo
10412 // ^^^^^^^^^^^^^
10413 const pm_until_node_t *cast = (const pm_until_node_t *) node;
10414 pm_compile_loop(iseq, &location, cast->base.flags, PM_UNTIL_NODE, (const pm_node_t *) cast, cast->statements, cast->predicate, ret, popped, scope_node);
10415 return;
10416 }
10417 case PM_WHILE_NODE: {
10418 // while foo; bar end
10419 // ^^^^^^^^^^^^^^^^^^
10420 //
10421 // bar while foo
10422 // ^^^^^^^^^^^^^
10423 const pm_while_node_t *cast = (const pm_while_node_t *) node;
10424 pm_compile_loop(iseq, &location, cast->base.flags, PM_WHILE_NODE, (const pm_node_t *) cast, cast->statements, cast->predicate, ret, popped, scope_node);
10425 return;
10426 }
10427 case PM_X_STRING_NODE: {
10428 // `foo`
10429 // ^^^^^
10430 const pm_x_string_node_t *cast = (const pm_x_string_node_t *) node;
10431 VALUE value = parse_static_literal_string(iseq, scope_node, node, &cast->unescaped);
10432
10433 PUSH_INSN(ret, location, putself);
10434 PUSH_INSN1(ret, location, putobject, value);
10435 PUSH_SEND_WITH_FLAG(ret, location, idBackquote, INT2NUM(1), INT2FIX(VM_CALL_FCALL | VM_CALL_ARGS_SIMPLE));
10436 if (popped) PUSH_INSN(ret, location, pop);
10437
10438 return;
10439 }
10440 case PM_YIELD_NODE:
10441 // yield
10442 // ^^^^^
10443 //
10444 // yield 1
10445 // ^^^^^^^
10446 pm_compile_yield_node(iseq, (const pm_yield_node_t *) node, &location, ret, popped, scope_node);
10447 return;
10448 default:
10449 rb_raise(rb_eNotImpError, "node type %s not implemented", pm_node_type_to_str(PM_NODE_TYPE(node)));
10450 return;
10451 }
10452}
10453
10454#undef PM_CONTAINER_P
10455
10457static inline bool
10458pm_iseq_pre_execution_p(rb_iseq_t *iseq)
10459{
10460 switch (ISEQ_BODY(iseq)->type) {
10461 case ISEQ_TYPE_TOP:
10462 case ISEQ_TYPE_EVAL:
10463 case ISEQ_TYPE_MAIN:
10464 return true;
10465 default:
10466 return false;
10467 }
10468}
10469
10477VALUE
10478pm_iseq_compile_node(rb_iseq_t *iseq, pm_scope_node_t *node)
10479{
10480 DECL_ANCHOR(ret);
10481
10482 if (pm_iseq_pre_execution_p(iseq)) {
10483 // Because these ISEQs can have BEGIN{}, we're going to create two
10484 // anchors to compile them, a "pre" and a "body". We'll mark the "pre"
10485 // on the scope node so that when BEGIN{} is found, its contents will be
10486 // added to the "pre" anchor.
10487 DECL_ANCHOR(pre);
10488 node->pre_execution_anchor = pre;
10489
10490 // Now we'll compile the body as normal. We won't compile directly into
10491 // the "ret" anchor yet because we want to add the "pre" anchor to the
10492 // beginning of the "ret" anchor first.
10493 DECL_ANCHOR(body);
10494 pm_compile_node(iseq, (const pm_node_t *) node, body, false, node);
10495
10496 // Now we'll join both anchors together so that the content is in the
10497 // correct order.
10498 PUSH_SEQ(ret, pre);
10499 PUSH_SEQ(ret, body);
10500 }
10501 else {
10502 // In other circumstances, we can just compile the node directly into
10503 // the "ret" anchor.
10504 pm_compile_node(iseq, (const pm_node_t *) node, ret, false, node);
10505 }
10506
10507 CHECK(iseq_setup_insn(iseq, ret));
10508 return iseq_setup(iseq, ret);
10509}
10510
10515void
10516pm_parse_result_free(pm_parse_result_t *result)
10517{
10518 if (result->node.ast_node != NULL) {
10519 pm_node_destroy(&result->parser, result->node.ast_node);
10520 }
10521
10522 if (result->parsed) {
10523 xfree(result->node.constants);
10524 pm_scope_node_destroy(&result->node);
10525 }
10526
10527 pm_parser_free(&result->parser);
10528 pm_string_free(&result->input);
10529 pm_options_free(&result->options);
10530}
10531
10533 typedef struct {
10534 /** The underlying diagnostic from the parser. */
10535 pm_diagnostic_t *error;
10536
10537 /** The line number where the diagnostic starts. */
10538 int32_t line;
10539
10540 /** The column where the diagnostic starts. */
10541 uint32_t column_start;
10542
10543 /** The column where the diagnostic ends. */
10544 uint32_t column_end;
10545} pm_parse_error_t;
10546
10548 typedef struct {
10549 /** The printf format used to render a line number prefix. */
10550 const char *number_prefix;
10551
10552 /** The prefix used for lines without a line number. */
10553 const char *blank_prefix;
10554
10555 /** The divider rendered between non-adjacent chunks of source. */
10556 const char *divider;
10557
10558 /** The byte length of blank_prefix. */
10559 size_t blank_prefix_length;
10560
10561 /** The byte length of divider. */
10562 size_t divider_length;
10563} pm_parse_error_format_t;
10564
10565#define PM_COLOR_BOLD "\033[1m"
10566#define PM_COLOR_GRAY "\033[2m"
10567#define PM_COLOR_RED "\033[1;31m"
10568#define PM_COLOR_RESET "\033[m"
10569#define PM_ERROR_TRUNCATE 30
10570
10571static inline pm_parse_error_t *
10572pm_parse_errors_format_sort(const pm_parser_t *parser, const pm_list_t *error_list, const pm_newline_list_t *newline_list) {
10573 pm_parse_error_t *errors = xcalloc(error_list->size, sizeof(pm_parse_error_t));
10574 if (errors == NULL) return NULL;
10575
10576 int32_t start_line = parser->start_line;
10577 pm_diagnostic_t *finish = (pm_diagnostic_t *) error_list->tail->next;
10578
10579 for (pm_diagnostic_t *error = (pm_diagnostic_t *) error_list->head; error != finish; error = (pm_diagnostic_t *) error->node.next) {
10580 pm_line_column_t start = pm_newline_list_line_column(newline_list, error->location.start, start_line);
10581 pm_line_column_t end = pm_newline_list_line_column(newline_list, error->location.end, start_line);
10582
10583 // We're going to insert this error into the array in sorted order. We
10584 // do this by finding the first error that has a line number greater
10585 // than the current error and then inserting the current error before
10586 // that one.
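// For example, diagnostics reported on lines 5, 2, and 9 end up ordered as
// 2, 5, 9; two diagnostics on the same line are ordered by starting column.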
10587 size_t index = 0;
10588 while (
10589 (index < error_list->size) &&
10590 (errors[index].error != NULL) &&
10591 (
10592 (errors[index].line < start.line) ||
10593 ((errors[index].line == start.line) && (errors[index].column_start < start.column))
10594 )
10595 ) index++;
10596
10597 // Now we're going to shift all of the errors after this one down one
10598 // index to make room for the new error.
10599 if (index + 1 < error_list->size) {
10600 memmove(&errors[index + 1], &errors[index], sizeof(pm_parse_error_t) * (error_list->size - index - 1));
10601 }
10602
10603 // Finally, we'll insert the error into the array.
10604 uint32_t column_end;
10605 if (start.line == end.line) {
10606 column_end = end.column;
10607 } else {
10608 column_end = (uint32_t) (newline_list->offsets[start.line - start_line + 1] - newline_list->offsets[start.line - start_line] - 1);
10609 }
10610
10611 // Ensure we have at least one column of error.
10612 if (start.column == column_end) column_end++;
10613
10614 errors[index] = (pm_parse_error_t) {
10615 .error = error,
10616 .line = start.line,
10617 .column_start = start.column,
10618 .column_end = column_end
10619 };
10620 }
10621
10622 return errors;
10623}
10624
10625/* Append a literal string to the buffer. */
10626#define pm_buffer_append_literal(buffer, str) pm_buffer_append_string(buffer, str, rb_strlen_lit(str))
10627
10628static inline void
10629pm_parse_errors_format_line(const pm_parser_t *parser, const pm_newline_list_t *newline_list, const char *number_prefix, int32_t line, uint32_t column_start, uint32_t column_end, pm_buffer_t *buffer) {
10630 int32_t line_delta = line - parser->start_line;
10631 assert(line_delta >= 0);
10632
10633 size_t index = (size_t) line_delta;
10634 assert(index < newline_list->size);
10635
10636 const uint8_t *start = &parser->start[newline_list->offsets[index]];
10637 const uint8_t *end;
10638
10639 if (index >= newline_list->size - 1) {
10640 end = parser->end;
10641 } else {
10642 end = &parser->start[newline_list->offsets[index + 1]];
10643 }
10644
10645 pm_buffer_append_format(buffer, number_prefix, line);
10646
10647 // Here we determine if we should truncate the end of the line.
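// For illustration: with PM_ERROR_TRUNCATE at 30, a very long line whose
// error ends at column 40 is cut roughly 30 bytes after that column (backed
// up to a character boundary) and rendered with a trailing " ...".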
10648 bool truncate_end = false;
10649 if ((column_end != 0) && ((end - (start + column_end)) >= PM_ERROR_TRUNCATE)) {
10650 const uint8_t *end_candidate = start + column_end + PM_ERROR_TRUNCATE;
10651
10652 for (const uint8_t *ptr = start; ptr < end_candidate;) {
10653 size_t char_width = parser->encoding->char_width(ptr, parser->end - ptr);
10654
10655 // If we failed to decode a character, then just bail out and
10656 // truncate at the fixed width.
10657 if (char_width == 0) break;
10658
10659 // If this next character would go past the end candidate,
10660 // then we need to truncate before it.
10661 if (ptr + char_width > end_candidate) {
10662 end_candidate = ptr;
10663 break;
10664 }
10665
10666 ptr += char_width;
10667 }
10668
10669 end = end_candidate;
10670 truncate_end = true;
10671 }
10672
10673 // Here we determine if we should truncate the start of the line.
10674 if (column_start >= PM_ERROR_TRUNCATE) {
10675 pm_buffer_append_string(buffer, "... ", 4);
10676 start += column_start;
10677 }
10678
10679 pm_buffer_append_string(buffer, (const char *) start, (size_t) (end - start));
10680
10681 if (truncate_end) {
10682 pm_buffer_append_string(buffer, " ...\n", 5);
10683 } else if (end == parser->end && end[-1] != '\n') {
10684 pm_buffer_append_string(buffer, "\n", 1);
10685 }
10686}
10687
10691static void
10692pm_parse_errors_format(const pm_parser_t *parser, const pm_list_t *error_list, pm_buffer_t *buffer, int highlight, bool inline_messages) {
10693 assert(error_list->size != 0);
10694
10695 // First, we're going to sort all of the errors by line number using an
10696 // insertion sort into a newly allocated array.
10697 const int32_t start_line = parser->start_line;
10698 const pm_newline_list_t *newline_list = &parser->newline_list;
10699
10700 pm_parse_error_t *errors = pm_parse_errors_format_sort(parser, error_list, newline_list);
10701 if (errors == NULL) return;
10702
10703 // Now we're going to determine how we're going to format line numbers and
10704 // blank lines based on the maximum number of digits in the line numbers
10705 // that are going to be displayed.
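// For example, if the largest line number to be shown is 437, the
// three-digit "%3" prefix is selected below so that the "NNN | " gutters
// and the "~~~~" dividers line up.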
10706 pm_parse_error_format_t error_format;
10707 int32_t first_line_number = errors[0].line;
10708 int32_t last_line_number = errors[error_list->size - 1].line;
10709
10710 // If we have a maximum line number that is negative, then we're going to
10711 // use the absolute value for comparison but multiply it by 10 to leave
10712 // an additional column for the negative sign.
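// For example, a line number of -5 is treated as 50 for this comparison,
// which selects the two-digit format and leaves room for the minus sign.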
10713 if (first_line_number < 0) first_line_number = (-first_line_number) * 10;
10714 if (last_line_number < 0) last_line_number = (-last_line_number) * 10;
10715 int32_t max_line_number = first_line_number > last_line_number ? first_line_number : last_line_number;
10716
10717 if (max_line_number < 10) {
10718 if (highlight > 0) {
10719 error_format = (pm_parse_error_format_t) {
10720 .number_prefix = PM_COLOR_GRAY "%1" PRIi32 " | " PM_COLOR_RESET,
10721 .blank_prefix = PM_COLOR_GRAY " | " PM_COLOR_RESET,
10722 .divider = PM_COLOR_GRAY " ~~~~~" PM_COLOR_RESET "\n"
10723 };
10724 } else {
10725 error_format = (pm_parse_error_format_t) {
10726 .number_prefix = "%1" PRIi32 " | ",
10727 .blank_prefix = " | ",
10728 .divider = " ~~~~~\n"
10729 };
10730 }
10731 } else if (max_line_number < 100) {
10732 if (highlight > 0) {
10733 error_format = (pm_parse_error_format_t) {
10734 .number_prefix = PM_COLOR_GRAY "%2" PRIi32 " | " PM_COLOR_RESET,
10735 .blank_prefix = PM_COLOR_GRAY " | " PM_COLOR_RESET,
10736 .divider = PM_COLOR_GRAY " ~~~~~~" PM_COLOR_RESET "\n"
10737 };
10738 } else {
10739 error_format = (pm_parse_error_format_t) {
10740 .number_prefix = "%2" PRIi32 " | ",
10741 .blank_prefix = " | ",
10742 .divider = " ~~~~~~\n"
10743 };
10744 }
10745 } else if (max_line_number < 1000) {
10746 if (highlight > 0) {
10747 error_format = (pm_parse_error_format_t) {
10748 .number_prefix = PM_COLOR_GRAY "%3" PRIi32 " | " PM_COLOR_RESET,
10749 .blank_prefix = PM_COLOR_GRAY " | " PM_COLOR_RESET,
10750 .divider = PM_COLOR_GRAY " ~~~~~~~" PM_COLOR_RESET "\n"
10751 };
10752 } else {
10753 error_format = (pm_parse_error_format_t) {
10754 .number_prefix = "%3" PRIi32 " | ",
10755 .blank_prefix = " | ",
10756 .divider = " ~~~~~~~\n"
10757 };
10758 }
10759 } else if (max_line_number < 10000) {
10760 if (highlight > 0) {
10761 error_format = (pm_parse_error_format_t) {
10762 .number_prefix = PM_COLOR_GRAY "%4" PRIi32 " | " PM_COLOR_RESET,
10763 .blank_prefix = PM_COLOR_GRAY " | " PM_COLOR_RESET,
10764 .divider = PM_COLOR_GRAY " ~~~~~~~~" PM_COLOR_RESET "\n"
10765 };
10766 } else {
10767 error_format = (pm_parse_error_format_t) {
10768 .number_prefix = "%4" PRIi32 " | ",
10769 .blank_prefix = " | ",
10770 .divider = " ~~~~~~~~\n"
10771 };
10772 }
10773 } else {
10774 if (highlight > 0) {
10775 error_format = (pm_parse_error_format_t) {
10776 .number_prefix = PM_COLOR_GRAY "%5" PRIi32 " | " PM_COLOR_RESET,
10777 .blank_prefix = PM_COLOR_GRAY " | " PM_COLOR_RESET,
10778 .divider = PM_COLOR_GRAY " ~~~~~~~~" PM_COLOR_RESET "\n"
10779 };
10780 } else {
10781 error_format = (pm_parse_error_format_t) {
10782 .number_prefix = "%5" PRIi32 " | ",
10783 .blank_prefix = " | ",
10784 .divider = " ~~~~~~~~\n"
10785 };
10786 }
10787 }
10788
10789 error_format.blank_prefix_length = strlen(error_format.blank_prefix);
10790 error_format.divider_length = strlen(error_format.divider);
10791
10792 // Now we're going to iterate through every error in our error list and
10793 // display it. While we're iterating, we will display some padding lines of
10794 // the source before the error to give some context. We'll be careful not to
10795 // display the same line twice in case the errors are close enough in the
10796 // source.
10797 int32_t last_line = parser->start_line - 1;
10798 uint32_t last_column_start = 0;
10799 const pm_encoding_t *encoding = parser->encoding;
10800
10801 for (size_t index = 0; index < error_list->size; index++) {
10802 pm_parse_error_t *error = &errors[index];
10803
10804 // Here we determine how many lines of padding of the source to display,
10805 // based on the difference from the last line that was displayed.
10806 if (error->line - last_line > 1) {
10807 if (error->line - last_line > 2) {
10808 if ((index != 0) && (error->line - last_line > 3)) {
10809 pm_buffer_append_string(buffer, error_format.divider, error_format.divider_length);
10810 }
10811
10812 pm_buffer_append_string(buffer, " ", 2);
10813 pm_parse_errors_format_line(parser, newline_list, error_format.number_prefix, error->line - 2, 0, 0, buffer);
10814 }
10815
10816 pm_buffer_append_string(buffer, " ", 2);
10817 pm_parse_errors_format_line(parser, newline_list, error_format.number_prefix, error->line - 1, 0, 0, buffer);
10818 }
10819
10820 // If this is the first error or we're on a new line, then we'll display
10821 // the line that has the error in it.
10822 if ((index == 0) || (error->line != last_line)) {
10823 if (highlight > 1) {
10824 pm_buffer_append_literal(buffer, PM_COLOR_RED "> " PM_COLOR_RESET);
10825 } else if (highlight > 0) {
10826 pm_buffer_append_literal(buffer, PM_COLOR_BOLD "> " PM_COLOR_RESET);
10827 } else {
10828 pm_buffer_append_literal(buffer, "> ");
10829 }
10830
10831 last_column_start = error->column_start;
10832
10833 // Find the maximum column end of all the errors on this line.
10834 uint32_t column_end = error->column_end;
10835 for (size_t next_index = index + 1; next_index < error_list->size; next_index++) {
10836 if (errors[next_index].line != error->line) break;
10837 if (errors[next_index].column_end > column_end) column_end = errors[next_index].column_end;
10838 }
10839
10840 pm_parse_errors_format_line(parser, newline_list, error_format.number_prefix, error->line, error->column_start, column_end, buffer);
10841 }
10842
10843 const uint8_t *start = &parser->start[newline_list->offsets[error->line - start_line]];
10844 if (start == parser->end) pm_buffer_append_byte(buffer, '\n');
10845
10846 // Now we'll display the actual error message. We'll do this by first
10847 // putting the prefix to the line, then a bunch of blank spaces
10848 // depending on the column, then as many carets as we need to display
10849 // the width of the error, then the error message itself.
10850 //
10851 // Note that this doesn't take into account the width of the actual
10852 // character when displayed in the terminal. For some East Asian
10853 // languages or emoji, this means it can be thrown off pretty badly. We
10854 // will need to solve this eventually.
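// For illustration, the rendered output looks roughly like:
//
//   > 3 | foo(1,
//       |       ^~~ <error message appended here when inline_messages>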
10855 pm_buffer_append_string(buffer, " ", 2);
10856 pm_buffer_append_string(buffer, error_format.blank_prefix, error_format.blank_prefix_length);
10857
10858 size_t column = 0;
10859 if (last_column_start >= PM_ERROR_TRUNCATE) {
10860 pm_buffer_append_string(buffer, " ", 4);
10861 column = last_column_start;
10862 }
10863
10864 while (column < error->column_start) {
10865 pm_buffer_append_byte(buffer, ' ');
10866
10867 size_t char_width = encoding->char_width(start + column, parser->end - (start + column));
10868 column += (char_width == 0 ? 1 : char_width);
10869 }
10870
10871 if (highlight > 1) pm_buffer_append_literal(buffer, PM_COLOR_RED);
10872 else if (highlight > 0) pm_buffer_append_literal(buffer, PM_COLOR_BOLD);
10873 pm_buffer_append_byte(buffer, '^');
10874
10875 size_t char_width = encoding->char_width(start + column, parser->end - (start + column));
10876 column += (char_width == 0 ? 1 : char_width);
10877
10878 while (column < error->column_end) {
10879 pm_buffer_append_byte(buffer, '~');
10880
10881 size_t char_width = encoding->char_width(start + column, parser->end - (start + column));
10882 column += (char_width == 0 ? 1 : char_width);
10883 }
10884
10885 if (highlight > 0) pm_buffer_append_literal(buffer, PM_COLOR_RESET);
10886
10887 if (inline_messages) {
10888 pm_buffer_append_byte(buffer, ' ');
10889 assert(error->error != NULL);
10890
10891 const char *message = error->error->message;
10892 pm_buffer_append_string(buffer, message, strlen(message));
10893 }
10894
10895 pm_buffer_append_byte(buffer, '\n');
10896
10897 // Here we determine how many lines of padding to display after the
10898 // error, depending on where the next error is in source.
10899 last_line = error->line;
10900 int32_t next_line;
10901
10902 if (index == error_list->size - 1) {
10903 next_line = (((int32_t) newline_list->size) + parser->start_line);
10904
10905 // If the file ends with a newline, subtract one from our "next_line"
10906 // so that we don't output an extra line at the end of the file
10907 if ((parser->start + newline_list->offsets[newline_list->size - 1]) == parser->end) {
10908 next_line--;
10909 }
10910 }
10911 else {
10912 next_line = errors[index + 1].line;
10913 }
10914
10915 if (next_line - last_line > 1) {
10916 pm_buffer_append_string(buffer, " ", 2);
10917 pm_parse_errors_format_line(parser, newline_list, error_format.number_prefix, ++last_line, 0, 0, buffer);
10918 }
10919
10920 if (next_line - last_line > 1) {
10921 pm_buffer_append_string(buffer, " ", 2);
10922 pm_parse_errors_format_line(parser, newline_list, error_format.number_prefix, ++last_line, 0, 0, buffer);
10923 }
10924 }
10925
10926 // Finally, we'll free the array of errors that we allocated.
10927 xfree(errors);
10928}
10929
10930#undef PM_ERROR_TRUNCATE
10931#undef PM_COLOR_GRAY
10932#undef PM_COLOR_RED
10933#undef PM_COLOR_RESET
10934
10941static bool
10942pm_parse_process_error_utf8_p(const pm_parser_t *parser, const pm_location_t *location)
10943{
10944 const size_t start_line = pm_newline_list_line_column(&parser->newline_list, location->start, 1).line;
10945 const size_t end_line = pm_newline_list_line_column(&parser->newline_list, location->end, 1).line;
10946
10947 const uint8_t *start = parser->start + parser->newline_list.offsets[start_line - 1];
10948 const uint8_t *end = ((end_line == parser->newline_list.size) ? parser->end : (parser->start + parser->newline_list.offsets[end_line]));
10949 size_t width;
10950
10951 while (start < end) {
10952 if ((width = pm_encoding_utf_8_char_width(start, end - start)) == 0) return false;
10953 start += width;
10954 }
10955
10956 return true;
10957}
10958
10963static VALUE
10964pm_parse_process_error(const pm_parse_result_t *result)
10965{
10966 const pm_parser_t *parser = &result->parser;
10967 const pm_diagnostic_t *head = (const pm_diagnostic_t *) parser->error_list.head;
10968 bool valid_utf8 = true;
10969
10970 pm_buffer_t buffer = { 0 };
10971 const pm_string_t *filepath = &parser->filepath;
10972
10973 int highlight = rb_stderr_tty_p();
10974 if (highlight) {
10975 const char *no_color = getenv("NO_COLOR");
10976 highlight = (no_color == NULL || no_color[0] == '\0') ? 2 : 1;
10977 }
10978
10979 for (const pm_diagnostic_t *error = head; error != NULL; error = (const pm_diagnostic_t *) error->node.next) {
10980 switch (error->level) {
10981 case PM_ERROR_LEVEL_SYNTAX:
10982 // It is implicitly assumed that the error messages will be
10983 // encodable as UTF-8. Because of this, we can't include source
10984 // examples that contain invalid byte sequences. So if any source
10985 // examples include invalid UTF-8 byte sequences, we will skip
10986 // showing source examples entirely.
10987 if (valid_utf8 && !pm_parse_process_error_utf8_p(parser, &error->location)) {
10988 valid_utf8 = false;
10989 }
10990 break;
10991 case PM_ERROR_LEVEL_ARGUMENT: {
10992 // Any errors with the level PM_ERROR_LEVEL_ARGUMENT take over as
10993 // the only error that gets raised. This is to allow priority
10994 // messages that should be handled before anything else.
10995 int32_t line_number = (int32_t) pm_location_line_number(parser, &error->location);
10996
10997 pm_buffer_append_format(
10998 &buffer,
10999 "%.*s:%" PRIi32 ": %s",
11000 (int) pm_string_length(filepath),
11001 pm_string_source(filepath),
11002 line_number,
11003 error->message
11004 );
11005
11006 if (pm_parse_process_error_utf8_p(parser, &error->location)) {
11007 pm_buffer_append_byte(&buffer, '\n');
11008
11009 pm_list_node_t *list_node = (pm_list_node_t *) error;
11010 pm_list_t error_list = { .size = 1, .head = list_node, .tail = list_node };
11011
11012 pm_parse_errors_format(parser, &error_list, &buffer, highlight, false);
11013 }
11014
11015 VALUE value = rb_exc_new(rb_eArgError, pm_buffer_value(&buffer), pm_buffer_length(&buffer));
11016 pm_buffer_free(&buffer);
11017
11018 return value;
11019 }
11020 case PM_ERROR_LEVEL_LOAD: {
11021 // Load errors are much simpler, because they don't include any of
11022 // the source in them. We create the error directly from the
11023 // message.
11024 VALUE message = rb_enc_str_new_cstr(error->message, rb_locale_encoding());
11025 VALUE value = rb_exc_new3(rb_eLoadError, message);
11026 rb_ivar_set(value, rb_intern_const("@path"), Qnil);
11027 return value;
11028 }
11029 }
11030 }
11031
11032 pm_buffer_append_format(
11033 &buffer,
11034 "%.*s:%" PRIi32 ": syntax error%s found\n",
11035 (int) pm_string_length(filepath),
11036 pm_string_source(filepath),
11037 (int32_t) pm_location_line_number(parser, &head->location),
11038 (parser->error_list.size > 1) ? "s" : ""
11039 );
11040
11041 if (valid_utf8) {
11042 pm_parse_errors_format(parser, &parser->error_list, &buffer, highlight, true);
11043 }
11044 else {
11045 for (const pm_diagnostic_t *error = head; error != NULL; error = (const pm_diagnostic_t *) error->node.next) {
11046 if (error != head) pm_buffer_append_byte(&buffer, '\n');
11047 pm_buffer_append_format(&buffer, "%.*s:%" PRIi32 ": %s", (int) pm_string_length(filepath), pm_string_source(filepath), (int32_t) pm_location_line_number(parser, &error->location), error->message);
11048 }
11049 }
11050
11051 VALUE message = rb_enc_str_new(pm_buffer_value(&buffer), pm_buffer_length(&buffer), result->node.encoding);
11052 VALUE error = rb_exc_new_str(rb_eSyntaxError, message);
11053
11054 rb_encoding *filepath_encoding = result->node.filepath_encoding != NULL ? result->node.filepath_encoding : rb_utf8_encoding();
11055 VALUE path = rb_enc_str_new((const char *) pm_string_source(filepath), pm_string_length(filepath), filepath_encoding);
11056
11057 rb_ivar_set(error, rb_intern_const("@path"), path);
11058 pm_buffer_free(&buffer);
11059
11060 return error;
11061}
11062
11068static VALUE
11069pm_parse_process(pm_parse_result_t *result, pm_node_t *node, VALUE *script_lines)
11070{
11071 pm_parser_t *parser = &result->parser;
11072
11073 // First, set up the scope node so that the AST node is attached and can be
11074 // freed regardless of whether or not we return an error.
11075 pm_scope_node_t *scope_node = &result->node;
11076 rb_encoding *filepath_encoding = scope_node->filepath_encoding;
11077 int coverage_enabled = scope_node->coverage_enabled;
11078
11079 pm_scope_node_init(node, scope_node, NULL);
11080 scope_node->filepath_encoding = filepath_encoding;
11081
11082 scope_node->encoding = rb_enc_find(parser->encoding->name);
11083 if (!scope_node->encoding) rb_bug("Encoding not found %s!", parser->encoding->name);
11084
11085 scope_node->coverage_enabled = coverage_enabled;
11086
11087 // If RubyVM.keep_script_lines is set to true, then we need to create that
11088 // array of script lines here.
11089 if (script_lines != NULL) {
11090 *script_lines = rb_ary_new_capa(parser->newline_list.size);
11091
11092 for (size_t index = 0; index < parser->newline_list.size; index++) {
11093 size_t offset = parser->newline_list.offsets[index];
11094 size_t length = index == parser->newline_list.size - 1 ? ((size_t) (parser->end - (parser->start + offset))) : (parser->newline_list.offsets[index + 1] - offset);
11095 rb_ary_push(*script_lines, rb_enc_str_new((const char *) parser->start + offset, length, scope_node->encoding));
11096 }
11097
11098 scope_node->script_lines = script_lines;
11099 }
11100
11101 // Emit all of the various warnings from the parse.
11102 const pm_diagnostic_t *warning;
11103 const char *warning_filepath = (const char *) pm_string_source(&parser->filepath);
11104
11105 for (warning = (const pm_diagnostic_t *) parser->warning_list.head; warning != NULL; warning = (const pm_diagnostic_t *) warning->node.next) {
11106 int line = pm_location_line_number(parser, &warning->location);
11107
11108 if (warning->level == PM_WARNING_LEVEL_VERBOSE) {
11109 rb_enc_compile_warning(scope_node->encoding, warning_filepath, line, "%s", warning->message);
11110 }
11111 else {
11112 rb_enc_compile_warn(scope_node->encoding, warning_filepath, line, "%s", warning->message);
11113 }
11114 }
11115
11116 // If there are errors, raise an appropriate error and free the result.
11117 if (parser->error_list.size > 0) {
11118 VALUE error = pm_parse_process_error(result);
11119
11120 // TODO: We need to set the backtrace.
11121 // rb_funcallv(error, rb_intern("set_backtrace"), 1, &path);
11122 return error;
11123 }
11124
11125 // Now set up the constant pool and intern all of the various constants into
11126 // their corresponding IDs.
11127 scope_node->parser = parser;
11128 scope_node->constants = parser->constant_pool.size ? xcalloc(parser->constant_pool.size, sizeof(ID)) : NULL;
11129
11130 for (uint32_t index = 0; index < parser->constant_pool.size; index++) {
11131 pm_constant_t *constant = &parser->constant_pool.constants[index];
11132 scope_node->constants[index] = rb_intern3((const char *) constant->start, constant->length, scope_node->encoding);
11133 }
11134
11135 scope_node->index_lookup_table = st_init_numtable();
11136 pm_constant_id_list_t *locals = &scope_node->locals;
11137 for (size_t index = 0; index < locals->size; index++) {
11138 st_insert(scope_node->index_lookup_table, locals->ids[index], index);
11139 }
11140
11141 // If we got here, this is a success and we can return Qnil to indicate that
11142 // no error should be raised.
11143 result->parsed = true;
11144 return Qnil;
11145}
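/* Editor-added sketch (not part of the original source): the script_lines
 * out-parameter contract, given a result/node pair prepared as in
 * pm_parse_file() below. The caller passes the address of a VALUE it keeps
 * reachable (e.g. on the C stack):
 *
 *     VALUE lines = Qnil;
 *     VALUE error = pm_parse_process(result, node, &lines);
 *     if (NIL_P(error)) {
 *         // lines is an Array with one String per source line, and
 *         // result->node.script_lines == &lines
 *     }
 */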
11146
11151static void
11152pm_options_frozen_string_literal_init(pm_options_t *options)
11153{
11154 int frozen_string_literal = rb_iseq_opt_frozen_string_literal();
11155
11156 switch (frozen_string_literal) {
11157 case ISEQ_FROZEN_STRING_LITERAL_UNSET:
11158 break;
11159 case ISEQ_FROZEN_STRING_LITERAL_DISABLED:
11160 pm_options_frozen_string_literal_set(options, false);
11161 break;
11162 case ISEQ_FROZEN_STRING_LITERAL_ENABLED:
11163 pm_options_frozen_string_literal_set(options, true);
11164 break;
11165 default:
11166 rb_bug("pm_options_frozen_string_literal_init: invalid frozen_string_literal=%d", frozen_string_literal);
11167 break;
11168 }
11169}
11170
11175static inline VALUE
11176pm_parse_file_script_lines(const pm_scope_node_t *scope_node, const pm_parser_t *parser)
11177{
11178 const pm_newline_list_t *newline_list = &parser->newline_list;
11179 const char *start = (const char *) parser->start;
11180 const char *end = (const char *) parser->end;
11181
11182 // If we end exactly on a newline, then there's no need to push on a final
11183 // segment. If we don't, then we need to push on the last offset up to the
11184 // end of the string.
11185 size_t last_offset = newline_list->offsets[newline_list->size - 1];
11186 bool last_push = start + last_offset != end;
11187
11188 // Create the ruby strings that represent the lines of the source.
11189 VALUE lines = rb_ary_new_capa(newline_list->size - (last_push ? 0 : 1));
11190
11191 for (size_t index = 0; index < newline_list->size - 1; index++) {
11192 size_t offset = newline_list->offsets[index];
11193 size_t length = newline_list->offsets[index + 1] - offset;
11194
11195 rb_ary_push(lines, rb_enc_str_new(start + offset, length, scope_node->encoding));
11196 }
11197
11198 // Push on the last line if we need to.
11199 if (last_push) {
11200 rb_ary_push(lines, rb_enc_str_new(start + last_offset, end - (start + last_offset), scope_node->encoding));
11201 }
11202
11203 return lines;
11204}
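/* Editor-added worked example (not part of the original source), assuming
 * the newline list records offset 0 plus the offset just past each newline:
 * for the source "a\nb" the offsets are {0, 2}; the loop above pushes "a\n"
 * and, because start + 2 != end, the trailing "b" as well (two lines). For
 * "a\nb\n" the offsets are {0, 2, 4}; start + 4 == end, so last_push is
 * false and exactly "a\n" and "b\n" are pushed.
 */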
11205
11206// This is essentially pm_string_mapped_init(), preferring to memory map the
11207// file, with additional handling for files that require blocking to properly
11208 // read (e.g. pipes).
11209static pm_string_init_result_t
11210pm_read_file(pm_string_t *string, const char *filepath)
11211{
11212#ifdef _WIN32
11213 // Open the file for reading.
11214 int length = MultiByteToWideChar(CP_UTF8, 0, filepath, -1, NULL, 0);
11215 if (length == 0) return PM_STRING_INIT_ERROR_GENERIC;
11216
11217 WCHAR *wfilepath = xmalloc(sizeof(WCHAR) * ((size_t) length));
11218 if ((wfilepath == NULL) || (MultiByteToWideChar(CP_UTF8, 0, filepath, -1, wfilepath, length) == 0)) {
11219 xfree(wfilepath);
11220 return PM_STRING_INIT_ERROR_GENERIC;
11221 }
11222
11223 HANDLE file = CreateFileW(wfilepath, GENERIC_READ, FILE_SHARE_READ | FILE_SHARE_WRITE, NULL, OPEN_EXISTING, FILE_ATTRIBUTE_READONLY, NULL);
11224 if (file == INVALID_HANDLE_VALUE) {
11225 pm_string_init_result_t result = PM_STRING_INIT_ERROR_GENERIC;
11226
11227 if (GetLastError() == ERROR_ACCESS_DENIED) {
11228 DWORD attributes = GetFileAttributesW(wfilepath);
11229 if ((attributes != INVALID_FILE_ATTRIBUTES) && (attributes & FILE_ATTRIBUTE_DIRECTORY)) {
11230 result = PM_STRING_INIT_ERROR_DIRECTORY;
11231 }
11232 }
11233
11234 xfree(wfilepath);
11235 return result;
11236 }
11237
11238 // Get the file size.
11239 DWORD file_size = GetFileSize(file, NULL);
11240 if (file_size == INVALID_FILE_SIZE) {
11241 CloseHandle(file);
11242 xfree(wfilepath);
11243 return PM_STRING_INIT_ERROR_GENERIC;
11244 }
11245
11246 // If the file is empty, then we don't need to do anything else; we'll set
11247 // the source to a constant empty string and return.
11248 if (file_size == 0) {
11249 CloseHandle(file);
11250 xfree(wfilepath);
11251 const uint8_t source[] = "";
11252 *string = (pm_string_t) { .type = PM_STRING_CONSTANT, .source = source, .length = 0 };
11253 return PM_STRING_INIT_SUCCESS;
11254 }
11255
11256 // Create a mapping of the file.
11257 HANDLE mapping = CreateFileMapping(file, NULL, PAGE_READONLY, 0, 0, NULL);
11258 if (mapping == NULL) {
11259 CloseHandle(file);
11260 xfree(wfilepath);
11261 return PM_STRING_INIT_ERROR_GENERIC;
11262 }
11263
11264 // Map the file into memory.
11265 uint8_t *source = (uint8_t *) MapViewOfFile(mapping, FILE_MAP_READ, 0, 0, 0);
11266 CloseHandle(mapping);
11267 CloseHandle(file);
11268 xfree(wfilepath);
11269
11270 if (source == NULL) {
11271 return PM_STRING_INIT_ERROR_GENERIC;
11272 }
11273
11274 *string = (pm_string_t) { .type = PM_STRING_MAPPED, .source = source, .length = (size_t) file_size };
11275 return PM_STRING_INIT_SUCCESS;
11276#elif defined(_POSIX_MAPPED_FILES)
11277 // Open the file for reading
11278 const int open_mode = O_RDONLY | O_NONBLOCK;
11279 int fd = open(filepath, open_mode);
11280 if (fd == -1) {
11281 return PM_STRING_INIT_ERROR_GENERIC;
11282 }
11283
11284 // Stat the file to get the file size
11285 struct stat sb;
11286 if (fstat(fd, &sb) == -1) {
11287 close(fd);
11288 return PM_STRING_INIT_ERROR_GENERIC;
11289 }
11290
11291 // Ensure it is a file and not a directory
11292 if (S_ISDIR(sb.st_mode)) {
11293 close(fd);
11294 return PM_STRING_INIT_ERROR_DIRECTORY;
11295 }
11296
11297 // We need to wait for data first before reading from pipes and character
11298 // devices. To not block the entire VM, we need to release the GVL while
11299 // reading. Use IO#read to do this and let the GC handle closing the FD.
11300 if (S_ISFIFO(sb.st_mode) || S_ISCHR(sb.st_mode)) {
11301 VALUE io = rb_io_fdopen((int) fd, open_mode, filepath);
11302 rb_io_wait(io, RB_INT2NUM(RUBY_IO_READABLE), Qnil);
11303 VALUE contents = rb_funcall(io, rb_intern("read"), 0);
11304
11305 if (!RB_TYPE_P(contents, T_STRING)) {
11306 return PM_STRING_INIT_ERROR_GENERIC;
11307 }
11308
11309 long len = RSTRING_LEN(contents);
11310 if (len < 0) {
11311 return PM_STRING_INIT_ERROR_GENERIC;
11312 }
11313
11314 size_t length = (size_t) len;
11315 uint8_t *source = malloc(length);
11316 memcpy(source, RSTRING_PTR(contents), length);
11317 *string = (pm_string_t) { .type = PM_STRING_OWNED, .source = source, .length = length };
11318
11319 return PM_STRING_INIT_SUCCESS;
11320 }
11321
11322 // mmap the file descriptor to virtually get the contents
11323 size_t size = (size_t) sb.st_size;
11324 uint8_t *source = NULL;
11325
11326 if (size == 0) {
11327 close(fd);
11328 const uint8_t source[] = "";
11329 *string = (pm_string_t) { .type = PM_STRING_CONSTANT, .source = source, .length = 0 };
11330 return PM_STRING_INIT_SUCCESS;
11331 }
11332
11333 source = mmap(NULL, size, PROT_READ, MAP_PRIVATE, fd, 0);
11334 if (source == MAP_FAILED) {
11335 close(fd);
11336 return PM_STRING_INIT_ERROR_GENERIC;
11337 }
11338
11339 close(fd);
11340 *string = (pm_string_t) { .type = PM_STRING_MAPPED, .source = source, .length = size };
11341 return PM_STRING_INIT_SUCCESS;
11342#else
11343 return pm_string_file_init(string, filepath);
11344#endif
11345}
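/* Editor-added sketch (not part of the original source): a hypothetical
 * caller of pm_read_file(), whose calling convention mirrors
 * pm_string_mapped_init():
 *
 *     pm_string_t contents;
 *     switch (pm_read_file(&contents, "path/to/script.rb")) {
 *       case PM_STRING_INIT_SUCCESS:
 *         // use pm_string_source(&contents) / pm_string_length(&contents),
 *         // then release it with pm_string_free(&contents)
 *         break;
 *       case PM_STRING_INIT_ERROR_DIRECTORY:
 *         // pm_load_file() below maps this case to EISDIR
 *         break;
 *       default:
 *         // PM_STRING_INIT_ERROR_GENERIC: consult errno / GetLastError()
 *         break;
 *     }
 */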
11346
11351VALUE
11352pm_load_file(pm_parse_result_t *result, VALUE filepath, bool load_error)
11353{
11354 pm_string_init_result_t init_result = pm_read_file(&result->input, RSTRING_PTR(filepath));
11355
11356 if (init_result == PM_STRING_INIT_SUCCESS) {
11357 pm_options_frozen_string_literal_init(&result->options);
11358 return Qnil;
11359 }
11360
11361 int err;
11362 if (init_result == PM_STRING_INIT_ERROR_DIRECTORY) {
11363 err = EISDIR;
11364 } else {
11365#ifdef _WIN32
11366 err = rb_w32_map_errno(GetLastError());
11367#else
11368 err = errno;
11369#endif
11370 }
11371
11372 VALUE error;
11373 if (load_error) {
11374 VALUE message = rb_str_buf_new_cstr(strerror(err));
11375 rb_str_cat2(message, " -- ");
11376 rb_str_append(message, filepath);
11377
11378 error = rb_exc_new3(rb_eLoadError, message);
11379 rb_ivar_set(error, rb_intern_const("@path"), filepath);
11380 } else {
11381 error = rb_syserr_new(err, RSTRING_PTR(filepath));
11382 RB_GC_GUARD(filepath);
11383 }
11384
11385 return error;
11386}
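/* Editor-added sketch (not part of the original source): the load_error flag
 * only selects the class of the returned exception. For a missing
 * "missing.rb", load_error = true yields a LoadError with @path set and a
 * message like "No such file or directory -- missing.rb"; load_error = false
 * yields the matching SystemCallError (e.g. Errno::ENOENT):
 *
 *     VALUE error = pm_load_file(&result, rb_str_new_cstr("missing.rb"), true);
 *     // NIL_P(error) on success; otherwise report or raise it
 */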
11387
11394VALUE
11395pm_parse_file(pm_parse_result_t *result, VALUE filepath, VALUE *script_lines)
11396{
11397 result->node.filepath_encoding = rb_enc_get(filepath);
11398 pm_options_filepath_set(&result->options, RSTRING_PTR(filepath));
11399 RB_GC_GUARD(filepath);
11400
11401 pm_options_version_for_current_ruby_set(&result->options);
11402
11403 pm_parser_init(&result->parser, pm_string_source(&result->input), pm_string_length(&result->input), &result->options);
11404 pm_node_t *node = pm_parse(&result->parser);
11405
11406 VALUE error = pm_parse_process(result, node, script_lines);
11407
11408 // If we're parsing a file, then we potentially need to support the
11409 // SCRIPT_LINES__ constant, which can be a hash mapping each file that
11410 // has been read to an array of its source lines (see the sketch below).
11411 ID id_script_lines = rb_intern("SCRIPT_LINES__");
11412
11413 if (rb_const_defined_at(rb_cObject, id_script_lines)) {
11414 VALUE constant_script_lines = rb_const_get_at(rb_cObject, id_script_lines);
11415
11416 if (RB_TYPE_P(constant_script_lines, T_HASH)) {
11417 rb_hash_aset(constant_script_lines, filepath, pm_parse_file_script_lines(&result->node, &result->parser));
11418 }
11419 }
11420
11421 return error;
11422}
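/* Editor-added sketch (not part of the original source): SCRIPT_LINES__ is
 * only consulted here, never created, so a hypothetical embedder that wants
 * per-file source lines defines the hash before any files are parsed:
 *
 *     rb_const_set(rb_cObject, rb_intern("SCRIPT_LINES__"), rb_hash_new());
 *     // after pm_parse_file(), the hash maps each parsed filepath to an
 *     // Array of its source lines
 */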
11423
11428VALUE
11429pm_load_parse_file(pm_parse_result_t *result, VALUE filepath, VALUE *script_lines)
11430{
11431 VALUE error = pm_load_file(result, filepath, false);
11432 if (NIL_P(error)) {
11433 error = pm_parse_file(result, filepath, script_lines);
11434 }
11435
11436 return error;
11437}
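/* Editor-added sketch (not part of the original source): a minimal driver,
 * assuming a zero-initialized pm_parse_result_t that is later released with
 * the pm_parse_result_free() helper declared in prism_compile.h:
 *
 *     pm_parse_result_t result = { 0 };
 *     VALUE error = pm_load_parse_file(&result, filepath, NULL);
 *     if (!NIL_P(error)) {
 *         pm_parse_result_free(&result);
 *         rb_exc_raise(error);
 *     }
 *     // otherwise hand result.node to the compiler, then free the result
 */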
11438
11445VALUE
11446pm_parse_string(pm_parse_result_t *result, VALUE source, VALUE filepath, VALUE *script_lines)
11447{
11448 rb_encoding *encoding = rb_enc_get(source);
11449 if (!rb_enc_asciicompat(encoding)) {
11450 return rb_exc_new_cstr(rb_eArgError, "invalid source encoding");
11451 }
11452
11453 pm_options_frozen_string_literal_init(&result->options);
11454 pm_string_constant_init(&result->input, RSTRING_PTR(source), RSTRING_LEN(source));
11455 pm_options_encoding_set(&result->options, rb_enc_name(encoding));
11456
11457 result->node.filepath_encoding = rb_enc_get(filepath);
11458 pm_options_filepath_set(&result->options, RSTRING_PTR(filepath));
11459 RB_GC_GUARD(filepath);
11460
11461 pm_options_version_for_current_ruby_set(&result->options);
11462
11463 pm_parser_init(&result->parser, pm_string_source(&result->input), pm_string_length(&result->input), &result->options);
11464 pm_node_t *node = pm_parse(&result->parser);
11465
11466 return pm_parse_process(result, node, script_lines);
11467}
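/* Editor-added sketch (not part of the original source): parsing an
 * in-memory Ruby string. A source with a non-ASCII-compatible encoding is
 * rejected by returning (not raising) an ArgumentError:
 *
 *     pm_parse_result_t result = { 0 };
 *     VALUE source = rb_str_new_cstr("1 + 2");
 *     VALUE name = rb_str_new_cstr("(example)");
 *     VALUE error = pm_parse_string(&result, source, name, NULL);
 *     // NIL_P(error) => result.node now holds the parsed AST
 */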
11468
11469struct rb_stdin_wrapper {
11470 VALUE rb_stdin;
11471 int eof_seen;
11472};
11473
11474static int
11475pm_parse_stdin_eof(void *stream)
11476{
11477 struct rb_stdin_wrapper * wrapped_stdin = (struct rb_stdin_wrapper *)stream;
11478 return wrapped_stdin->eof_seen;
11479}
11480
11481VALUE rb_io_gets_limit_internal(VALUE io, long limit);
11482
11486static char *
11487pm_parse_stdin_fgets(char *string, int size, void *stream)
11488{
11489 RUBY_ASSERT(size > 0);
11490
11491 struct rb_stdin_wrapper * wrapped_stdin = (struct rb_stdin_wrapper *)stream;
11492
11493 VALUE line = rb_io_gets_limit_internal(wrapped_stdin->rb_stdin, size - 1);
11494 if (NIL_P(line)) {
11495 return NULL;
11496 }
11497
11498 const char *cstr = RSTRING_PTR(line);
11499 long length = RSTRING_LEN(line);
11500
11501 memcpy(string, cstr, length);
11502 string[length] = '\0';
11503
11504 // We're reading strings from stdin via gets. We'll assume that if the
11505 // string is shorter than the requested length and doesn't end with a
11506 // newline, then we hit EOF (see the worked example after this function).
11507 if (length < (size - 1) && string[length - 1] != '\n') {
11508 wrapped_stdin->eof_seen = 1;
11509 }
11510
11511 return string;
11512}
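/* Editor-added worked example (not part of the original source): with
 * size = 1024 the gets limit is 1023. A full line "puts 1\n" (length 7) ends
 * in a newline, so eof_seen stays 0 even though 7 < 1023; a final
 * unterminated "end" (length 3, no trailing newline) satisfies both tests,
 * sets eof_seen to 1, and the next pm_parse_stdin_eof() call reports EOF to
 * pm_parse_stream().
 */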
11513
11514// We need access to this function when we're done parsing stdin.
11515void rb_reset_argf_lineno(long n);
11516
11522VALUE
11523pm_parse_stdin(pm_parse_result_t *result)
11524{
11525 pm_options_frozen_string_literal_init(&result->options);
11526
11527 struct rb_stdin_wrapper wrapped_stdin = {
11528 rb_stdin,
11529 0
11530 };
11531
11532 pm_buffer_t buffer;
11533 pm_node_t *node = pm_parse_stream(&result->parser, &buffer, (void *) &wrapped_stdin, pm_parse_stdin_fgets, pm_parse_stdin_eof, &result->options);
11534
11535 // Copy the allocated buffer contents into the input string so that it gets
11536 // freed. At this point we've handed over ownership, so we don't need to
11537 // free the buffer itself.
11538 pm_string_owned_init(&result->input, (uint8_t *) pm_buffer_value(&buffer), pm_buffer_length(&buffer));
11539
11540 // When we're done parsing, we reset $. because we don't want the fact that
11541 // we went through an IO object to be visible to the user.
11542 rb_reset_argf_lineno(0);
11543
11544 return pm_parse_process(result, node, NULL);
11545}
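/* Editor-added sketch (not part of the original source): roughly how a
 * program supplied on standard input (e.g. `ruby -` or `echo 'p 1' | ruby`)
 * gets parsed:
 *
 *     pm_parse_result_t result = { 0 };
 *     VALUE error = pm_parse_stdin(&result);
 *     // result.input now owns the bytes that were accumulated from $stdin
 */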
11546
11547#define PM_VERSION_FOR_RELEASE(major, minor) PM_VERSION_FOR_RELEASE_IMPL(major, minor)
11548#define PM_VERSION_FOR_RELEASE_IMPL(major, minor) PM_OPTIONS_VERSION_CRUBY_##major##_##minor
11549
11550void pm_options_version_for_current_ruby_set(pm_options_t *options) {
11551 options->version = PM_VERSION_FOR_RELEASE(RUBY_API_VERSION_MAJOR, RUBY_API_VERSION_MINOR);
11552}
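/* Editor-added note (not part of the original source): the extra _IMPL level
 * forces the version macros to expand before token pasting, e.g. for a 3.5
 * release:
 *
 *     PM_VERSION_FOR_RELEASE(RUBY_API_VERSION_MAJOR, RUBY_API_VERSION_MINOR)
 *         => PM_VERSION_FOR_RELEASE_IMPL(3, 5)
 *         => PM_OPTIONS_VERSION_CRUBY_3_5
 *
 * (assuming prism's pm_options_version_t defines an enumerator for each
 * supported CRuby release).
 */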
11553
11554#undef NEW_ISEQ
11555#define NEW_ISEQ OLD_ISEQ
11556
11557#undef NEW_CHILD_ISEQ
11558#define NEW_CHILD_ISEQ OLD_CHILD_ISEQ
Definition pm_string.h:33
SuperNode.
Definition ast.h:7563
struct pm_arguments_node * arguments
SuperNode::arguments.
Definition ast.h:7583
struct pm_node * block
SuperNode::block.
Definition ast.h:7593
SymbolNode.
Definition ast.h:7616
pm_string_t unescaped
SymbolNode::unescaped.
Definition ast.h:7639
pm_node_t base
The embedded base node.
Definition ast.h:7618
UndefNode.
Definition ast.h:7672
struct pm_node_list names
UndefNode::names.
Definition ast.h:7680
UnlessNode.
Definition ast.h:7703
struct pm_statements_node * statements
UnlessNode::statements.
Definition ast.h:7753
struct pm_node * predicate
UnlessNode::predicate.
Definition ast.h:7732
struct pm_else_node * else_clause
UnlessNode::else_clause.
Definition ast.h:7763
UntilNode.
Definition ast.h:7794
pm_node_t base
The embedded base node.
Definition ast.h:7796
WhenNode.
Definition ast.h:7839
WhileNode.
Definition ast.h:7883
pm_node_t base
The embedded base node.
Definition ast.h:7885
XStringNode.
Definition ast.h:7930
pm_string_t unescaped
XStringNode::unescaped.
Definition ast.h:7953
YieldNode.
Definition ast.h:7968
struct pm_arguments_node * arguments
YieldNode::arguments.
Definition ast.h:7986
Definition st.h:79
uintptr_t ID
Type that represents a Ruby identifier such as a variable name.
Definition value.h:52
uintptr_t VALUE
Type that represents a Ruby object.
Definition value.h:40
static bool RB_TYPE_P(VALUE obj, enum ruby_value_type t)
Queries if the given object is of given type.
Definition value_type.h:376