/* prism_compile.c (Ruby 4.0.0dev, 2025-12-23, revision e69f41a0a88df1d843f5d94cc4e5e757b96300e0) */
#include "prism.h"
#include "ruby/version.h"

/**
 * A pm_node_location_t holds the location information (line number and node
 * id) that the compiler attaches to the instructions it emits for a prism
 * node.
 */
typedef struct {
    /** The line number of the node in the source. */
    int32_t line;

    /** The unique identifier of the node. */
    uint32_t node_id;
} pm_node_location_t;

17/******************************************************************************/
18/* These macros operate on pm_node_location_t structs as opposed to NODE*s. */
19/******************************************************************************/
20
21#define PUSH_ADJUST(seq, location, label) \
22 ADD_ELEM((seq), (LINK_ELEMENT *) new_adjust_body(iseq, (label), (int) (location).line))
23
24#define PUSH_ADJUST_RESTORE(seq, label) \
25 ADD_ELEM((seq), (LINK_ELEMENT *) new_adjust_body(iseq, (label), -1))
26
27#define PUSH_INSN(seq, location, insn) \
28 ADD_ELEM((seq), (LINK_ELEMENT *) new_insn_body(iseq, (int) (location).line, (int) (location).node_id, BIN(insn), 0))
29
30#define PUSH_INSN1(seq, location, insn, op1) \
31 ADD_ELEM((seq), (LINK_ELEMENT *) new_insn_body(iseq, (int) (location).line, (int) (location).node_id, BIN(insn), 1, (VALUE)(op1)))
32
33#define PUSH_INSN2(seq, location, insn, op1, op2) \
34 ADD_ELEM((seq), (LINK_ELEMENT *) new_insn_body(iseq, (int) (location).line, (int) (location).node_id, BIN(insn), 2, (VALUE)(op1), (VALUE)(op2)))
35
36#define PUSH_INSN3(seq, location, insn, op1, op2, op3) \
37 ADD_ELEM((seq), (LINK_ELEMENT *) new_insn_body(iseq, (int) (location).line, (int) (location).node_id, BIN(insn), 3, (VALUE)(op1), (VALUE)(op2), (VALUE)(op3)))
38
39#define PUSH_INSNL(seq, location, insn, label) \
40 (PUSH_INSN1(seq, location, insn, label), LABEL_REF(label))
41
42#define PUSH_LABEL(seq, label) \
43 ADD_ELEM((seq), (LINK_ELEMENT *) (label))
44
45#define PUSH_SEND_R(seq, location, id, argc, block, flag, keywords) \
46 ADD_ELEM((seq), (LINK_ELEMENT *) new_insn_send(iseq, (int) (location).line, (int) (location).node_id, (id), (VALUE)(argc), (block), (VALUE)(flag), (keywords)))
47
48#define PUSH_SEND(seq, location, id, argc) \
49 PUSH_SEND_R((seq), location, (id), (argc), NULL, (VALUE)INT2FIX(0), NULL)
50
51#define PUSH_SEND_WITH_FLAG(seq, location, id, argc, flag) \
52 PUSH_SEND_R((seq), location, (id), (argc), NULL, (VALUE)(flag), NULL)
53
54#define PUSH_SEND_WITH_BLOCK(seq, location, id, argc, block) \
55 PUSH_SEND_R((seq), location, (id), (argc), (block), (VALUE)INT2FIX(0), NULL)
56
57#define PUSH_CALL(seq, location, id, argc) \
58 PUSH_SEND_R((seq), location, (id), (argc), NULL, (VALUE)INT2FIX(VM_CALL_FCALL), NULL)
59
60#define PUSH_CALL_WITH_BLOCK(seq, location, id, argc, block) \
61 PUSH_SEND_R((seq), location, (id), (argc), (block), (VALUE)INT2FIX(VM_CALL_FCALL), NULL)
62
63#define PUSH_TRACE(seq, event) \
64 ADD_ELEM((seq), (LINK_ELEMENT *) new_trace_body(iseq, (event), 0))
65
66#define PUSH_CATCH_ENTRY(type, ls, le, iseqv, lc) \
67 ADD_CATCH_ENTRY((type), (ls), (le), (iseqv), (lc))
68
69#define PUSH_SEQ(seq1, seq2) \
70 APPEND_LIST((seq1), (seq2))
71
72#define PUSH_SYNTHETIC_PUTNIL(seq, iseq) \
73 do { \
74 int lineno = ISEQ_COMPILE_DATA(iseq)->last_line; \
75 if (lineno == 0) lineno = FIX2INT(rb_iseq_first_lineno(iseq)); \
76 ADD_SYNTHETIC_INSN(seq, lineno, -1, putnil); \
77 } while (0)
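
/*
 * Illustrative usage sketch: the PUSH_* macros above take a pm_node_location_t
 * so that each emitted instruction carries its line number and node id, e.g.
 *
 *     const pm_node_location_t location = PM_NODE_START_LOCATION(parser, node);
 *     PUSH_INSN(ret, location, putnil);
 *     PUSH_SEND(ret, location, idTo_s, INT2FIX(0));
 */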
78
79/******************************************************************************/
80/* These functions compile getlocal/setlocal instructions but operate on */
81/* prism locations instead of NODEs. */
82/******************************************************************************/
83
84static void
85pm_iseq_add_getlocal(rb_iseq_t *iseq, LINK_ANCHOR *const seq, int line, int node_id, int idx, int level)
86{
87 if (iseq_local_block_param_p(iseq, idx, level)) {
88 ADD_ELEM(seq, (LINK_ELEMENT *) new_insn_body(iseq, line, node_id, BIN(getblockparam), 2, INT2FIX((idx) + VM_ENV_DATA_SIZE - 1), INT2FIX(level)));
89 }
90 else {
91 ADD_ELEM(seq, (LINK_ELEMENT *) new_insn_body(iseq, line, node_id, BIN(getlocal), 2, INT2FIX((idx) + VM_ENV_DATA_SIZE - 1), INT2FIX(level)));
92 }
93 if (level > 0) access_outer_variables(iseq, level, iseq_lvar_id(iseq, idx, level), Qfalse);
94}
95
96static void
97pm_iseq_add_setlocal(rb_iseq_t *iseq, LINK_ANCHOR *const seq, int line, int node_id, int idx, int level)
98{
99 if (iseq_local_block_param_p(iseq, idx, level)) {
100 ADD_ELEM(seq, (LINK_ELEMENT *) new_insn_body(iseq, line, node_id, BIN(setblockparam), 2, INT2FIX((idx) + VM_ENV_DATA_SIZE - 1), INT2FIX(level)));
101 }
102 else {
103 ADD_ELEM(seq, (LINK_ELEMENT *) new_insn_body(iseq, line, node_id, BIN(setlocal), 2, INT2FIX((idx) + VM_ENV_DATA_SIZE - 1), INT2FIX(level)));
104 }
105 update_lvar_state(iseq, level, idx);
106 if (level > 0) access_outer_variables(iseq, level, iseq_lvar_id(iseq, idx, level), Qtrue);
107}
108
109#define PUSH_GETLOCAL(seq, location, idx, level) \
110 pm_iseq_add_getlocal(iseq, (seq), (int) (location).line, (int) (location).node_id, (idx), (level))
111
112#define PUSH_SETLOCAL(seq, location, idx, level) \
113 pm_iseq_add_setlocal(iseq, (seq), (int) (location).line, (int) (location).node_id, (idx), (level))
114
115/******************************************************************************/
116/* These are helper macros for the compiler. */
117/******************************************************************************/
118
119#define OLD_ISEQ NEW_ISEQ
120#undef NEW_ISEQ
121
122#define NEW_ISEQ(node, name, type, line_no) \
123 pm_new_child_iseq(iseq, (node), rb_fstring(name), 0, (type), (line_no))
124
125#define OLD_CHILD_ISEQ NEW_CHILD_ISEQ
126#undef NEW_CHILD_ISEQ
127
128#define NEW_CHILD_ISEQ(node, name, type, line_no) \
129 pm_new_child_iseq(iseq, (node), rb_fstring(name), iseq, (type), (line_no))
130
131#define PM_COMPILE(node) \
132 pm_compile_node(iseq, (node), ret, popped, scope_node)
133
134#define PM_COMPILE_INTO_ANCHOR(_ret, node) \
135 pm_compile_node(iseq, (node), _ret, popped, scope_node)
136
137#define PM_COMPILE_POPPED(node) \
138 pm_compile_node(iseq, (node), ret, true, scope_node)
139
140#define PM_COMPILE_NOT_POPPED(node) \
141 pm_compile_node(iseq, (node), ret, false, scope_node)
142
143#define PM_NODE_START_LOCATION(parser, node) \
144 ((pm_node_location_t) { .line = pm_newline_list_line(&(parser)->newline_list, ((const pm_node_t *) (node))->location.start, (parser)->start_line), .node_id = ((const pm_node_t *) (node))->node_id })
145
146#define PM_NODE_END_LOCATION(parser, node) \
147 ((pm_node_location_t) { .line = pm_newline_list_line(&(parser)->newline_list, ((const pm_node_t *) (node))->location.end, (parser)->start_line), .node_id = ((const pm_node_t *) (node))->node_id })
148
149#define PM_LOCATION_START_LOCATION(parser, location, id) \
150 ((pm_node_location_t) { .line = pm_newline_list_line(&(parser)->newline_list, (location)->start, (parser)->start_line), .node_id = id })
151
152#define PM_NODE_START_LINE_COLUMN(parser, node) \
153 pm_newline_list_line_column(&(parser)->newline_list, ((const pm_node_t *) (node))->location.start, (parser)->start_line)
154
155#define PM_NODE_END_LINE_COLUMN(parser, node) \
156 pm_newline_list_line_column(&(parser)->newline_list, ((const pm_node_t *) (node))->location.end, (parser)->start_line)
157
158#define PM_LOCATION_START_LINE_COLUMN(parser, location) \
159 pm_newline_list_line_column(&(parser)->newline_list, (location)->start, (parser)->start_line)
160
161static int
162pm_node_line_number(const pm_parser_t *parser, const pm_node_t *node)
163{
164 return (int) pm_newline_list_line(&parser->newline_list, node->location.start, parser->start_line);
165}
166
167static int
168pm_location_line_number(const pm_parser_t *parser, const pm_location_t *location) {
169 return (int) pm_newline_list_line(&parser->newline_list, location->start, parser->start_line);
170}
171
175static VALUE
176parse_integer_value(const pm_integer_t *integer)
177{
178 VALUE result;
179
180 if (integer->values == NULL) {
181 result = UINT2NUM(integer->value);
182 }
183 else {
184 VALUE string = rb_str_new(NULL, integer->length * 8);
185 unsigned char *bytes = (unsigned char *) RSTRING_PTR(string);
186
187 size_t offset = integer->length * 8;
188 for (size_t value_index = 0; value_index < integer->length; value_index++) {
189 uint32_t value = integer->values[value_index];
190
191 for (int index = 0; index < 8; index++) {
192 int byte = (value >> (4 * index)) & 0xf;
193 bytes[--offset] = byte < 10 ? byte + '0' : byte - 10 + 'a';
194 }
195 }
196
197 result = rb_funcall(string, rb_intern("to_i"), 1, UINT2NUM(16));
198 }
199
200 if (integer->negative) {
201 result = rb_funcall(result, rb_intern("-@"), 0);
202 }
203
204 if (!SPECIAL_CONST_P(result)) {
205 RB_OBJ_SET_SHAREABLE(result); // bignum
206 }
207
208 return result;
209}
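
/*
 * Illustrative example: a two-word integer whose 32-bit words are
 * { 0x89abcdef, 0x01234567 } (least significant word first) is rendered as
 * the hex string "0123456789abcdef" and then converted with to_i(16).
 */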
210
214static inline VALUE
215parse_integer(const pm_integer_node_t *node)
216{
217 return parse_integer_value(&node->value);
218}
219
223static VALUE
224parse_float(const pm_float_node_t *node)
225{
226 VALUE val = DBL2NUM(node->value);
227 if (!FLONUM_P(val)) {
228 RB_OBJ_SET_SHAREABLE(val);
229 }
230 return val;
231}
232
239static VALUE
240parse_rational(const pm_rational_node_t *node)
241{
242 VALUE numerator = parse_integer_value(&node->numerator);
243 VALUE denominator = parse_integer_value(&node->denominator);
244
245 return rb_ractor_make_shareable(rb_rational_new(numerator, denominator));
246}
247
254static VALUE
255parse_imaginary(const pm_imaginary_node_t *node)
256{
257 VALUE imaginary_part;
258 switch (PM_NODE_TYPE(node->numeric)) {
259 case PM_FLOAT_NODE: {
260 imaginary_part = parse_float((const pm_float_node_t *) node->numeric);
261 break;
262 }
263 case PM_INTEGER_NODE: {
264 imaginary_part = parse_integer((const pm_integer_node_t *) node->numeric);
265 break;
266 }
267 case PM_RATIONAL_NODE: {
268 imaginary_part = parse_rational((const pm_rational_node_t *) node->numeric);
269 break;
270 }
271 default:
272 rb_bug("Unexpected numeric type on imaginary number %s\n", pm_node_type_to_str(PM_NODE_TYPE(node->numeric)));
273 }
274
275 return RB_OBJ_SET_SHAREABLE(rb_complex_raw(INT2FIX(0), imaginary_part));
276}
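
/*
 * For example, the literal 2i compiles to Complex(0, 2), while 2ri wraps a
 * RationalNode and compiles to Complex(0, (2/1)).
 */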
277
278static inline VALUE
279parse_string(const pm_scope_node_t *scope_node, const pm_string_t *string)
280{
281 return rb_enc_str_new((const char *) pm_string_source(string), pm_string_length(string), scope_node->encoding);
282}
283
289static inline VALUE
290parse_string_encoded(const pm_node_t *node, const pm_string_t *string, rb_encoding *default_encoding)
291{
292 rb_encoding *encoding;
293
294 if (node->flags & PM_ENCODING_FLAGS_FORCED_BINARY_ENCODING) {
295 encoding = rb_ascii8bit_encoding();
296 }
297 else if (node->flags & PM_ENCODING_FLAGS_FORCED_UTF8_ENCODING) {
298 encoding = rb_utf8_encoding();
299 }
300 else {
301 encoding = default_encoding;
302 }
303
304 return rb_enc_str_new((const char *) pm_string_source(string), pm_string_length(string), encoding);
305}
306
307static inline VALUE
308parse_static_literal_string(rb_iseq_t *iseq, const pm_scope_node_t *scope_node, const pm_node_t *node, const pm_string_t *string)
309{
310 rb_encoding *encoding;
311
312 if (node->flags & PM_STRING_FLAGS_FORCED_BINARY_ENCODING) {
313 encoding = rb_ascii8bit_encoding();
314 }
315 else if (node->flags & PM_STRING_FLAGS_FORCED_UTF8_ENCODING) {
316 encoding = rb_utf8_encoding();
317 }
318 else {
319 encoding = scope_node->encoding;
320 }
321
322 VALUE value = rb_enc_literal_str((const char *) pm_string_source(string), pm_string_length(string), encoding);
324
325 if (ISEQ_COMPILE_DATA(iseq)->option->debug_frozen_string_literal || RTEST(ruby_debug)) {
326 int line_number = pm_node_line_number(scope_node->parser, node);
327 value = rb_ractor_make_shareable(rb_str_with_debug_created_info(value, rb_iseq_path(iseq), line_number));
328 }
329
330 return value;
331}
332
333static inline ID
334parse_string_symbol(const pm_scope_node_t *scope_node, const pm_symbol_node_t *symbol)
335{
336 rb_encoding *encoding;
337 if (symbol->base.flags & PM_SYMBOL_FLAGS_FORCED_UTF8_ENCODING) {
338 encoding = rb_utf8_encoding();
339 }
340 else if (symbol->base.flags & PM_SYMBOL_FLAGS_FORCED_BINARY_ENCODING) {
341 encoding = rb_ascii8bit_encoding();
342 }
343 else if (symbol->base.flags & PM_SYMBOL_FLAGS_FORCED_US_ASCII_ENCODING) {
344 encoding = rb_usascii_encoding();
345 }
346 else {
347 encoding = scope_node->encoding;
348 }
349
350 return rb_intern3((const char *) pm_string_source(&symbol->unescaped), pm_string_length(&symbol->unescaped), encoding);
351}
352
353static int
354pm_optimizable_range_item_p(const pm_node_t *node)
355{
356 return (!node || PM_NODE_TYPE_P(node, PM_INTEGER_NODE) || PM_NODE_TYPE_P(node, PM_NIL_NODE));
357}
358
360static VALUE
361parse_regexp_error(rb_iseq_t *iseq, int32_t line_number, const char *fmt, ...)
362{
363 va_list args;
364 va_start(args, fmt);
365 VALUE error = rb_syntax_error_append(Qnil, rb_iseq_path(iseq), line_number, -1, NULL, "%" PRIsVALUE, args);
366 va_end(args);
367 rb_exc_raise(error);
368}
369
370static VALUE
371parse_regexp_string_part(rb_iseq_t *iseq, const pm_scope_node_t *scope_node, const pm_node_t *node, const pm_string_t *unescaped, rb_encoding *implicit_regexp_encoding, rb_encoding *explicit_regexp_encoding)
372{
373 // If we were passed an explicit regexp encoding, then we need to double
374 // check that it's okay here for this fragment of the string.
375 rb_encoding *encoding;
376
377 if (explicit_regexp_encoding != NULL) {
378 encoding = explicit_regexp_encoding;
379 }
380 else if (node->flags & PM_STRING_FLAGS_FORCED_BINARY_ENCODING) {
381 encoding = rb_ascii8bit_encoding();
382 }
383 else if (node->flags & PM_STRING_FLAGS_FORCED_UTF8_ENCODING) {
384 encoding = rb_utf8_encoding();
385 }
386 else {
387 encoding = implicit_regexp_encoding;
388 }
389
390 VALUE string = rb_enc_str_new((const char *) pm_string_source(unescaped), pm_string_length(unescaped), encoding);
391 VALUE error = rb_reg_check_preprocess(string);
392
393 if (error != Qnil) parse_regexp_error(iseq, pm_node_line_number(scope_node->parser, node), "%" PRIsVALUE, rb_obj_as_string(error));
394 return string;
395}
396
397static VALUE
398pm_static_literal_concat(rb_iseq_t *iseq, const pm_node_list_t *nodes, const pm_scope_node_t *scope_node, rb_encoding *implicit_regexp_encoding, rb_encoding *explicit_regexp_encoding, bool top)
399{
400 VALUE current = Qnil;
401
402 for (size_t index = 0; index < nodes->size; index++) {
403 const pm_node_t *part = nodes->nodes[index];
404 VALUE string;
405
406 switch (PM_NODE_TYPE(part)) {
407 case PM_STRING_NODE:
408 if (implicit_regexp_encoding != NULL) {
409 if (top) {
410 string = parse_regexp_string_part(iseq, scope_node, part, &((const pm_string_node_t *) part)->unescaped, implicit_regexp_encoding, explicit_regexp_encoding);
411 }
412 else {
413 string = parse_string_encoded(part, &((const pm_string_node_t *) part)->unescaped, scope_node->encoding);
414 VALUE error = rb_reg_check_preprocess(string);
415 if (error != Qnil) parse_regexp_error(iseq, pm_node_line_number(scope_node->parser, part), "%" PRIsVALUE, rb_obj_as_string(error));
416 }
417 }
418 else {
419 string = parse_string_encoded(part, &((const pm_string_node_t *) part)->unescaped, scope_node->encoding);
420 }
421 break;
422 case PM_INTERPOLATED_STRING_NODE:
423 string = pm_static_literal_concat(iseq, &((const pm_interpolated_string_node_t *) part)->parts, scope_node, implicit_regexp_encoding, explicit_regexp_encoding, false);
424 break;
          case PM_EMBEDDED_STATEMENTS_NODE: {
            const pm_embedded_statements_node_t *cast = (const pm_embedded_statements_node_t *) part;
            string = pm_static_literal_concat(iseq, &cast->statements->body, scope_node, implicit_regexp_encoding, explicit_regexp_encoding, false);
            break;
          }
430 default:
431 RUBY_ASSERT(false && "unexpected node type in pm_static_literal_concat");
432 return Qnil;
433 }
434
435 if (current != Qnil) {
436 current = rb_str_concat(current, string);
437 }
438 else {
439 current = string;
440 }
441 }
442
443 return top ? rb_fstring(current) : current;
444}
445
446#define RE_OPTION_ENCODING_SHIFT 8
447#define RE_OPTION_ENCODING(encoding) (((encoding) & 0xFF) << RE_OPTION_ENCODING_SHIFT)
448#define ARG_ENCODING_NONE 32
449#define ARG_ENCODING_FIXED 16
450#define ENC_ASCII8BIT 1
451#define ENC_EUC_JP 2
452#define ENC_Windows_31J 3
453#define ENC_UTF8 4
454
459static int
460parse_regexp_flags(const pm_node_t *node)
461{
462 int flags = 0;
463
    // Check "no encoding" first so that the encoding flags below don't get
    // clobbered. ARG_ENCODING_NONE is defined locally above, so we don't need
    // access to the private definition in the regexp implementation.
467 if (PM_NODE_FLAG_P(node, PM_REGULAR_EXPRESSION_FLAGS_ASCII_8BIT)) {
468 flags |= ARG_ENCODING_NONE;
469 }
470
471 if (PM_NODE_FLAG_P(node, PM_REGULAR_EXPRESSION_FLAGS_EUC_JP)) {
472 flags |= (ARG_ENCODING_FIXED | RE_OPTION_ENCODING(ENC_EUC_JP));
473 }
474
475 if (PM_NODE_FLAG_P(node, PM_REGULAR_EXPRESSION_FLAGS_WINDOWS_31J)) {
476 flags |= (ARG_ENCODING_FIXED | RE_OPTION_ENCODING(ENC_Windows_31J));
477 }
478
479 if (PM_NODE_FLAG_P(node, PM_REGULAR_EXPRESSION_FLAGS_UTF_8)) {
480 flags |= (ARG_ENCODING_FIXED | RE_OPTION_ENCODING(ENC_UTF8));
481 }
482
483 if (PM_NODE_FLAG_P(node, PM_REGULAR_EXPRESSION_FLAGS_IGNORE_CASE)) {
484 flags |= ONIG_OPTION_IGNORECASE;
485 }
486
487 if (PM_NODE_FLAG_P(node, PM_REGULAR_EXPRESSION_FLAGS_MULTI_LINE)) {
488 flags |= ONIG_OPTION_MULTILINE;
489 }
490
491 if (PM_NODE_FLAG_P(node, PM_REGULAR_EXPRESSION_FLAGS_EXTENDED)) {
492 flags |= ONIG_OPTION_EXTEND;
493 }
494
495 return flags;
496}
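
/*
 * For example, /foo/mi sets ONIG_OPTION_MULTILINE | ONIG_OPTION_IGNORECASE,
 * /foo/n sets ARG_ENCODING_NONE, and /foo/e sets ARG_ENCODING_FIXED together
 * with the EUC-JP encoding index.
 */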
497
498#undef RE_OPTION_ENCODING_SHIFT
499#undef RE_OPTION_ENCODING
500#undef ARG_ENCODING_FIXED
501#undef ARG_ENCODING_NONE
502#undef ENC_ASCII8BIT
503#undef ENC_EUC_JP
504#undef ENC_Windows_31J
505#undef ENC_UTF8
506
507static rb_encoding *
508parse_regexp_encoding(const pm_scope_node_t *scope_node, const pm_node_t *node)
509{
510 if (PM_NODE_FLAG_P(node, PM_REGULAR_EXPRESSION_FLAGS_FORCED_BINARY_ENCODING) || PM_NODE_FLAG_P(node, PM_REGULAR_EXPRESSION_FLAGS_ASCII_8BIT)) {
511 return rb_ascii8bit_encoding();
512 }
513 else if (PM_NODE_FLAG_P(node, PM_REGULAR_EXPRESSION_FLAGS_UTF_8)) {
514 return rb_utf8_encoding();
515 }
516 else if (PM_NODE_FLAG_P(node, PM_REGULAR_EXPRESSION_FLAGS_EUC_JP)) {
517 return rb_enc_get_from_index(ENCINDEX_EUC_JP);
518 }
519 else if (PM_NODE_FLAG_P(node, PM_REGULAR_EXPRESSION_FLAGS_WINDOWS_31J)) {
520 return rb_enc_get_from_index(ENCINDEX_Windows_31J);
521 }
522 else {
523 return NULL;
524 }
525}
526
527static VALUE
528parse_regexp(rb_iseq_t *iseq, const pm_scope_node_t *scope_node, const pm_node_t *node, VALUE string)
529{
530 VALUE errinfo = rb_errinfo();
531
532 int32_t line_number = pm_node_line_number(scope_node->parser, node);
533 VALUE regexp = rb_reg_compile(string, parse_regexp_flags(node), (const char *) pm_string_source(&scope_node->parser->filepath), line_number);
534
535 if (NIL_P(regexp)) {
536 VALUE message = rb_attr_get(rb_errinfo(), idMesg);
537 rb_set_errinfo(errinfo);
538
539 parse_regexp_error(iseq, line_number, "%" PRIsVALUE, message);
540 return Qnil;
541 }
542
543 return RB_OBJ_SET_SHAREABLE(rb_obj_freeze(regexp));
544}
545
546static inline VALUE
547parse_regexp_literal(rb_iseq_t *iseq, const pm_scope_node_t *scope_node, const pm_node_t *node, const pm_string_t *unescaped)
548{
549 rb_encoding *regexp_encoding = parse_regexp_encoding(scope_node, node);
550 if (regexp_encoding == NULL) regexp_encoding = scope_node->encoding;
551
552 VALUE string = rb_enc_str_new((const char *) pm_string_source(unescaped), pm_string_length(unescaped), regexp_encoding);
553 RB_OBJ_SET_SHAREABLE(string);
554 return parse_regexp(iseq, scope_node, node, string);
555}
556
557static inline VALUE
558parse_regexp_concat(rb_iseq_t *iseq, const pm_scope_node_t *scope_node, const pm_node_t *node, const pm_node_list_t *parts)
559{
560 rb_encoding *explicit_regexp_encoding = parse_regexp_encoding(scope_node, node);
561 rb_encoding *implicit_regexp_encoding = explicit_regexp_encoding != NULL ? explicit_regexp_encoding : scope_node->encoding;
562
563 VALUE string = pm_static_literal_concat(iseq, parts, scope_node, implicit_regexp_encoding, explicit_regexp_encoding, false);
564 return parse_regexp(iseq, scope_node, node, string);
565}
566
567static void pm_compile_node(rb_iseq_t *iseq, const pm_node_t *node, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node);
568
569static int
570pm_interpolated_node_compile(rb_iseq_t *iseq, const pm_node_list_t *parts, const pm_node_location_t *node_location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node, rb_encoding *implicit_regexp_encoding, rb_encoding *explicit_regexp_encoding, bool mutable_result, bool frozen_result)
571{
572 int stack_size = 0;
573 size_t parts_size = parts->size;
574 bool interpolated = false;
575
576 if (parts_size > 0) {
577 VALUE current_string = Qnil;
578 pm_node_location_t current_location = *node_location;
579
580 for (size_t index = 0; index < parts_size; index++) {
581 const pm_node_t *part = parts->nodes[index];
582
583 if (PM_NODE_TYPE_P(part, PM_STRING_NODE)) {
584 const pm_string_node_t *string_node = (const pm_string_node_t *) part;
585 VALUE string_value;
586
587 if (implicit_regexp_encoding == NULL) {
588 string_value = parse_string_encoded(part, &string_node->unescaped, scope_node->encoding);
589 }
590 else {
591 string_value = parse_regexp_string_part(iseq, scope_node, (const pm_node_t *) string_node, &string_node->unescaped, implicit_regexp_encoding, explicit_regexp_encoding);
592 }
593
594 if (RTEST(current_string)) {
595 current_string = rb_str_concat(current_string, string_value);
596 }
597 else {
598 current_string = string_value;
599 if (index != 0) current_location = PM_NODE_END_LOCATION(scope_node->parser, part);
600 }
601 }
602 else {
603 interpolated = true;
604
605 if (
606 PM_NODE_TYPE_P(part, PM_EMBEDDED_STATEMENTS_NODE) &&
607 ((const pm_embedded_statements_node_t *) part)->statements != NULL &&
608 ((const pm_embedded_statements_node_t *) part)->statements->body.size == 1 &&
609 PM_NODE_TYPE_P(((const pm_embedded_statements_node_t *) part)->statements->body.nodes[0], PM_STRING_NODE)
610 ) {
611 const pm_string_node_t *string_node = (const pm_string_node_t *) ((const pm_embedded_statements_node_t *) part)->statements->body.nodes[0];
612 VALUE string_value;
613
614 if (implicit_regexp_encoding == NULL) {
615 string_value = parse_string_encoded(part, &string_node->unescaped, scope_node->encoding);
616 }
617 else {
618 string_value = parse_regexp_string_part(iseq, scope_node, (const pm_node_t *) string_node, &string_node->unescaped, implicit_regexp_encoding, explicit_regexp_encoding);
619 }
620
621 if (RTEST(current_string)) {
622 current_string = rb_str_concat(current_string, string_value);
623 }
624 else {
625 current_string = string_value;
626 current_location = PM_NODE_START_LOCATION(scope_node->parser, part);
627 }
628 }
629 else {
630 if (!RTEST(current_string)) {
631 rb_encoding *encoding;
632
633 if (implicit_regexp_encoding != NULL) {
634 if (explicit_regexp_encoding != NULL) {
635 encoding = explicit_regexp_encoding;
636 }
637 else if (scope_node->parser->encoding == PM_ENCODING_US_ASCII_ENTRY) {
638 encoding = rb_ascii8bit_encoding();
639 }
640 else {
641 encoding = implicit_regexp_encoding;
642 }
643 }
644 else {
645 encoding = scope_node->encoding;
646 }
647
648 if (parts_size == 1) {
649 current_string = rb_enc_str_new(NULL, 0, encoding);
650 }
651 }
652
653 if (RTEST(current_string)) {
654 VALUE operand = rb_fstring(current_string);
655 PUSH_INSN1(ret, current_location, putobject, operand);
656 stack_size++;
657 }
658
659 PM_COMPILE_NOT_POPPED(part);
660
661 const pm_node_location_t current_location = PM_NODE_START_LOCATION(scope_node->parser, part);
662 PUSH_INSN(ret, current_location, dup);
663
664 {
665 const struct rb_callinfo *callinfo = new_callinfo(iseq, idTo_s, 0, VM_CALL_FCALL | VM_CALL_ARGS_SIMPLE, NULL, FALSE);
666 PUSH_INSN1(ret, current_location, objtostring, callinfo);
667 }
668
669 PUSH_INSN(ret, current_location, anytostring);
670
671 current_string = Qnil;
672 stack_size++;
673 }
674 }
675 }
676
677 if (RTEST(current_string)) {
678 current_string = rb_fstring(current_string);
679
680 if (stack_size == 0) {
681 if (frozen_result) {
682 PUSH_INSN1(ret, current_location, putobject, current_string);
683 } else if (mutable_result || interpolated) {
684 PUSH_INSN1(ret, current_location, putstring, current_string);
685 } else {
686 PUSH_INSN1(ret, current_location, putchilledstring, current_string);
687 }
688 } else {
689 PUSH_INSN1(ret, current_location, putobject, current_string);
690 }
691
692 current_string = Qnil;
693 stack_size++;
694 }
695 }
696 else {
697 PUSH_INSN(ret, *node_location, putnil);
698 }
699
700 return stack_size;
701}
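
/*
 * The return value above is the number of values this left on the stack; the
 * caller is expected to consume them with a length-taking instruction, for
 * example the toregexp emitted by pm_compile_regexp_dynamic below.
 */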
702
703static void
704pm_compile_regexp_dynamic(rb_iseq_t *iseq, const pm_node_t *node, const pm_node_list_t *parts, const pm_node_location_t *node_location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
705{
706 rb_encoding *explicit_regexp_encoding = parse_regexp_encoding(scope_node, node);
707 rb_encoding *implicit_regexp_encoding = explicit_regexp_encoding != NULL ? explicit_regexp_encoding : scope_node->encoding;
708
709 int length = pm_interpolated_node_compile(iseq, parts, node_location, ret, popped, scope_node, implicit_regexp_encoding, explicit_regexp_encoding, false, false);
710 PUSH_INSN2(ret, *node_location, toregexp, INT2FIX(parse_regexp_flags(node) & 0xFF), INT2FIX(length));
711}
712
713static VALUE
714pm_source_file_value(const pm_source_file_node_t *node, const pm_scope_node_t *scope_node)
715{
716 const pm_string_t *filepath = &node->filepath;
717 size_t length = pm_string_length(filepath);
718
719 if (length > 0) {
720 rb_encoding *filepath_encoding = scope_node->filepath_encoding != NULL ? scope_node->filepath_encoding : rb_utf8_encoding();
721 return rb_enc_interned_str((const char *) pm_string_source(filepath), length, filepath_encoding);
722 }
723 else {
724 return rb_fstring_lit("<compiled>");
725 }
726}
727
732static VALUE
733pm_static_literal_string(rb_iseq_t *iseq, VALUE string, int line_number)
734{
735 if (ISEQ_COMPILE_DATA(iseq)->option->debug_frozen_string_literal || RTEST(ruby_debug)) {
736 VALUE str = rb_str_with_debug_created_info(string, rb_iseq_path(iseq), line_number);
737 RB_OBJ_SET_SHAREABLE(str);
738 return str;
739 }
740 else {
741 return rb_fstring(string);
742 }
743}
744
750static VALUE
751pm_static_literal_value(rb_iseq_t *iseq, const pm_node_t *node, const pm_scope_node_t *scope_node)
752{
753 // Every node that comes into this function should already be marked as
754 // static literal. If it's not, then we have a bug somewhere.
755 RUBY_ASSERT(PM_NODE_FLAG_P(node, PM_NODE_FLAG_STATIC_LITERAL));
756
757 switch (PM_NODE_TYPE(node)) {
758 case PM_ARRAY_NODE: {
759 const pm_array_node_t *cast = (const pm_array_node_t *) node;
760 const pm_node_list_t *elements = &cast->elements;
761
762 VALUE value = rb_ary_hidden_new(elements->size);
763 for (size_t index = 0; index < elements->size; index++) {
764 rb_ary_push(value, pm_static_literal_value(iseq, elements->nodes[index], scope_node));
765 }
766
767 RB_OBJ_SET_FROZEN_SHAREABLE(value);
768 return value;
769 }
770 case PM_FALSE_NODE:
771 return Qfalse;
772 case PM_FLOAT_NODE:
773 return parse_float((const pm_float_node_t *) node);
774 case PM_HASH_NODE: {
775 const pm_hash_node_t *cast = (const pm_hash_node_t *) node;
776 const pm_node_list_t *elements = &cast->elements;
777
778 VALUE array = rb_ary_hidden_new(elements->size * 2);
779 for (size_t index = 0; index < elements->size; index++) {
780 RUBY_ASSERT(PM_NODE_TYPE_P(elements->nodes[index], PM_ASSOC_NODE));
781 const pm_assoc_node_t *cast = (const pm_assoc_node_t *) elements->nodes[index];
782 VALUE pair[2] = { pm_static_literal_value(iseq, cast->key, scope_node), pm_static_literal_value(iseq, cast->value, scope_node) };
783 rb_ary_cat(array, pair, 2);
784 }
785
786 VALUE value = rb_hash_new_with_size(elements->size);
787 rb_hash_bulk_insert(RARRAY_LEN(array), RARRAY_CONST_PTR(array), value);
788
789 value = rb_obj_hide(value);
790 RB_OBJ_SET_FROZEN_SHAREABLE(value);
791 return value;
792 }
793 case PM_IMAGINARY_NODE:
794 return parse_imaginary((const pm_imaginary_node_t *) node);
795 case PM_INTEGER_NODE:
796 return parse_integer((const pm_integer_node_t *) node);
      case PM_INTERPOLATED_MATCH_LAST_LINE_NODE: {
        const pm_interpolated_match_last_line_node_t *cast = (const pm_interpolated_match_last_line_node_t *) node;
        return parse_regexp_concat(iseq, scope_node, (const pm_node_t *) cast, &cast->parts);
      }
      case PM_INTERPOLATED_REGULAR_EXPRESSION_NODE: {
        const pm_interpolated_regular_expression_node_t *cast = (const pm_interpolated_regular_expression_node_t *) node;
        return parse_regexp_concat(iseq, scope_node, (const pm_node_t *) cast, &cast->parts);
      }
805 case PM_INTERPOLATED_STRING_NODE: {
806 VALUE string = pm_static_literal_concat(iseq, &((const pm_interpolated_string_node_t *) node)->parts, scope_node, NULL, NULL, false);
807 int line_number = pm_node_line_number(scope_node->parser, node);
808 return pm_static_literal_string(iseq, string, line_number);
809 }
      case PM_INTERPOLATED_SYMBOL_NODE: {
        const pm_interpolated_symbol_node_t *cast = (const pm_interpolated_symbol_node_t *) node;
        VALUE string = pm_static_literal_concat(iseq, &cast->parts, scope_node, NULL, NULL, true);

        return ID2SYM(rb_intern_str(string));
      }
816 case PM_MATCH_LAST_LINE_NODE: {
817 const pm_match_last_line_node_t *cast = (const pm_match_last_line_node_t *) node;
818 return parse_regexp_literal(iseq, scope_node, (const pm_node_t *) cast, &cast->unescaped);
819 }
820 case PM_NIL_NODE:
821 return Qnil;
822 case PM_RATIONAL_NODE:
823 return parse_rational((const pm_rational_node_t *) node);
      case PM_REGULAR_EXPRESSION_NODE: {
        const pm_regular_expression_node_t *cast = (const pm_regular_expression_node_t *) node;
        return parse_regexp_literal(iseq, scope_node, (const pm_node_t *) cast, &cast->unescaped);
      }
828 case PM_SOURCE_ENCODING_NODE:
829 return rb_enc_from_encoding(scope_node->encoding);
830 case PM_SOURCE_FILE_NODE: {
831 const pm_source_file_node_t *cast = (const pm_source_file_node_t *) node;
832 return pm_source_file_value(cast, scope_node);
833 }
834 case PM_SOURCE_LINE_NODE:
835 return INT2FIX(pm_node_line_number(scope_node->parser, node));
836 case PM_STRING_NODE: {
837 const pm_string_node_t *cast = (const pm_string_node_t *) node;
838 return parse_static_literal_string(iseq, scope_node, node, &cast->unescaped);
839 }
840 case PM_SYMBOL_NODE:
841 return ID2SYM(parse_string_symbol(scope_node, (const pm_symbol_node_t *) node));
842 case PM_TRUE_NODE:
843 return Qtrue;
844 default:
845 rb_bug("Don't have a literal value for node type %s", pm_node_type_to_str(PM_NODE_TYPE(node)));
846 return Qfalse;
847 }
848}
849
static rb_code_location_t
pm_code_location(const pm_scope_node_t *scope_node, const pm_node_t *node)
855{
856 const pm_line_column_t start_location = PM_NODE_START_LINE_COLUMN(scope_node->parser, node);
857 const pm_line_column_t end_location = PM_NODE_END_LINE_COLUMN(scope_node->parser, node);
858
859 return (rb_code_location_t) {
860 .beg_pos = { .lineno = start_location.line, .column = start_location.column },
861 .end_pos = { .lineno = end_location.line, .column = end_location.column }
862 };
863}
864
870#define PM_BRANCH_COVERAGE_P(iseq) (ISEQ_COVERAGE(iseq) && ISEQ_BRANCH_COVERAGE(iseq))
871
872static void
873pm_compile_branch_condition(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const pm_node_t *cond,
874 LABEL *then_label, LABEL *else_label, bool popped, pm_scope_node_t *scope_node);
875
876static void
877pm_compile_logical(rb_iseq_t *iseq, LINK_ANCHOR *const ret, pm_node_t *cond, LABEL *then_label, LABEL *else_label, bool popped, pm_scope_node_t *scope_node)
878{
879 const pm_node_location_t location = PM_NODE_START_LOCATION(scope_node->parser, cond);
880
881 DECL_ANCHOR(seq);
882
883 LABEL *label = NEW_LABEL(location.line);
884 if (!then_label) then_label = label;
885 else if (!else_label) else_label = label;
886
887 pm_compile_branch_condition(iseq, seq, cond, then_label, else_label, popped, scope_node);
888
889 if (LIST_INSN_SIZE_ONE(seq)) {
890 INSN *insn = (INSN *) ELEM_FIRST_INSN(FIRST_ELEMENT(seq));
891 if (insn->insn_id == BIN(jump) && (LABEL *)(insn->operands[0]) == label) return;
892 }
893
894 if (!label->refcnt) {
895 if (popped) PUSH_INSN(ret, location, putnil);
896 }
897 else {
898 PUSH_LABEL(seq, label);
899 }
900
901 PUSH_SEQ(ret, seq);
902 return;
903}
904
905static void
906pm_compile_flip_flop_bound(rb_iseq_t *iseq, const pm_node_t *node, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
907{
908 const pm_node_location_t location = PM_NODE_START_LOCATION(scope_node->parser, node);
909
910 if (PM_NODE_TYPE_P(node, PM_INTEGER_NODE)) {
911 PM_COMPILE_NOT_POPPED(node);
912
913 VALUE operand = ID2SYM(rb_intern("$."));
914 PUSH_INSN1(ret, location, getglobal, operand);
915
916 PUSH_SEND(ret, location, idEq, INT2FIX(1));
917 if (popped) PUSH_INSN(ret, location, pop);
918 }
919 else {
920 PM_COMPILE(node);
921 }
922}
923
924static void
925pm_compile_flip_flop(const pm_flip_flop_node_t *flip_flop_node, LABEL *else_label, LABEL *then_label, rb_iseq_t *iseq, const int lineno, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
926{
927 const pm_node_location_t location = { .line = lineno, .node_id = -1 };
928 LABEL *lend = NEW_LABEL(location.line);
929
930 int again = !(flip_flop_node->base.flags & PM_RANGE_FLAGS_EXCLUDE_END);
931
932 rb_num_t count = ISEQ_FLIP_CNT_INCREMENT(ISEQ_BODY(iseq)->local_iseq) + VM_SVAR_FLIPFLOP_START;
933 VALUE key = INT2FIX(count);
934
935 PUSH_INSN2(ret, location, getspecial, key, INT2FIX(0));
936 PUSH_INSNL(ret, location, branchif, lend);
937
938 if (flip_flop_node->left) {
939 pm_compile_flip_flop_bound(iseq, flip_flop_node->left, ret, popped, scope_node);
940 }
941 else {
942 PUSH_INSN(ret, location, putnil);
943 }
944
945 PUSH_INSNL(ret, location, branchunless, else_label);
946 PUSH_INSN1(ret, location, putobject, Qtrue);
947 PUSH_INSN1(ret, location, setspecial, key);
948 if (!again) {
949 PUSH_INSNL(ret, location, jump, then_label);
950 }
951
952 PUSH_LABEL(ret, lend);
953 if (flip_flop_node->right) {
954 pm_compile_flip_flop_bound(iseq, flip_flop_node->right, ret, popped, scope_node);
955 }
956 else {
957 PUSH_INSN(ret, location, putnil);
958 }
959
960 PUSH_INSNL(ret, location, branchunless, then_label);
961 PUSH_INSN1(ret, location, putobject, Qfalse);
962 PUSH_INSN1(ret, location, setspecial, key);
963 PUSH_INSNL(ret, location, jump, then_label);
964}
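
/*
 * Flip-flop state is kept in a special variable slot on the frame: the
 * getspecial/setspecial instructions above are keyed by VM_SVAR_FLIPFLOP_START
 * plus a per-iseq counter, so each flip-flop in a frame gets its own slot.
 */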
965
966static void pm_compile_defined_expr(rb_iseq_t *iseq, const pm_node_t *node, const pm_node_location_t *node_location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node, bool in_condition);
967
968static void
969pm_compile_branch_condition(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const pm_node_t *cond, LABEL *then_label, LABEL *else_label, bool popped, pm_scope_node_t *scope_node)
970{
971 const pm_node_location_t location = PM_NODE_START_LOCATION(scope_node->parser, cond);
972
973again:
974 switch (PM_NODE_TYPE(cond)) {
975 case PM_AND_NODE: {
976 const pm_and_node_t *cast = (const pm_and_node_t *) cond;
977 pm_compile_logical(iseq, ret, cast->left, NULL, else_label, popped, scope_node);
978
979 cond = cast->right;
980 goto again;
981 }
982 case PM_OR_NODE: {
983 const pm_or_node_t *cast = (const pm_or_node_t *) cond;
984 pm_compile_logical(iseq, ret, cast->left, then_label, NULL, popped, scope_node);
985
986 cond = cast->right;
987 goto again;
988 }
989 case PM_FALSE_NODE:
990 case PM_NIL_NODE:
991 PUSH_INSNL(ret, location, jump, else_label);
992 return;
993 case PM_FLOAT_NODE:
994 case PM_IMAGINARY_NODE:
995 case PM_INTEGER_NODE:
996 case PM_LAMBDA_NODE:
997 case PM_RATIONAL_NODE:
998 case PM_REGULAR_EXPRESSION_NODE:
999 case PM_STRING_NODE:
1000 case PM_SYMBOL_NODE:
1001 case PM_TRUE_NODE:
1002 PUSH_INSNL(ret, location, jump, then_label);
1003 return;
1004 case PM_FLIP_FLOP_NODE:
1005 pm_compile_flip_flop((const pm_flip_flop_node_t *) cond, else_label, then_label, iseq, location.line, ret, popped, scope_node);
1006 return;
1007 case PM_DEFINED_NODE: {
1008 const pm_defined_node_t *cast = (const pm_defined_node_t *) cond;
1009 pm_compile_defined_expr(iseq, cast->value, &location, ret, popped, scope_node, true);
1010 break;
1011 }
1012 default: {
1013 DECL_ANCHOR(cond_seq);
1014 pm_compile_node(iseq, cond, cond_seq, false, scope_node);
1015
1016 if (LIST_INSN_SIZE_ONE(cond_seq)) {
1017 INSN *insn = (INSN *) ELEM_FIRST_INSN(FIRST_ELEMENT(cond_seq));
1018
1019 if (insn->insn_id == BIN(putobject)) {
1020 if (RTEST(insn->operands[0])) {
1021 PUSH_INSNL(ret, location, jump, then_label);
1022 // maybe unreachable
1023 return;
1024 }
1025 else {
1026 PUSH_INSNL(ret, location, jump, else_label);
1027 return;
1028 }
1029 }
1030 }
1031
1032 PUSH_SEQ(ret, cond_seq);
1033 break;
1034 }
1035 }
1036
1037 PUSH_INSNL(ret, location, branchunless, else_label);
1038 PUSH_INSNL(ret, location, jump, then_label);
1039}
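
/*
 * In the default case above the condition value is left on the stack and
 * lowered to branchunless else_label followed by jump then_label; literal
 * predicates (nil, false, true, numbers, strings, and so on) are folded into
 * a single unconditional jump.
 */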
1040
1044static void
1045pm_compile_conditional(rb_iseq_t *iseq, const pm_node_location_t *node_location, pm_node_type_t type, const pm_node_t *node, const pm_statements_node_t *statements, const pm_node_t *subsequent, const pm_node_t *predicate, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
1046{
1047 const pm_node_location_t location = *node_location;
1048 LABEL *then_label = NEW_LABEL(location.line);
1049 LABEL *else_label = NEW_LABEL(location.line);
1050 LABEL *end_label = NULL;
1051
1052 DECL_ANCHOR(cond_seq);
1053 pm_compile_branch_condition(iseq, cond_seq, predicate, then_label, else_label, false, scope_node);
1054 PUSH_SEQ(ret, cond_seq);
1055
1056 rb_code_location_t conditional_location = { 0 };
1057 VALUE branches = Qfalse;
1058
1059 if (then_label->refcnt && else_label->refcnt && PM_BRANCH_COVERAGE_P(iseq)) {
1060 conditional_location = pm_code_location(scope_node, node);
1061 branches = decl_branch_base(iseq, PTR2NUM(node), &conditional_location, type == PM_IF_NODE ? "if" : "unless");
1062 }
1063
1064 if (then_label->refcnt) {
1065 PUSH_LABEL(ret, then_label);
1066
1067 DECL_ANCHOR(then_seq);
1068
1069 if (statements != NULL) {
1070 pm_compile_node(iseq, (const pm_node_t *) statements, then_seq, popped, scope_node);
1071 }
1072 else if (!popped) {
1073 PUSH_SYNTHETIC_PUTNIL(then_seq, iseq);
1074 }
1075
1076 if (else_label->refcnt) {
1077 // Establish branch coverage for the then block.
1078 if (PM_BRANCH_COVERAGE_P(iseq)) {
1079 rb_code_location_t branch_location;
1080
1081 if (statements != NULL) {
1082 branch_location = pm_code_location(scope_node, (const pm_node_t *) statements);
1083 } else if (type == PM_IF_NODE) {
1084 pm_line_column_t predicate_end = PM_NODE_END_LINE_COLUMN(scope_node->parser, predicate);
1085 branch_location = (rb_code_location_t) {
1086 .beg_pos = { .lineno = predicate_end.line, .column = predicate_end.column },
1087 .end_pos = { .lineno = predicate_end.line, .column = predicate_end.column }
1088 };
1089 } else {
1090 branch_location = conditional_location;
1091 }
1092
1093 add_trace_branch_coverage(iseq, ret, &branch_location, branch_location.beg_pos.column, 0, type == PM_IF_NODE ? "then" : "else", branches);
1094 }
1095
1096 end_label = NEW_LABEL(location.line);
1097 PUSH_INSNL(then_seq, location, jump, end_label);
1098 if (!popped) PUSH_INSN(then_seq, location, pop);
1099 }
1100
1101 PUSH_SEQ(ret, then_seq);
1102 }
1103
1104 if (else_label->refcnt) {
1105 PUSH_LABEL(ret, else_label);
1106
1107 DECL_ANCHOR(else_seq);
1108
1109 if (subsequent != NULL) {
1110 pm_compile_node(iseq, subsequent, else_seq, popped, scope_node);
1111 }
1112 else if (!popped) {
1113 PUSH_SYNTHETIC_PUTNIL(else_seq, iseq);
1114 }
1115
1116 // Establish branch coverage for the else block.
1117 if (then_label->refcnt && PM_BRANCH_COVERAGE_P(iseq)) {
1118 rb_code_location_t branch_location;
1119
1120 if (subsequent == NULL) {
1121 branch_location = conditional_location;
1122 } else if (PM_NODE_TYPE_P(subsequent, PM_ELSE_NODE)) {
1123 const pm_else_node_t *else_node = (const pm_else_node_t *) subsequent;
1124 branch_location = pm_code_location(scope_node, else_node->statements != NULL ? ((const pm_node_t *) else_node->statements) : (const pm_node_t *) else_node);
1125 } else {
1126 branch_location = pm_code_location(scope_node, (const pm_node_t *) subsequent);
1127 }
1128
1129 add_trace_branch_coverage(iseq, ret, &branch_location, branch_location.beg_pos.column, 1, type == PM_IF_NODE ? "else" : "then", branches);
1130 }
1131
1132 PUSH_SEQ(ret, else_seq);
1133 }
1134
1135 if (end_label) {
1136 PUSH_LABEL(ret, end_label);
1137 }
1138
1139 return;
1140}
1141
1145static void
1146pm_compile_loop(rb_iseq_t *iseq, const pm_node_location_t *node_location, pm_node_flags_t flags, enum pm_node_type type, const pm_node_t *node, const pm_statements_node_t *statements, const pm_node_t *predicate, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
1147{
1148 const pm_node_location_t location = *node_location;
1149
1150 LABEL *prev_start_label = ISEQ_COMPILE_DATA(iseq)->start_label;
1151 LABEL *prev_end_label = ISEQ_COMPILE_DATA(iseq)->end_label;
1152 LABEL *prev_redo_label = ISEQ_COMPILE_DATA(iseq)->redo_label;
1153
1154 LABEL *next_label = ISEQ_COMPILE_DATA(iseq)->start_label = NEW_LABEL(location.line); /* next */
1155 LABEL *redo_label = ISEQ_COMPILE_DATA(iseq)->redo_label = NEW_LABEL(location.line); /* redo */
1156 LABEL *break_label = ISEQ_COMPILE_DATA(iseq)->end_label = NEW_LABEL(location.line); /* break */
1157 LABEL *end_label = NEW_LABEL(location.line);
1158 LABEL *adjust_label = NEW_LABEL(location.line);
1159
1160 LABEL *next_catch_label = NEW_LABEL(location.line);
1161 LABEL *tmp_label = NULL;
1162
1163 // We're pushing onto the ensure stack because breaks need to break out of
1164 // this loop and not break into the ensure statements within the same
1165 // lexical scope.
    struct iseq_compile_data_ensure_node_stack enl;
    push_ensure_entry(iseq, &enl, NULL, NULL);
1168
1169 // begin; end while true
1170 if (flags & PM_LOOP_FLAGS_BEGIN_MODIFIER) {
1171 tmp_label = NEW_LABEL(location.line);
1172 PUSH_INSNL(ret, location, jump, tmp_label);
1173 }
1174 else {
1175 // while true; end
1176 PUSH_INSNL(ret, location, jump, next_label);
1177 }
1178
1179 PUSH_LABEL(ret, adjust_label);
1180 PUSH_INSN(ret, location, putnil);
1181 PUSH_LABEL(ret, next_catch_label);
1182 PUSH_INSN(ret, location, pop);
1183 PUSH_INSNL(ret, location, jump, next_label);
1184 if (tmp_label) PUSH_LABEL(ret, tmp_label);
1185
1186 PUSH_LABEL(ret, redo_label);
1187
1188 // Establish branch coverage for the loop.
1189 if (PM_BRANCH_COVERAGE_P(iseq)) {
1190 rb_code_location_t loop_location = pm_code_location(scope_node, node);
1191 VALUE branches = decl_branch_base(iseq, PTR2NUM(node), &loop_location, type == PM_WHILE_NODE ? "while" : "until");
1192
1193 rb_code_location_t branch_location = statements != NULL ? pm_code_location(scope_node, (const pm_node_t *) statements) : loop_location;
1194 add_trace_branch_coverage(iseq, ret, &branch_location, branch_location.beg_pos.column, 0, "body", branches);
1195 }
1196
1197 if (statements != NULL) PM_COMPILE_POPPED((const pm_node_t *) statements);
1198 PUSH_LABEL(ret, next_label);
1199
1200 if (type == PM_WHILE_NODE) {
1201 pm_compile_branch_condition(iseq, ret, predicate, redo_label, end_label, popped, scope_node);
1202 }
1203 else if (type == PM_UNTIL_NODE) {
1204 pm_compile_branch_condition(iseq, ret, predicate, end_label, redo_label, popped, scope_node);
1205 }
1206
1207 PUSH_LABEL(ret, end_label);
1208 PUSH_ADJUST_RESTORE(ret, adjust_label);
1209 PUSH_INSN(ret, location, putnil);
1210
1211 PUSH_LABEL(ret, break_label);
1212 if (popped) PUSH_INSN(ret, location, pop);
1213
1214 PUSH_CATCH_ENTRY(CATCH_TYPE_BREAK, redo_label, break_label, NULL, break_label);
1215 PUSH_CATCH_ENTRY(CATCH_TYPE_NEXT, redo_label, break_label, NULL, next_catch_label);
1216 PUSH_CATCH_ENTRY(CATCH_TYPE_REDO, redo_label, break_label, NULL, ISEQ_COMPILE_DATA(iseq)->redo_label);
1217
1218 ISEQ_COMPILE_DATA(iseq)->start_label = prev_start_label;
1219 ISEQ_COMPILE_DATA(iseq)->end_label = prev_end_label;
1220 ISEQ_COMPILE_DATA(iseq)->redo_label = prev_redo_label;
1221 ISEQ_COMPILE_DATA(iseq)->ensure_node_stack = ISEQ_COMPILE_DATA(iseq)->ensure_node_stack->prev;
1222
1223 return;
1224}
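
/*
 * The catch table entries pushed above are what implement break/next/redo for
 * the loop: each entry covers the instruction range from redo_label to
 * break_label and redirects the corresponding non-local jump to its
 * continuation label.
 */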
1225
// This recurses through scopes and finds the local index at any scope level.
// The search starts start_depth scopes up, and the depth of the scope that
// defines the local is recorded as the level of the returned pm_local_index_t.
1229static pm_local_index_t
1230pm_lookup_local_index(rb_iseq_t *iseq, const pm_scope_node_t *scope_node, pm_constant_id_t constant_id, int start_depth)
1231{
1232 pm_local_index_t lindex = { 0 };
1233 st_data_t local_index;
1234
1235 int level;
1236 for (level = 0; level < start_depth; level++) {
1237 scope_node = scope_node->previous;
1238 }
1239
1240 while (!st_lookup(scope_node->index_lookup_table, constant_id, &local_index)) {
1241 level++;
1242
1243 if (scope_node->previous) {
1244 scope_node = scope_node->previous;
1245 }
1246 else {
1247 // We have recursed up all scope nodes
1248 // and have not found the local yet
1249 rb_bug("Local with constant_id %u does not exist", (unsigned int) constant_id);
1250 }
1251 }
1252
1253 lindex.level = level;
1254 lindex.index = scope_node->local_table_for_iseq_size - (int) local_index;
1255 return lindex;
1256}
1257
1258// This returns the CRuby ID which maps to the pm_constant_id_t
1259//
1260// Constant_ids in prism are indexes of the constants in prism's constant pool.
1261// We add a constants mapping on the scope_node which is a mapping from
1262// these constant_id indexes to the CRuby IDs that they represent.
1263// This helper method allows easy access to those IDs
1264static ID
1265pm_constant_id_lookup(const pm_scope_node_t *scope_node, pm_constant_id_t constant_id)
1266{
1267 if (constant_id < 1 || constant_id > scope_node->parser->constant_pool.size) {
1268 rb_bug("constant_id out of range: %u", (unsigned int)constant_id);
1269 }
1270 return scope_node->constants[constant_id - 1];
1271}
1272
1273static rb_iseq_t *
1274pm_new_child_iseq(rb_iseq_t *iseq, pm_scope_node_t *node, VALUE name, const rb_iseq_t *parent, enum rb_iseq_type type, int line_no)
1275{
1276 debugs("[new_child_iseq]> ---------------------------------------\n");
1277 int isolated_depth = ISEQ_COMPILE_DATA(iseq)->isolated_depth;
1278 int error_state;
1279 rb_iseq_t *ret_iseq = pm_iseq_new_with_opt(node, name,
1280 rb_iseq_path(iseq), rb_iseq_realpath(iseq),
1281 line_no, parent,
1282 isolated_depth ? isolated_depth + 1 : 0,
1283 type, ISEQ_COMPILE_DATA(iseq)->option, &error_state);
1284
1285 if (error_state) {
1286 pm_scope_node_destroy(node);
1287 RUBY_ASSERT(ret_iseq == NULL);
1288 rb_jump_tag(error_state);
1289 }
1290 debugs("[new_child_iseq]< ---------------------------------------\n");
1291 return ret_iseq;
1292}
1293
1294static int
1295pm_compile_class_path(rb_iseq_t *iseq, const pm_node_t *node, const pm_node_location_t *node_location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
1296{
1297 if (PM_NODE_TYPE_P(node, PM_CONSTANT_PATH_NODE)) {
1298 const pm_node_t *parent = ((const pm_constant_path_node_t *) node)->parent;
1299
1300 if (parent) {
1301 /* Bar::Foo */
1302 PM_COMPILE(parent);
1303 return VM_DEFINECLASS_FLAG_SCOPED;
1304 }
1305 else {
1306 /* toplevel class ::Foo */
1307 PUSH_INSN1(ret, *node_location, putobject, rb_cObject);
1308 return VM_DEFINECLASS_FLAG_SCOPED;
1309 }
1310 }
1311 else {
1312 /* class at cbase Foo */
1313 PUSH_INSN1(ret, *node_location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_CONST_BASE));
1314 return 0;
1315 }
1316}
1317
1322static void
1323pm_compile_call_and_or_write_node(rb_iseq_t *iseq, bool and_node, const pm_node_t *receiver, const pm_node_t *value, pm_constant_id_t write_name, pm_constant_id_t read_name, bool safe_nav, const pm_node_location_t *node_location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
1324{
1325 const pm_node_location_t location = *node_location;
1326 LABEL *lfin = NEW_LABEL(location.line);
1327 LABEL *lcfin = NEW_LABEL(location.line);
1328 LABEL *lskip = NULL;
1329
1330 int flag = PM_NODE_TYPE_P(receiver, PM_SELF_NODE) ? VM_CALL_FCALL : 0;
1331 ID id_read_name = pm_constant_id_lookup(scope_node, read_name);
1332
1333 PM_COMPILE_NOT_POPPED(receiver);
1334 if (safe_nav) {
1335 lskip = NEW_LABEL(location.line);
1336 PUSH_INSN(ret, location, dup);
1337 PUSH_INSNL(ret, location, branchnil, lskip);
1338 }
1339
1340 PUSH_INSN(ret, location, dup);
1341 PUSH_SEND_WITH_FLAG(ret, location, id_read_name, INT2FIX(0), INT2FIX(flag));
1342 if (!popped) PUSH_INSN(ret, location, dup);
1343
1344 if (and_node) {
1345 PUSH_INSNL(ret, location, branchunless, lcfin);
1346 }
1347 else {
1348 PUSH_INSNL(ret, location, branchif, lcfin);
1349 }
1350
1351 if (!popped) PUSH_INSN(ret, location, pop);
1352 PM_COMPILE_NOT_POPPED(value);
1353
1354 if (!popped) {
1355 PUSH_INSN(ret, location, swap);
1356 PUSH_INSN1(ret, location, topn, INT2FIX(1));
1357 }
1358
1359 ID id_write_name = pm_constant_id_lookup(scope_node, write_name);
1360 PUSH_SEND_WITH_FLAG(ret, location, id_write_name, INT2FIX(1), INT2FIX(flag));
1361 PUSH_INSNL(ret, location, jump, lfin);
1362
1363 PUSH_LABEL(ret, lcfin);
1364 if (!popped) PUSH_INSN(ret, location, swap);
1365
1366 PUSH_LABEL(ret, lfin);
1367
1368 if (lskip && popped) PUSH_LABEL(ret, lskip);
1369 PUSH_INSN(ret, location, pop);
1370 if (lskip && !popped) PUSH_LABEL(ret, lskip);
1371}
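
/*
 * This handles attribute and/or writes along the lines of:
 *
 *     receiver.name &&= value      # and_node = true
 *     receiver.name ||= value      # and_node = false
 *     receiver&.name ||= value     # safe_nav = true
 *
 * where read_name resolves to :name and write_name to :name=.
 */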
1372
1373static void pm_compile_shareable_constant_value(rb_iseq_t *iseq, const pm_node_t *node, const pm_node_flags_t shareability, VALUE path, LINK_ANCHOR *const ret, pm_scope_node_t *scope_node, bool top);
1374
1380static void
1381pm_compile_hash_elements(rb_iseq_t *iseq, const pm_node_t *node, const pm_node_list_t *elements, const pm_node_flags_t shareability, VALUE path, bool argument, LINK_ANCHOR *const ret, pm_scope_node_t *scope_node)
1382{
1383 const pm_node_location_t location = PM_NODE_START_LOCATION(scope_node->parser, node);
1384
1385 // If this element is not popped, then we need to create the hash on the
1386 // stack. Neighboring plain assoc nodes should be grouped together (either
1387 // by newhash or hash merge). Double splat nodes should be merged using the
1388 // merge_kwd method call.
1389 const int max_stack_length = 0x100;
1390 const unsigned int min_tmp_hash_length = 0x800;
1391
1392 int stack_length = 0;
1393 bool first_chunk = true;
1394
1395 // This is an optimization wherein we keep track of whether or not the
1396 // previous element was a static literal. If it was, then we do not attempt
1397 // to check if we have a subhash that can be optimized. If it was not, then
1398 // we do check.
1399 bool static_literal = false;
1400
1401 DECL_ANCHOR(anchor);
1402
1403 // Convert pushed elements to a hash, and merge if needed.
1404#define FLUSH_CHUNK \
1405 if (stack_length) { \
1406 if (first_chunk) { \
1407 PUSH_SEQ(ret, anchor); \
1408 PUSH_INSN1(ret, location, newhash, INT2FIX(stack_length)); \
1409 first_chunk = false; \
1410 } \
1411 else { \
1412 PUSH_INSN1(ret, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE)); \
1413 PUSH_INSN(ret, location, swap); \
1414 PUSH_SEQ(ret, anchor); \
1415 PUSH_SEND(ret, location, id_core_hash_merge_ptr, INT2FIX(stack_length + 1)); \
1416 } \
1417 INIT_ANCHOR(anchor); \
1418 stack_length = 0; \
1419 }
1420
1421 for (size_t index = 0; index < elements->size; index++) {
1422 const pm_node_t *element = elements->nodes[index];
1423
1424 switch (PM_NODE_TYPE(element)) {
1425 case PM_ASSOC_NODE: {
1426 // Pre-allocation check (this branch can be omitted).
1427 if (
1428 (shareability == 0) &&
1429 PM_NODE_FLAG_P(element, PM_NODE_FLAG_STATIC_LITERAL) && (
1430 (!static_literal && ((index + min_tmp_hash_length) < elements->size)) ||
1431 (first_chunk && stack_length == 0)
1432 )
1433 ) {
1434 // Count the elements that are statically-known.
1435 size_t count = 1;
1436 while (index + count < elements->size && PM_NODE_FLAG_P(elements->nodes[index + count], PM_NODE_FLAG_STATIC_LITERAL)) count++;
1437
1438 if ((first_chunk && stack_length == 0) || count >= min_tmp_hash_length) {
1439 // The subsequence of elements in this hash is long enough
1440 // to merit its own hash.
1441 VALUE ary = rb_ary_hidden_new(count);
1442
1443 // Create a hidden hash.
1444 for (size_t tmp_end = index + count; index < tmp_end; index++) {
1445 const pm_assoc_node_t *assoc = (const pm_assoc_node_t *) elements->nodes[index];
1446
1447 VALUE elem[2] = {
1448 pm_static_literal_value(iseq, assoc->key, scope_node),
1449 pm_static_literal_value(iseq, assoc->value, scope_node)
1450 };
1451
1452 rb_ary_cat(ary, elem, 2);
1453 }
1454 index --;
1455
1456 VALUE hash = rb_hash_new_with_size(RARRAY_LEN(ary) / 2);
1457 rb_hash_bulk_insert(RARRAY_LEN(ary), RARRAY_CONST_PTR(ary), hash);
1458 hash = rb_obj_hide(hash);
1459 RB_OBJ_SET_FROZEN_SHAREABLE(hash);
1460
1461 // Emit optimized code.
1462 FLUSH_CHUNK;
1463 if (first_chunk) {
1464 PUSH_INSN1(ret, location, duphash, hash);
1465 first_chunk = false;
1466 }
1467 else {
1468 PUSH_INSN1(ret, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
1469 PUSH_INSN(ret, location, swap);
1470 PUSH_INSN1(ret, location, putobject, hash);
1471 PUSH_SEND(ret, location, id_core_hash_merge_kwd, INT2FIX(2));
1472 }
1473
1474 break;
1475 }
1476 else {
1477 static_literal = true;
1478 }
1479 }
1480 else {
1481 static_literal = false;
1482 }
1483
1484 // If this is a plain assoc node, then we can compile it directly
1485 // and then add the total number of values on the stack.
1486 if (shareability == 0) {
1487 pm_compile_node(iseq, element, anchor, false, scope_node);
1488 }
1489 else {
1490 const pm_assoc_node_t *assoc = (const pm_assoc_node_t *) element;
1491 pm_compile_shareable_constant_value(iseq, assoc->key, shareability, path, ret, scope_node, false);
1492 pm_compile_shareable_constant_value(iseq, assoc->value, shareability, path, ret, scope_node, false);
1493 }
1494
1495 if ((stack_length += 2) >= max_stack_length) FLUSH_CHUNK;
1496 break;
1497 }
1498 case PM_ASSOC_SPLAT_NODE: {
1499 FLUSH_CHUNK;
1500
1501 const pm_assoc_splat_node_t *assoc_splat = (const pm_assoc_splat_node_t *) element;
1502 bool empty_hash = assoc_splat->value != NULL && (
1503 (PM_NODE_TYPE_P(assoc_splat->value, PM_HASH_NODE) && ((const pm_hash_node_t *) assoc_splat->value)->elements.size == 0) ||
1504 PM_NODE_TYPE_P(assoc_splat->value, PM_NIL_NODE)
1505 );
1506
1507 bool first_element = first_chunk && stack_length == 0;
1508 bool last_element = index == elements->size - 1;
1509 bool only_element = first_element && last_element;
1510
1511 if (empty_hash) {
1512 if (only_element && argument) {
                    // **{} appears as the only keyword argument in the method call,
1514 // so it won't be modified.
1515 //
1516 // This is only done for method calls and not for literal
1517 // hashes, because literal hashes should always result in a
1518 // new hash.
1519 PUSH_INSN(ret, location, putnil);
1520 }
1521 else if (first_element) {
1522 // **{} appears as the first keyword argument, so it may be
1523 // modified. We need to create a fresh hash object.
1524 PUSH_INSN1(ret, location, newhash, INT2FIX(0));
1525 }
1526 // Any empty keyword splats that are not the first can be
1527 // ignored since merging an empty hash into the existing hash is
1528 // the same as not merging it.
1529 }
1530 else {
                    // ** is the only keyword argument in the method call. Use it
                    // directly. This will not be flagged as mutable. This is
1533 // directly. This will be not be flagged as mutable. This is
1534 // only done for method calls and not for literal hashes,
1535 // because literal hashes should always result in a new
1536 // hash.
1537 if (shareability == 0) {
1538 PM_COMPILE_NOT_POPPED(element);
1539 }
1540 else {
1541 pm_compile_shareable_constant_value(iseq, element, shareability, path, ret, scope_node, false);
1542 }
1543 }
1544 else {
1545 // There is more than one keyword argument, or this is not a
1546 // method call. In that case, we need to add an empty hash
1547 // (if first keyword), or merge the hash to the accumulated
1548 // hash (if not the first keyword).
1549 PUSH_INSN1(ret, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
1550
1551 if (first_element) {
1552 PUSH_INSN1(ret, location, newhash, INT2FIX(0));
1553 }
1554 else {
1555 PUSH_INSN(ret, location, swap);
1556 }
1557
1558 if (shareability == 0) {
1559 PM_COMPILE_NOT_POPPED(element);
1560 }
1561 else {
1562 pm_compile_shareable_constant_value(iseq, element, shareability, path, ret, scope_node, false);
1563 }
1564
1565 PUSH_SEND(ret, location, id_core_hash_merge_kwd, INT2FIX(2));
1566 }
1567 }
1568
1569 first_chunk = false;
1570 static_literal = false;
1571 break;
1572 }
1573 default:
1574 RUBY_ASSERT("Invalid node type for hash" && false);
1575 break;
1576 }
1577 }
1578
1579 FLUSH_CHUNK;
1580#undef FLUSH_CHUNK
1581}
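
/*
 * Rough sketch of the strategy above: a literal such as { a: 1, b: 2, **h }
 * compiles the plain pairs with newhash (or duphash when they are static
 * literals) and then merges the splatted hash in with a core#hash_merge_kwd
 * call.
 */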
1582
1583#define SPLATARRAY_FALSE 0
1584#define SPLATARRAY_TRUE 1
1585#define DUP_SINGLE_KW_SPLAT 2
1586
// These are implementation details. Users should call pm_setup_args() instead.
1588static int
1589pm_setup_args_core(const pm_arguments_node_t *arguments_node, const pm_node_t *block, int *flags, const bool has_regular_blockarg, struct rb_callinfo_kwarg **kw_arg, int *dup_rest, rb_iseq_t *iseq, LINK_ANCHOR *const ret, pm_scope_node_t *scope_node, const pm_node_location_t *node_location)
1590{
1591 const pm_node_location_t location = *node_location;
1592
1593 int orig_argc = 0;
1594 bool has_splat = false;
1595 bool has_keyword_splat = false;
1596
1597 if (arguments_node == NULL) {
1598 if (*flags & VM_CALL_FCALL) {
1599 *flags |= VM_CALL_VCALL;
1600 }
1601 }
1602 else {
1603 const pm_node_list_t *arguments = &arguments_node->arguments;
1604 has_keyword_splat = PM_NODE_FLAG_P(arguments_node, PM_ARGUMENTS_NODE_FLAGS_CONTAINS_KEYWORD_SPLAT);
1605
1606 // We count the number of non-keyword elements that appear after the splat node so that
1607 // we can eventually pass that count as an argument to newarray.
1608 int post_splat_counter = 0;
1609 const pm_node_t *argument;
1610
1611 PM_NODE_LIST_FOREACH(arguments, index, argument) {
1612 switch (PM_NODE_TYPE(argument)) {
1613 // A keyword hash node contains all keyword arguments as AssocNodes and AssocSplatNodes
1614 case PM_KEYWORD_HASH_NODE: {
1615 const pm_keyword_hash_node_t *keyword_arg = (const pm_keyword_hash_node_t *) argument;
1616 const pm_node_list_t *elements = &keyword_arg->elements;
1617
1618 if (has_keyword_splat || has_splat) {
1619 *flags |= VM_CALL_KW_SPLAT;
1620 has_keyword_splat = true;
1621
1622 if (elements->size > 1 || !(elements->size == 1 && PM_NODE_TYPE_P(elements->nodes[0], PM_ASSOC_SPLAT_NODE))) {
1623 // A new hash will be created for the keyword arguments
1624 // in this case, so mark the method as passing mutable
1625 // keyword splat.
1626 *flags |= VM_CALL_KW_SPLAT_MUT;
1627 pm_compile_hash_elements(iseq, argument, elements, 0, Qundef, true, ret, scope_node);
1628 }
1629 else if (*dup_rest & DUP_SINGLE_KW_SPLAT) {
1630 *flags |= VM_CALL_KW_SPLAT_MUT;
1631 PUSH_INSN1(ret, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
1632 PUSH_INSN1(ret, location, newhash, INT2FIX(0));
1633 pm_compile_hash_elements(iseq, argument, elements, 0, Qundef, true, ret, scope_node);
1634 PUSH_SEND(ret, location, id_core_hash_merge_kwd, INT2FIX(2));
1635 }
1636 else {
1637 pm_compile_hash_elements(iseq, argument, elements, 0, Qundef, true, ret, scope_node);
1638 }
1639 }
1640 else {
1641 // We need to first figure out if all elements of the
1642 // KeywordHashNode are AssocNodes with symbol keys.
1643 if (PM_NODE_FLAG_P(keyword_arg, PM_KEYWORD_HASH_NODE_FLAGS_SYMBOL_KEYS)) {
1644 // If they are all symbol keys then we can pass them as
1645 // keyword arguments. The first thing we need to do is
1646 // deduplicate. We'll do this using the combination of a
1647 // Ruby hash and a Ruby array.
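// For example, `foo(a: 1, b: 2, a: 3)` ends up passing the keywords
// b: and a: (the last occurrence of a duplicated key wins), while the
// value of the earlier duplicate is still compiled in popped form so
// that its side effects are preserved.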
1648 VALUE stored_indices = rb_hash_new();
1649 VALUE keyword_indices = rb_ary_new_capa(elements->size);
1650
1651 size_t size = 0;
1652 for (size_t element_index = 0; element_index < elements->size; element_index++) {
1653 const pm_assoc_node_t *assoc = (const pm_assoc_node_t *) elements->nodes[element_index];
1654
1655 // Retrieve the stored index from the hash for this
1656 // keyword.
1657 VALUE keyword = pm_static_literal_value(iseq, assoc->key, scope_node);
1658 VALUE stored_index = rb_hash_aref(stored_indices, keyword);
1659
1660 // If this keyword was already seen in the hash,
1661 // then mark the array at that index as false and
1662 // decrement the keyword size.
1663 if (!NIL_P(stored_index)) {
1664 rb_ary_store(keyword_indices, NUM2LONG(stored_index), Qfalse);
1665 size--;
1666 }
1667
1668 // Store (and possibly overwrite) the index for this
1669 // keyword in the hash, mark the array at that index
1670 // as true, and increment the keyword size.
1671 rb_hash_aset(stored_indices, keyword, ULONG2NUM(element_index));
1672 rb_ary_store(keyword_indices, (long) element_index, Qtrue);
1673 size++;
1674 }
1675
1676 *kw_arg = rb_xmalloc_mul_add(size, sizeof(VALUE), sizeof(struct rb_callinfo_kwarg));
1677 *flags |= VM_CALL_KWARG;
1678
1679 VALUE *keywords = (*kw_arg)->keywords;
1680 (*kw_arg)->references = 0;
1681 (*kw_arg)->keyword_len = (int) size;
1682
1683 size_t keyword_index = 0;
1684 for (size_t element_index = 0; element_index < elements->size; element_index++) {
1685 const pm_assoc_node_t *assoc = (const pm_assoc_node_t *) elements->nodes[element_index];
1686 bool popped = true;
1687
1688 if (rb_ary_entry(keyword_indices, (long) element_index) == Qtrue) {
1689 keywords[keyword_index++] = pm_static_literal_value(iseq, assoc->key, scope_node);
1690 popped = false;
1691 }
1692
1693 PM_COMPILE(assoc->value);
1694 }
1695
1696 RUBY_ASSERT(keyword_index == size);
1697 }
1698 else {
1699 // If they aren't all symbol keys then we need to
1700 // construct a new hash and pass that as an argument.
1701 orig_argc++;
1702 *flags |= VM_CALL_KW_SPLAT;
1703
1704 size_t size = elements->size;
1705 if (size > 1) {
1706 // A new hash will be created for the keyword
1707 // arguments in this case, so mark the method as
1708 // passing mutable keyword splat.
1709 *flags |= VM_CALL_KW_SPLAT_MUT;
1710 }
1711
1712 for (size_t element_index = 0; element_index < size; element_index++) {
1713 const pm_assoc_node_t *assoc = (const pm_assoc_node_t *) elements->nodes[element_index];
1714 PM_COMPILE_NOT_POPPED(assoc->key);
1715 PM_COMPILE_NOT_POPPED(assoc->value);
1716 }
1717
1718 PUSH_INSN1(ret, location, newhash, INT2FIX(size * 2));
1719 }
1720 }
1721 break;
1722 }
1723 case PM_SPLAT_NODE: {
1724 *flags |= VM_CALL_ARGS_SPLAT;
1725 const pm_splat_node_t *splat_node = (const pm_splat_node_t *) argument;
1726
1727 if (splat_node->expression) {
1728 PM_COMPILE_NOT_POPPED(splat_node->expression);
1729 }
1730 else {
1731 pm_local_index_t index = pm_lookup_local_index(iseq, scope_node, PM_CONSTANT_MULT, 0);
1732 PUSH_GETLOCAL(ret, location, index.index, index.level);
1733 }
1734
1735 bool first_splat = !has_splat;
1736
1737 if (first_splat) {
1738 // If this is the first splat array seen and it's not the
1739 // last parameter, we want splatarray to dup it.
1740 //
1741 // foo(a, *b, c)
1742 // ^^
1743 if (index + 1 < arguments->size || has_regular_blockarg) {
1744 PUSH_INSN1(ret, location, splatarray, (*dup_rest & SPLATARRAY_TRUE) ? Qtrue : Qfalse);
1745 if (*dup_rest & SPLATARRAY_TRUE) *dup_rest &= ~SPLATARRAY_TRUE;
1746 }
1747 // If this is the first splat array seen and it's the last
1748 // parameter, we don't want splatarray to dup it.
1749 //
1750 // foo(a, *b)
1751 // ^^
1752 else {
1753 PUSH_INSN1(ret, location, splatarray, Qfalse);
1754 }
1755 }
1756 else {
1757 // If this is not the first splat array seen, we don't need
1758 // splatarray to dup it; we just concatenate it onto the
1759 // array that has already been built.
1760 //
1761 // foo(a, *b, *c)
1762 // ^^
1763 PUSH_INSN(ret, location, concattoarray);
1764 }
1765
1766 has_splat = true;
1767 post_splat_counter = 0;
1768
1769 break;
1770 }
1771 case PM_FORWARDING_ARGUMENTS_NODE: { // not counted in argc return value
1772 iseq_set_use_block(ISEQ_BODY(iseq)->local_iseq);
1773
1774 if (ISEQ_BODY(ISEQ_BODY(iseq)->local_iseq)->param.flags.forwardable) {
1775 *flags |= VM_CALL_FORWARDING;
1776
1777 pm_local_index_t mult_local = pm_lookup_local_index(iseq, scope_node, PM_CONSTANT_DOT3, 0);
1778 PUSH_GETLOCAL(ret, location, mult_local.index, mult_local.level);
1779
1780 break;
1781 }
1782
1783 if (has_splat) {
1784 // If we already have a splat, we're concatenating onto the existing array
1785 orig_argc += 1;
1786 } else {
1787 orig_argc += 2;
1788 }
1789
1790 *flags |= VM_CALL_ARGS_SPLAT | VM_CALL_ARGS_BLOCKARG | VM_CALL_KW_SPLAT;
1791
1792 // Forwarding arguments nodes are treated as foo(*, **, &).
1793 // That is, foo(...) is equivalent to foo(*, **, &), and as such
1794 // the local table for this method is known in advance.
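// As a rough sketch:
//
//     def foo(...)
//       bar(...)   # forwarded, roughly, as bar(*, **, &)
//     end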
1795 //
1796 // Push the *
1797 pm_local_index_t mult_local = pm_lookup_local_index(iseq, scope_node, PM_CONSTANT_MULT, 0);
1798 PUSH_GETLOCAL(ret, location, mult_local.index, mult_local.level);
1799
1800 if (has_splat) {
1801 // If we already have a splat, we need to concatenate arrays
1802 PUSH_INSN(ret, location, concattoarray);
1803 } else {
1804 PUSH_INSN1(ret, location, splatarray, Qfalse);
1805 }
1806
1807 // Push the **
1808 pm_local_index_t pow_local = pm_lookup_local_index(iseq, scope_node, PM_CONSTANT_POW, 0);
1809 PUSH_GETLOCAL(ret, location, pow_local.index, pow_local.level);
1810
1811 // Push the &
1812 pm_local_index_t and_local = pm_lookup_local_index(iseq, scope_node, PM_CONSTANT_AND, 0);
1813 PUSH_INSN2(ret, location, getblockparamproxy, INT2FIX(and_local.index + VM_ENV_DATA_SIZE - 1), INT2FIX(and_local.level));
1814
1815 break;
1816 }
1817 default: {
1818 post_splat_counter++;
1819 PM_COMPILE_NOT_POPPED(argument);
1820
1821 // If we've already seen a splat, we need to process everything
1822 // that comes after the splat.
1823 if (has_splat) {
1824 // Stack items are turned into an array and concatenated in
1825 // the following cases:
1826 //
1827 // If the next node is a splat:
1828 //
1829 // foo(*a, b, *c)
1830 //
1831 // If the next node is a kwarg or kwarg splat:
1832 //
1833 // foo(*a, b, c: :d)
1834 // foo(*a, b, **c)
1835 //
1836 // If the next node is NULL (we have hit the end):
1837 //
1838 // foo(*a, b)
1839 if (index == arguments->size - 1) {
1840 RUBY_ASSERT(post_splat_counter > 0);
1841 PUSH_INSN1(ret, location, pushtoarray, INT2FIX(post_splat_counter));
1842 }
1843 else {
1844 pm_node_t *next_arg = arguments->nodes[index + 1];
1845
1846 switch (PM_NODE_TYPE(next_arg)) {
1847 // A keyword hash node contains all keyword arguments as AssocNodes and AssocSplatNodes
1848 case PM_KEYWORD_HASH_NODE: {
1849 PUSH_INSN1(ret, location, newarray, INT2FIX(post_splat_counter));
1850 PUSH_INSN(ret, location, concatarray);
1851 break;
1852 }
1853 case PM_SPLAT_NODE: {
1854 PUSH_INSN1(ret, location, newarray, INT2FIX(post_splat_counter));
1855 PUSH_INSN(ret, location, concatarray);
1856 break;
1857 }
1858 default:
1859 break;
1860 }
1861 }
1862 }
1863 else {
1864 orig_argc++;
1865 }
1866 }
1867 }
1868 }
1869 }
1870
1871 if (has_splat) orig_argc++;
1872 if (has_keyword_splat) orig_argc++;
1873 return orig_argc;
1874}
1875
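// Returns true if, roughly speaking, evaluating the given node could run
// arbitrary user code and therefore might mutate an array or hash that is
// being splatted in the same call, in which case the splat needs to be
// duplicated first. Simple reads and literals return false; anything else is
// conservatively treated as true.
//
// For example, `foo(*a, k: 1)` can skip the dup, while `foo(*a, k: bar)`
// cannot, because calling `bar` could mutate `a`.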
1880static inline bool
1881pm_setup_args_dup_rest_p(const pm_node_t *node)
1882{
1883 switch (PM_NODE_TYPE(node)) {
1884 case PM_BACK_REFERENCE_READ_NODE:
1885 case PM_CLASS_VARIABLE_READ_NODE:
1886 case PM_CONSTANT_READ_NODE:
1887 case PM_FALSE_NODE:
1888 case PM_FLOAT_NODE:
1889 case PM_GLOBAL_VARIABLE_READ_NODE:
1890 case PM_IMAGINARY_NODE:
1891 case PM_INSTANCE_VARIABLE_READ_NODE:
1892 case PM_INTEGER_NODE:
1893 case PM_LAMBDA_NODE:
1894 case PM_LOCAL_VARIABLE_READ_NODE:
1895 case PM_NIL_NODE:
1896 case PM_NUMBERED_REFERENCE_READ_NODE:
1897 case PM_RATIONAL_NODE:
1898 case PM_REGULAR_EXPRESSION_NODE:
1899 case PM_SELF_NODE:
1900 case PM_STRING_NODE:
1901 case PM_SYMBOL_NODE:
1902 case PM_TRUE_NODE:
1903 return false;
1904 case PM_CONSTANT_PATH_NODE: {
1905 const pm_constant_path_node_t *cast = (const pm_constant_path_node_t *) node;
1906 if (cast->parent != NULL) {
1907 return pm_setup_args_dup_rest_p(cast->parent);
1908 }
1909 return false;
1910 }
1911 case PM_IMPLICIT_NODE:
1912 return pm_setup_args_dup_rest_p(((const pm_implicit_node_t *) node)->value);
1913 case PM_ARRAY_NODE: {
1914 const pm_array_node_t *cast = (const pm_array_node_t *) node;
1915 for (size_t index = 0; index < cast->elements.size; index++) {
1916 if (pm_setup_args_dup_rest_p(cast->elements.nodes[index])) {
1917 return true;
1918 }
1919 }
1920 return false;
1921 }
1922 default:
1923 return true;
1924 }
1925}
1926
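// Set up the arguments for a method or super call. This compiles the
// instructions that push the arguments (and, if present, the block argument)
// onto the stack, fills in *flags and *kw_arg for the call site, and returns
// the argument count to use for the call instruction.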
1930static int
1931pm_setup_args(const pm_arguments_node_t *arguments_node, const pm_node_t *block, int *flags, struct rb_callinfo_kwarg **kw_arg, rb_iseq_t *iseq, LINK_ANCHOR *const ret, pm_scope_node_t *scope_node, const pm_node_location_t *node_location)
1932{
1933 int dup_rest = SPLATARRAY_TRUE;
1934
1935 const pm_node_list_t *arguments;
1936 size_t arguments_size;
1937
1938 // Calls like foo(1, *f, **hash) that use both splat and kwsplat could be
1939 // eligible for eliding the dup of the rest array (dup_rest=false).
1940 if (
1941 arguments_node != NULL &&
1942 (arguments = &arguments_node->arguments, arguments_size = arguments->size) >= 2 &&
1943 PM_NODE_FLAG_P(arguments_node, PM_ARGUMENTS_NODE_FLAGS_CONTAINS_SPLAT) &&
1944 !PM_NODE_FLAG_P(arguments_node, PM_ARGUMENTS_NODE_FLAGS_CONTAINS_MULTIPLE_SPLATS) &&
1945 PM_NODE_TYPE_P(arguments->nodes[arguments_size - 1], PM_KEYWORD_HASH_NODE)
1946 ) {
1947 // Start by assuming that dup_rest=false, then check each element of the
1948 // hash to ensure we don't need to flip it back to true (in case one of
1949 // the elements could potentially mutate the array).
1950 dup_rest = SPLATARRAY_FALSE;
1951
1952 const pm_keyword_hash_node_t *keyword_hash = (const pm_keyword_hash_node_t *) arguments->nodes[arguments_size - 1];
1953 const pm_node_list_t *elements = &keyword_hash->elements;
1954
1955 for (size_t index = 0; dup_rest == SPLATARRAY_FALSE && index < elements->size; index++) {
1956 const pm_node_t *element = elements->nodes[index];
1957
1958 switch (PM_NODE_TYPE(element)) {
1959 case PM_ASSOC_NODE: {
1960 const pm_assoc_node_t *assoc = (const pm_assoc_node_t *) element;
1961 if (pm_setup_args_dup_rest_p(assoc->key) || pm_setup_args_dup_rest_p(assoc->value)) dup_rest = SPLATARRAY_TRUE;
1962 break;
1963 }
1964 case PM_ASSOC_SPLAT_NODE: {
1965 const pm_assoc_splat_node_t *assoc = (const pm_assoc_splat_node_t *) element;
1966 if (assoc->value != NULL && pm_setup_args_dup_rest_p(assoc->value)) dup_rest = SPLATARRAY_TRUE;
1967 break;
1968 }
1969 default:
1970 break;
1971 }
1972 }
1973 }
1974
1975 int initial_dup_rest = dup_rest;
1976 int argc;
1977
1978 if (block && PM_NODE_TYPE_P(block, PM_BLOCK_ARGUMENT_NODE)) {
1979 // We compile the `&block_arg` expression first and stitch it later
1980 // since the nature of the expression influences whether splat should
1981 // duplicate the array.
1982 bool regular_block_arg = true;
1983 const pm_node_t *block_expr = ((const pm_block_argument_node_t *)block)->expression;
1984
1985 if (block_expr && pm_setup_args_dup_rest_p(block_expr)) {
1986 dup_rest = SPLATARRAY_TRUE | DUP_SINGLE_KW_SPLAT;
1987 initial_dup_rest = dup_rest;
1988 }
1989
1990 DECL_ANCHOR(block_arg);
1991 pm_compile_node(iseq, block, block_arg, false, scope_node);
1992
1993 *flags |= VM_CALL_ARGS_BLOCKARG;
1994
1995 if (LIST_INSN_SIZE_ONE(block_arg)) {
1996 LINK_ELEMENT *elem = FIRST_ELEMENT(block_arg);
1997 if (IS_INSN(elem)) {
1998 INSN *iobj = (INSN *) elem;
1999 if (iobj->insn_id == BIN(getblockparam)) {
2000 iobj->insn_id = BIN(getblockparamproxy);
2001 }
2002
2003 // Allow splat without duplication for simple one-instruction
2004 // block arguments like `&arg`. It is known that this
2005 // optimization can be too aggressive in some cases. See
2006 // [Bug #16504].
2007 regular_block_arg = false;
2008 }
2009 }
2010
2011 argc = pm_setup_args_core(arguments_node, block, flags, regular_block_arg, kw_arg, &dup_rest, iseq, ret, scope_node, node_location);
2012 PUSH_SEQ(ret, block_arg);
2013 }
2014 else {
2015 argc = pm_setup_args_core(arguments_node, block, flags, false, kw_arg, &dup_rest, iseq, ret, scope_node, node_location);
2016 }
2017
2018 // If the dup_rest flag was consumed while compiling the arguments (which
2019 // effectively means we found the splat node), then it would have changed
2020 // during the call to pm_setup_args_core. In this case, we want to add the
2021 // VM_CALL_ARGS_SPLAT_MUT flag.
2022 if (*flags & VM_CALL_ARGS_SPLAT && dup_rest != initial_dup_rest) {
2023 *flags |= VM_CALL_ARGS_SPLAT_MUT;
2024 }
2025
2026 return argc;
2027}
2028
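// Compile an IndexOperatorWriteNode, which represents calling an operator
// method on the result of an index read and writing the result back, e.g.:
//
//     foo[bar] += baz
//
// The receiver and arguments are compiled once, the current value is read
// with #[], combined with the right-hand side using the operator, and the
// result is written back with #[]=.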
2039static void
2040pm_compile_index_operator_write_node(rb_iseq_t *iseq, const pm_index_operator_write_node_t *node, const pm_node_location_t *node_location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
2041{
2042 const pm_node_location_t location = *node_location;
2043 if (!popped) PUSH_INSN(ret, location, putnil);
2044
2045 PM_COMPILE_NOT_POPPED(node->receiver);
2046
2047 int boff = (node->block == NULL ? 0 : 1);
2048 int flag = PM_NODE_TYPE_P(node->receiver, PM_SELF_NODE) ? VM_CALL_FCALL : 0;
2049 struct rb_callinfo_kwarg *keywords = NULL;
2050 int argc = pm_setup_args(node->arguments, (const pm_node_t *) node->block, &flag, &keywords, iseq, ret, scope_node, node_location);
2051
2052 if ((argc > 0 || boff) && (flag & VM_CALL_KW_SPLAT)) {
2053 if (boff) {
2054 PUSH_INSN(ret, location, splatkw);
2055 }
2056 else {
2057 PUSH_INSN(ret, location, dup);
2058 PUSH_INSN(ret, location, splatkw);
2059 PUSH_INSN(ret, location, pop);
2060 }
2061 }
2062
2063 int dup_argn = argc + 1 + boff;
2064 int keyword_len = 0;
2065
2066 if (keywords) {
2067 keyword_len = keywords->keyword_len;
2068 dup_argn += keyword_len;
2069 }
2070
2071 PUSH_INSN1(ret, location, dupn, INT2FIX(dup_argn));
2072 PUSH_SEND_R(ret, location, idAREF, INT2FIX(argc), NULL, INT2FIX(flag & ~(VM_CALL_ARGS_SPLAT_MUT | VM_CALL_KW_SPLAT_MUT)), keywords);
2073 PM_COMPILE_NOT_POPPED(node->value);
2074
2075 ID id_operator = pm_constant_id_lookup(scope_node, node->binary_operator);
2076 PUSH_SEND(ret, location, id_operator, INT2FIX(1));
2077
2078 if (!popped) {
2079 PUSH_INSN1(ret, location, setn, INT2FIX(dup_argn + 1));
2080 }
2081 if (flag & VM_CALL_ARGS_SPLAT) {
2082 if (flag & VM_CALL_KW_SPLAT) {
2083 PUSH_INSN1(ret, location, topn, INT2FIX(2 + boff));
2084
2085 if (!(flag & VM_CALL_ARGS_SPLAT_MUT)) {
2086 PUSH_INSN1(ret, location, splatarray, Qtrue);
2087 flag |= VM_CALL_ARGS_SPLAT_MUT;
2088 }
2089
2090 PUSH_INSN(ret, location, swap);
2091 PUSH_INSN1(ret, location, pushtoarray, INT2FIX(1));
2092 PUSH_INSN1(ret, location, setn, INT2FIX(2 + boff));
2093 PUSH_INSN(ret, location, pop);
2094 }
2095 else {
2096 if (boff > 0) {
2097 PUSH_INSN1(ret, location, dupn, INT2FIX(3));
2098 PUSH_INSN(ret, location, swap);
2099 PUSH_INSN(ret, location, pop);
2100 }
2101 if (!(flag & VM_CALL_ARGS_SPLAT_MUT)) {
2102 PUSH_INSN(ret, location, swap);
2103 PUSH_INSN1(ret, location, splatarray, Qtrue);
2104 PUSH_INSN(ret, location, swap);
2105 flag |= VM_CALL_ARGS_SPLAT_MUT;
2106 }
2107 PUSH_INSN1(ret, location, pushtoarray, INT2FIX(1));
2108 if (boff > 0) {
2109 PUSH_INSN1(ret, location, setn, INT2FIX(3));
2110 PUSH_INSN(ret, location, pop);
2111 PUSH_INSN(ret, location, pop);
2112 }
2113 }
2114
2115 PUSH_SEND_R(ret, location, idASET, INT2FIX(argc), NULL, INT2FIX(flag), keywords);
2116 }
2117 else if (flag & VM_CALL_KW_SPLAT) {
2118 if (boff > 0) {
2119 PUSH_INSN1(ret, location, topn, INT2FIX(2));
2120 PUSH_INSN(ret, location, swap);
2121 PUSH_INSN1(ret, location, setn, INT2FIX(3));
2122 PUSH_INSN(ret, location, pop);
2123 }
2124 PUSH_INSN(ret, location, swap);
2125 PUSH_SEND_R(ret, location, idASET, INT2FIX(argc + 1), NULL, INT2FIX(flag), keywords);
2126 }
2127 else if (keyword_len) {
2128 PUSH_INSN(ret, location, dup);
2129 PUSH_INSN1(ret, location, opt_reverse, INT2FIX(keyword_len + boff + 2));
2130 PUSH_INSN1(ret, location, opt_reverse, INT2FIX(keyword_len + boff + 1));
2131 PUSH_INSN(ret, location, pop);
2132 PUSH_SEND_R(ret, location, idASET, INT2FIX(argc + 1), NULL, INT2FIX(flag), keywords);
2133 }
2134 else {
2135 if (boff > 0) {
2136 PUSH_INSN(ret, location, swap);
2137 }
2138 PUSH_SEND_R(ret, location, idASET, INT2FIX(argc + 1), NULL, INT2FIX(flag), keywords);
2139 }
2140
2141 PUSH_INSN(ret, location, pop);
2142}
2143
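// Compile an IndexAndWriteNode or IndexOrWriteNode, which represent the
// short-circuiting forms of an index write, e.g.:
//
//     foo[bar] &&= baz
//     foo[bar] ||= baz
//
// The current value is read with #[]; depending on its truthiness the
// right-hand side is compiled and written back with #[]=, otherwise the value
// that was read becomes the result.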
2156static void
2157pm_compile_index_control_flow_write_node(rb_iseq_t *iseq, const pm_node_t *node, const pm_node_t *receiver, const pm_arguments_node_t *arguments, const pm_block_argument_node_t *block, const pm_node_t *value, const pm_node_location_t *node_location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
2158{
2159 const pm_node_location_t location = *node_location;
2160 if (!popped) PUSH_INSN(ret, location, putnil);
2161 PM_COMPILE_NOT_POPPED(receiver);
2162
2163 int boff = (block == NULL ? 0 : 1);
2164 int flag = PM_NODE_TYPE_P(receiver, PM_SELF_NODE) ? VM_CALL_FCALL : 0;
2165 struct rb_callinfo_kwarg *keywords = NULL;
2166 int argc = pm_setup_args(arguments, (const pm_node_t *) block, &flag, &keywords, iseq, ret, scope_node, node_location);
2167
2168 if ((argc > 0 || boff) && (flag & VM_CALL_KW_SPLAT)) {
2169 if (boff) {
2170 PUSH_INSN(ret, location, splatkw);
2171 }
2172 else {
2173 PUSH_INSN(ret, location, dup);
2174 PUSH_INSN(ret, location, splatkw);
2175 PUSH_INSN(ret, location, pop);
2176 }
2177 }
2178
2179 int dup_argn = argc + 1 + boff;
2180 int keyword_len = 0;
2181
2182 if (keywords) {
2183 keyword_len = keywords->keyword_len;
2184 dup_argn += keyword_len;
2185 }
2186
2187 PUSH_INSN1(ret, location, dupn, INT2FIX(dup_argn));
2188 PUSH_SEND_R(ret, location, idAREF, INT2FIX(argc), NULL, INT2FIX(flag & ~(VM_CALL_ARGS_SPLAT_MUT | VM_CALL_KW_SPLAT_MUT)), keywords);
2189
2190 LABEL *label = NEW_LABEL(location.line);
2191 LABEL *lfin = NEW_LABEL(location.line);
2192
2193 PUSH_INSN(ret, location, dup);
2194 if (PM_NODE_TYPE_P(node, PM_INDEX_AND_WRITE_NODE)) {
2195 PUSH_INSNL(ret, location, branchunless, label);
2196 }
2197 else {
2198 PUSH_INSNL(ret, location, branchif, label);
2199 }
2200
2201 PUSH_INSN(ret, location, pop);
2202 PM_COMPILE_NOT_POPPED(value);
2203
2204 if (!popped) {
2205 PUSH_INSN1(ret, location, setn, INT2FIX(dup_argn + 1));
2206 }
2207
2208 if (flag & VM_CALL_ARGS_SPLAT) {
2209 if (flag & VM_CALL_KW_SPLAT) {
2210 PUSH_INSN1(ret, location, topn, INT2FIX(2 + boff));
2211 if (!(flag & VM_CALL_ARGS_SPLAT_MUT)) {
2212 PUSH_INSN1(ret, location, splatarray, Qtrue);
2213 flag |= VM_CALL_ARGS_SPLAT_MUT;
2214 }
2215
2216 PUSH_INSN(ret, location, swap);
2217 PUSH_INSN1(ret, location, pushtoarray, INT2FIX(1));
2218 PUSH_INSN1(ret, location, setn, INT2FIX(2 + boff));
2219 PUSH_INSN(ret, location, pop);
2220 }
2221 else {
2222 if (boff > 0) {
2223 PUSH_INSN1(ret, location, dupn, INT2FIX(3));
2224 PUSH_INSN(ret, location, swap);
2225 PUSH_INSN(ret, location, pop);
2226 }
2227 if (!(flag & VM_CALL_ARGS_SPLAT_MUT)) {
2228 PUSH_INSN(ret, location, swap);
2229 PUSH_INSN1(ret, location, splatarray, Qtrue);
2230 PUSH_INSN(ret, location, swap);
2231 flag |= VM_CALL_ARGS_SPLAT_MUT;
2232 }
2233 PUSH_INSN1(ret, location, pushtoarray, INT2FIX(1));
2234 if (boff > 0) {
2235 PUSH_INSN1(ret, location, setn, INT2FIX(3));
2236 PUSH_INSN(ret, location, pop);
2237 PUSH_INSN(ret, location, pop);
2238 }
2239 }
2240
2241 PUSH_SEND_R(ret, location, idASET, INT2FIX(argc), NULL, INT2FIX(flag), keywords);
2242 }
2243 else if (flag & VM_CALL_KW_SPLAT) {
2244 if (boff > 0) {
2245 PUSH_INSN1(ret, location, topn, INT2FIX(2));
2246 PUSH_INSN(ret, location, swap);
2247 PUSH_INSN1(ret, location, setn, INT2FIX(3));
2248 PUSH_INSN(ret, location, pop);
2249 }
2250
2251 PUSH_INSN(ret, location, swap);
2252 PUSH_SEND_R(ret, location, idASET, INT2FIX(argc + 1), NULL, INT2FIX(flag), keywords);
2253 }
2254 else if (keyword_len) {
2255 PUSH_INSN1(ret, location, opt_reverse, INT2FIX(keyword_len + boff + 1));
2256 PUSH_INSN1(ret, location, opt_reverse, INT2FIX(keyword_len + boff + 0));
2257 PUSH_SEND_R(ret, location, idASET, INT2FIX(argc + 1), NULL, INT2FIX(flag), keywords);
2258 }
2259 else {
2260 if (boff > 0) {
2261 PUSH_INSN(ret, location, swap);
2262 }
2263 PUSH_SEND_R(ret, location, idASET, INT2FIX(argc + 1), NULL, INT2FIX(flag), keywords);
2264 }
2265
2266 PUSH_INSN(ret, location, pop);
2267 PUSH_INSNL(ret, location, jump, lfin);
2268 PUSH_LABEL(ret, label);
2269 if (!popped) {
2270 PUSH_INSN1(ret, location, setn, INT2FIX(dup_argn + 1));
2271 }
2272 PUSH_INSN1(ret, location, adjuststack, INT2FIX(dup_argn + 1));
2273 PUSH_LABEL(ret, lfin);
2274}
2275
2276// When we compile a pattern matching expression, we use the stack as a scratch
2277// space to store lots of different values (think of it as a pattern matching
2278// function that needs space for a number of different local variables). The
2279// "base index" refers to the index on the stack where we
2280// started compiling the pattern matching expression. These offsets from that
2281// base index indicate the location of the various locals we need.
2282#define PM_PATTERN_BASE_INDEX_OFFSET_DECONSTRUCTED_CACHE 0
2283#define PM_PATTERN_BASE_INDEX_OFFSET_ERROR_STRING 1
2284#define PM_PATTERN_BASE_INDEX_OFFSET_KEY_ERROR_P 2
2285#define PM_PATTERN_BASE_INDEX_OFFSET_KEY_ERROR_MATCHEE 3
2286#define PM_PATTERN_BASE_INDEX_OFFSET_KEY_ERROR_KEY 4
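// These offsets are combined with the base index (plus however many values are
// currently sitting above it) and used with topn/setn to read and write the
// scratch slots.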
2287
2288// A forward declaration because this is the recursive function that handles
2289// compiling a pattern. It can be reentered by nesting patterns, as in the case
2290// of arrays or hashes.
2291static int pm_compile_pattern(rb_iseq_t *iseq, pm_scope_node_t *scope_node, const pm_node_t *node, LINK_ANCHOR *const ret, LABEL *matched_label, LABEL *unmatched_label, bool in_single_pattern, bool use_deconstructed_cache, unsigned int base_index);
2292
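// Emit the code that fills in the error string for a failed check when
// compiling a single pattern match (`foo => bar` or `foo in bar`). If the
// check result on top of the stack is falsy, the given message is formatted
// with the value being matched and stored in the error string slot, and the
// key error flag is cleared.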
2297static int
2298pm_compile_pattern_generic_error(rb_iseq_t *iseq, pm_scope_node_t *scope_node, const pm_node_t *node, LINK_ANCHOR *const ret, VALUE message, unsigned int base_index)
2299{
2300 const pm_node_location_t location = PM_NODE_START_LOCATION(scope_node->parser, node);
2301 LABEL *match_succeeded_label = NEW_LABEL(location.line);
2302
2303 PUSH_INSN(ret, location, dup);
2304 PUSH_INSNL(ret, location, branchif, match_succeeded_label);
2305
2306 PUSH_INSN1(ret, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
2307 PUSH_INSN1(ret, location, putobject, message);
2308 PUSH_INSN1(ret, location, topn, INT2FIX(3));
2309 PUSH_SEND(ret, location, id_core_sprintf, INT2FIX(2));
2310 PUSH_INSN1(ret, location, setn, INT2FIX(base_index + PM_PATTERN_BASE_INDEX_OFFSET_ERROR_STRING + 1));
2311
2312 PUSH_INSN1(ret, location, putobject, Qfalse);
2313 PUSH_INSN1(ret, location, setn, INT2FIX(base_index + PM_PATTERN_BASE_INDEX_OFFSET_KEY_ERROR_P + 2));
2314
2315 PUSH_INSN(ret, location, pop);
2316 PUSH_INSN(ret, location, pop);
2317 PUSH_LABEL(ret, match_succeeded_label);
2318
2319 return COMPILE_OK;
2320}
2321
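// Like pm_compile_pattern_generic_error(), but for length mismatches: the
// message is additionally formatted with the actual length of the value being
// matched and the expected length.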
2327static int
2328pm_compile_pattern_length_error(rb_iseq_t *iseq, pm_scope_node_t *scope_node, const pm_node_t *node, LINK_ANCHOR *const ret, VALUE message, VALUE length, unsigned int base_index)
2329{
2330 const pm_node_location_t location = PM_NODE_START_LOCATION(scope_node->parser, node);
2331 LABEL *match_succeeded_label = NEW_LABEL(location.line);
2332
2333 PUSH_INSN(ret, location, dup);
2334 PUSH_INSNL(ret, location, branchif, match_succeeded_label);
2335
2336 PUSH_INSN1(ret, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
2337 PUSH_INSN1(ret, location, putobject, message);
2338 PUSH_INSN1(ret, location, topn, INT2FIX(3));
2339 PUSH_INSN(ret, location, dup);
2340 PUSH_SEND(ret, location, idLength, INT2FIX(0));
2341 PUSH_INSN1(ret, location, putobject, length);
2342 PUSH_SEND(ret, location, id_core_sprintf, INT2FIX(4));
2343 PUSH_INSN1(ret, location, setn, INT2FIX(base_index + PM_PATTERN_BASE_INDEX_OFFSET_ERROR_STRING + 1));
2344
2345 PUSH_INSN1(ret, location, putobject, Qfalse);
2346 PUSH_INSN1(ret, location, setn, INT2FIX(base_index + PM_PATTERN_BASE_INDEX_OFFSET_KEY_ERROR_P + 2));
2347
2348 PUSH_INSN(ret, location, pop);
2349 PUSH_INSN(ret, location, pop);
2350 PUSH_LABEL(ret, match_succeeded_label);
2351
2352 return COMPILE_OK;
2353}
2354
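// Emit the code that fills in the error string when a `===` check fails in a
// single pattern match, formatting both the pattern and the value being
// matched into the message.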
2360static int
2361pm_compile_pattern_eqq_error(rb_iseq_t *iseq, pm_scope_node_t *scope_node, const pm_node_t *node, LINK_ANCHOR *const ret, unsigned int base_index)
2362{
2363 const pm_node_location_t location = PM_NODE_START_LOCATION(scope_node->parser, node);
2364 LABEL *match_succeeded_label = NEW_LABEL(location.line);
2365
2366 PUSH_INSN(ret, location, dup);
2367 PUSH_INSNL(ret, location, branchif, match_succeeded_label);
2368 PUSH_INSN1(ret, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
2369
2370 VALUE operand = rb_fstring_lit("%p === %p does not return true");
2371 PUSH_INSN1(ret, location, putobject, operand);
2372
2373 PUSH_INSN1(ret, location, topn, INT2FIX(3));
2374 PUSH_INSN1(ret, location, topn, INT2FIX(5));
2375 PUSH_SEND(ret, location, id_core_sprintf, INT2FIX(3));
2376 PUSH_INSN1(ret, location, setn, INT2FIX(base_index + PM_PATTERN_BASE_INDEX_OFFSET_ERROR_STRING + 1));
2377 PUSH_INSN1(ret, location, putobject, Qfalse);
2378 PUSH_INSN1(ret, location, setn, INT2FIX(base_index + PM_PATTERN_BASE_INDEX_OFFSET_KEY_ERROR_P + 2));
2379 PUSH_INSN(ret, location, pop);
2380 PUSH_INSN(ret, location, pop);
2381
2382 PUSH_LABEL(ret, match_succeeded_label);
2383 PUSH_INSN1(ret, location, setn, INT2FIX(2));
2384 PUSH_INSN(ret, location, pop);
2385 PUSH_INSN(ret, location, pop);
2386
2387 return COMPILE_OK;
2388}
2389
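// Compile a pattern such that a successful match falls through to the
// instructions that follow, while a failed match jumps to unmatched_label.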
2396static int
2397pm_compile_pattern_match(rb_iseq_t *iseq, pm_scope_node_t *scope_node, const pm_node_t *node, LINK_ANCHOR *const ret, LABEL *unmatched_label, bool in_single_pattern, bool use_deconstructed_cache, unsigned int base_index)
2398{
2399 LABEL *matched_label = NEW_LABEL(pm_node_line_number(scope_node->parser, node));
2400 CHECK(pm_compile_pattern(iseq, scope_node, node, ret, matched_label, unmatched_label, in_single_pattern, use_deconstructed_cache, base_index));
2401 PUSH_LABEL(ret, matched_label);
2402 return COMPILE_OK;
2403}
2404
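// Emit the code that calls #deconstruct on the value being matched, for use by
// array and find patterns. The result may be cached in the scratch slot so
// that nested patterns do not call #deconstruct repeatedly. If the value does
// not respond to #deconstruct the match fails, and if #deconstruct does not
// return an Array we jump to type_error_label.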
2410static int
2411pm_compile_pattern_deconstruct(rb_iseq_t *iseq, pm_scope_node_t *scope_node, const pm_node_t *node, LINK_ANCHOR *const ret, LABEL *deconstruct_label, LABEL *match_failed_label, LABEL *deconstructed_label, LABEL *type_error_label, bool in_single_pattern, bool use_deconstructed_cache, unsigned int base_index)
2412{
2413 const pm_node_location_t location = PM_NODE_START_LOCATION(scope_node->parser, node);
2414
2415 if (use_deconstructed_cache) {
2416 PUSH_INSN1(ret, location, topn, INT2FIX(base_index + PM_PATTERN_BASE_INDEX_OFFSET_DECONSTRUCTED_CACHE));
2417 PUSH_INSNL(ret, location, branchnil, deconstruct_label);
2418
2419 PUSH_INSN1(ret, location, topn, INT2FIX(base_index + PM_PATTERN_BASE_INDEX_OFFSET_DECONSTRUCTED_CACHE));
2420 PUSH_INSNL(ret, location, branchunless, match_failed_label);
2421
2422 PUSH_INSN(ret, location, pop);
2423 PUSH_INSN1(ret, location, topn, INT2FIX(base_index + PM_PATTERN_BASE_INDEX_OFFSET_DECONSTRUCTED_CACHE - 1));
2424 PUSH_INSNL(ret, location, jump, deconstructed_label);
2425 }
2426 else {
2427 PUSH_INSNL(ret, location, jump, deconstruct_label);
2428 }
2429
2430 PUSH_LABEL(ret, deconstruct_label);
2431 PUSH_INSN(ret, location, dup);
2432
2433 VALUE operand = ID2SYM(rb_intern("deconstruct"));
2434 PUSH_INSN1(ret, location, putobject, operand);
2435 PUSH_SEND(ret, location, idRespond_to, INT2FIX(1));
2436
2437 if (use_deconstructed_cache) {
2438 PUSH_INSN1(ret, location, setn, INT2FIX(base_index + PM_PATTERN_BASE_INDEX_OFFSET_DECONSTRUCTED_CACHE + 1));
2439 }
2440
2441 if (in_single_pattern) {
2442 CHECK(pm_compile_pattern_generic_error(iseq, scope_node, node, ret, rb_fstring_lit("%p does not respond to #deconstruct"), base_index + 1));
2443 }
2444
2445 PUSH_INSNL(ret, location, branchunless, match_failed_label);
2446 PUSH_SEND(ret, location, rb_intern("deconstruct"), INT2FIX(0));
2447
2448 if (use_deconstructed_cache) {
2449 PUSH_INSN1(ret, location, setn, INT2FIX(base_index + PM_PATTERN_BASE_INDEX_OFFSET_DECONSTRUCTED_CACHE));
2450 }
2451
2452 PUSH_INSN(ret, location, dup);
2453 PUSH_INSN1(ret, location, checktype, INT2FIX(T_ARRAY));
2454 PUSH_INSNL(ret, location, branchunless, type_error_label);
2455 PUSH_LABEL(ret, deconstructed_label);
2456
2457 return COMPILE_OK;
2458}
2459
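// Compile the constant part of a pattern, e.g. the `Point` in
// `foo => Point[1, 2]` or `foo => Point(x:, y:)`. The constant is matched
// against the value using case (===) semantics, jumping to match_failed_label
// if it does not match.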
2464static int
2465pm_compile_pattern_constant(rb_iseq_t *iseq, pm_scope_node_t *scope_node, const pm_node_t *node, LINK_ANCHOR *const ret, LABEL *match_failed_label, bool in_single_pattern, unsigned int base_index)
2466{
2467 const pm_node_location_t location = PM_NODE_START_LOCATION(scope_node->parser, node);
2468
2469 PUSH_INSN(ret, location, dup);
2470 PM_COMPILE_NOT_POPPED(node);
2471
2472 if (in_single_pattern) {
2473 PUSH_INSN1(ret, location, dupn, INT2FIX(2));
2474 }
2475 PUSH_INSN1(ret, location, checkmatch, INT2FIX(VM_CHECKMATCH_TYPE_CASE));
2476 if (in_single_pattern) {
2477 CHECK(pm_compile_pattern_eqq_error(iseq, scope_node, node, ret, base_index + 3));
2478 }
2479 PUSH_INSNL(ret, location, branchunless, match_failed_label);
2480 return COMPILE_OK;
2481}
2482
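// Emit the code that raises the appropriate error when a single pattern match
// (`foo => bar`) fails: NoMatchingPatternKeyError if the failure was a missing
// hash key, NoMatchingPatternError otherwise, using the information stored in
// the pattern matching scratch slots.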
2487static void
2488pm_compile_pattern_error_handler(rb_iseq_t *iseq, const pm_scope_node_t *scope_node, const pm_node_t *node, LINK_ANCHOR *const ret, LABEL *done_label, bool popped)
2489{
2490 const pm_node_location_t location = PM_NODE_START_LOCATION(scope_node->parser, node);
2491 LABEL *key_error_label = NEW_LABEL(location.line);
2492 LABEL *cleanup_label = NEW_LABEL(location.line);
2493
2494 struct rb_callinfo_kwarg *kw_arg = rb_xmalloc_mul_add(2, sizeof(VALUE), sizeof(struct rb_callinfo_kwarg));
2495 kw_arg->references = 0;
2496 kw_arg->keyword_len = 2;
2497 kw_arg->keywords[0] = ID2SYM(rb_intern("matchee"));
2498 kw_arg->keywords[1] = ID2SYM(rb_intern("key"));
2499
2500 PUSH_INSN1(ret, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
2501 PUSH_INSN1(ret, location, topn, INT2FIX(PM_PATTERN_BASE_INDEX_OFFSET_KEY_ERROR_P + 2));
2502 PUSH_INSNL(ret, location, branchif, key_error_label);
2503
2504 PUSH_INSN1(ret, location, putobject, rb_eNoMatchingPatternError);
2505 PUSH_INSN1(ret, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
2506
2507 {
2508 VALUE operand = rb_fstring_lit("%p: %s");
2509 PUSH_INSN1(ret, location, putobject, operand);
2510 }
2511
2512 PUSH_INSN1(ret, location, topn, INT2FIX(4));
2513 PUSH_INSN1(ret, location, topn, INT2FIX(PM_PATTERN_BASE_INDEX_OFFSET_ERROR_STRING + 6));
2514 PUSH_SEND(ret, location, id_core_sprintf, INT2FIX(3));
2515 PUSH_SEND(ret, location, id_core_raise, INT2FIX(2));
2516 PUSH_INSNL(ret, location, jump, cleanup_label);
2517
2518 PUSH_LABEL(ret, key_error_label);
2519 PUSH_INSN1(ret, location, putobject, rb_eNoMatchingPatternKeyError);
2520 PUSH_INSN1(ret, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
2521
2522 {
2523 VALUE operand = rb_fstring_lit("%p: %s");
2524 PUSH_INSN1(ret, location, putobject, operand);
2525 }
2526
2527 PUSH_INSN1(ret, location, topn, INT2FIX(4));
2528 PUSH_INSN1(ret, location, topn, INT2FIX(PM_PATTERN_BASE_INDEX_OFFSET_ERROR_STRING + 6));
2529 PUSH_SEND(ret, location, id_core_sprintf, INT2FIX(3));
2530 PUSH_INSN1(ret, location, topn, INT2FIX(PM_PATTERN_BASE_INDEX_OFFSET_KEY_ERROR_MATCHEE + 4));
2531 PUSH_INSN1(ret, location, topn, INT2FIX(PM_PATTERN_BASE_INDEX_OFFSET_KEY_ERROR_KEY + 5));
2532 PUSH_SEND_R(ret, location, rb_intern("new"), INT2FIX(1), NULL, INT2FIX(VM_CALL_KWARG), kw_arg);
2533 PUSH_SEND(ret, location, id_core_raise, INT2FIX(1));
2534 PUSH_LABEL(ret, cleanup_label);
2535
2536 PUSH_INSN1(ret, location, adjuststack, INT2FIX(7));
2537 if (!popped) PUSH_INSN(ret, location, putnil);
2538 PUSH_INSNL(ret, location, jump, done_label);
2539 PUSH_INSN1(ret, location, dupn, INT2FIX(5));
2540 if (popped) PUSH_INSN(ret, location, putnil);
2541}
2542
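// The recursive entry point for compiling a pattern. Compiles the given node
// such that a successful match jumps to matched_label and a failed match
// jumps to unmatched_label.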
2546static int
2547pm_compile_pattern(rb_iseq_t *iseq, pm_scope_node_t *scope_node, const pm_node_t *node, LINK_ANCHOR *const ret, LABEL *matched_label, LABEL *unmatched_label, bool in_single_pattern, bool use_deconstructed_cache, unsigned int base_index)
2548{
2549 const pm_node_location_t location = PM_NODE_START_LOCATION(scope_node->parser, node);
2550
2551 switch (PM_NODE_TYPE(node)) {
2552 case PM_ARRAY_PATTERN_NODE: {
2553 // Array patterns in pattern matching are triggered by using commas in
2554 // a pattern or wrapping it in brackets. They are represented by an
2555 // ArrayPatternNode. This looks like:
2556 //
2557 // foo => [1, 2, 3]
2558 //
2559 // It can optionally have a splat in the middle of it, which can
2560 // optionally have a name attached.
2561 const pm_array_pattern_node_t *cast = (const pm_array_pattern_node_t *) node;
2562
2563 const size_t requireds_size = cast->requireds.size;
2564 const size_t posts_size = cast->posts.size;
2565 const size_t minimum_size = requireds_size + posts_size;
2566
2567 bool rest_named = false;
2568 bool use_rest_size = false;
2569
2570 if (cast->rest != NULL) {
2571 rest_named = (PM_NODE_TYPE_P(cast->rest, PM_SPLAT_NODE) && ((const pm_splat_node_t *) cast->rest)->expression != NULL);
2572 use_rest_size = (rest_named || (!rest_named && posts_size > 0));
2573 }
2574
2575 LABEL *match_failed_label = NEW_LABEL(location.line);
2576 LABEL *type_error_label = NEW_LABEL(location.line);
2577 LABEL *deconstruct_label = NEW_LABEL(location.line);
2578 LABEL *deconstructed_label = NEW_LABEL(location.line);
2579
2580 if (use_rest_size) {
2581 PUSH_INSN1(ret, location, putobject, INT2FIX(0));
2582 PUSH_INSN(ret, location, swap);
2583 base_index++;
2584 }
2585
2586 if (cast->constant != NULL) {
2587 CHECK(pm_compile_pattern_constant(iseq, scope_node, cast->constant, ret, match_failed_label, in_single_pattern, base_index));
2588 }
2589
2590 CHECK(pm_compile_pattern_deconstruct(iseq, scope_node, node, ret, deconstruct_label, match_failed_label, deconstructed_label, type_error_label, in_single_pattern, use_deconstructed_cache, base_index));
2591
2592 PUSH_INSN(ret, location, dup);
2593 PUSH_SEND(ret, location, idLength, INT2FIX(0));
2594 PUSH_INSN1(ret, location, putobject, INT2FIX(minimum_size));
2595 PUSH_SEND(ret, location, cast->rest == NULL ? idEq : idGE, INT2FIX(1));
2596 if (in_single_pattern) {
2597 VALUE message = cast->rest == NULL ? rb_fstring_lit("%p length mismatch (given %p, expected %p)") : rb_fstring_lit("%p length mismatch (given %p, expected %p+)");
2598 CHECK(pm_compile_pattern_length_error(iseq, scope_node, node, ret, message, INT2FIX(minimum_size), base_index + 1));
2599 }
2600 PUSH_INSNL(ret, location, branchunless, match_failed_label);
2601
2602 for (size_t index = 0; index < requireds_size; index++) {
2603 const pm_node_t *required = cast->requireds.nodes[index];
2604 PUSH_INSN(ret, location, dup);
2605 PUSH_INSN1(ret, location, putobject, INT2FIX(index));
2606 PUSH_SEND(ret, location, idAREF, INT2FIX(1));
2607 CHECK(pm_compile_pattern_match(iseq, scope_node, required, ret, match_failed_label, in_single_pattern, false, base_index + 1));
2608 }
2609
2610 if (cast->rest != NULL) {
2611 if (rest_named) {
2612 PUSH_INSN(ret, location, dup);
2613 PUSH_INSN1(ret, location, putobject, INT2FIX(requireds_size));
2614 PUSH_INSN1(ret, location, topn, INT2FIX(1));
2615 PUSH_SEND(ret, location, idLength, INT2FIX(0));
2616 PUSH_INSN1(ret, location, putobject, INT2FIX(minimum_size));
2617 PUSH_SEND(ret, location, idMINUS, INT2FIX(1));
2618 PUSH_INSN1(ret, location, setn, INT2FIX(4));
2619 PUSH_SEND(ret, location, idAREF, INT2FIX(2));
2620 CHECK(pm_compile_pattern_match(iseq, scope_node, ((const pm_splat_node_t *) cast->rest)->expression, ret, match_failed_label, in_single_pattern, false, base_index + 1));
2621 }
2622 else if (posts_size > 0) {
2623 PUSH_INSN(ret, location, dup);
2624 PUSH_SEND(ret, location, idLength, INT2FIX(0));
2625 PUSH_INSN1(ret, location, putobject, INT2FIX(minimum_size));
2626 PUSH_SEND(ret, location, idMINUS, INT2FIX(1));
2627 PUSH_INSN1(ret, location, setn, INT2FIX(2));
2628 PUSH_INSN(ret, location, pop);
2629 }
2630 }
2631
2632 for (size_t index = 0; index < posts_size; index++) {
2633 const pm_node_t *post = cast->posts.nodes[index];
2634 PUSH_INSN(ret, location, dup);
2635
2636 PUSH_INSN1(ret, location, putobject, INT2FIX(requireds_size + index));
2637 PUSH_INSN1(ret, location, topn, INT2FIX(3));
2638 PUSH_SEND(ret, location, idPLUS, INT2FIX(1));
2639 PUSH_SEND(ret, location, idAREF, INT2FIX(1));
2640 CHECK(pm_compile_pattern_match(iseq, scope_node, post, ret, match_failed_label, in_single_pattern, false, base_index + 1));
2641 }
2642
2643 PUSH_INSN(ret, location, pop);
2644 if (use_rest_size) {
2645 PUSH_INSN(ret, location, pop);
2646 }
2647
2648 PUSH_INSNL(ret, location, jump, matched_label);
2649 PUSH_INSN(ret, location, putnil);
2650 if (use_rest_size) {
2651 PUSH_INSN(ret, location, putnil);
2652 }
2653
2654 PUSH_LABEL(ret, type_error_label);
2655 PUSH_INSN1(ret, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
2656 PUSH_INSN1(ret, location, putobject, rb_eTypeError);
2657
2658 {
2659 VALUE operand = rb_fstring_lit("deconstruct must return Array");
2660 PUSH_INSN1(ret, location, putobject, operand);
2661 }
2662
2663 PUSH_SEND(ret, location, id_core_raise, INT2FIX(2));
2664 PUSH_INSN(ret, location, pop);
2665
2666 PUSH_LABEL(ret, match_failed_label);
2667 PUSH_INSN(ret, location, pop);
2668 if (use_rest_size) {
2669 PUSH_INSN(ret, location, pop);
2670 }
2671
2672 PUSH_INSNL(ret, location, jump, unmatched_label);
2673 break;
2674 }
2675 case PM_FIND_PATTERN_NODE: {
2676 // Find patterns in pattern matching are triggered by using commas in
2677 // a pattern or wrapping it in brackets and using a splat on both the
2678 // left and right sides of the pattern. This looks like:
2679 //
2680 // foo => [*, 1, 2, 3, *]
2681 //
2682 // There can be any number of requireds in the middle. The splats on
2683 // both sides can optionally have names attached.
2684 const pm_find_pattern_node_t *cast = (const pm_find_pattern_node_t *) node;
2685 const size_t size = cast->requireds.size;
2686
2687 LABEL *match_failed_label = NEW_LABEL(location.line);
2688 LABEL *type_error_label = NEW_LABEL(location.line);
2689 LABEL *deconstruct_label = NEW_LABEL(location.line);
2690 LABEL *deconstructed_label = NEW_LABEL(location.line);
2691
2692 if (cast->constant) {
2693 CHECK(pm_compile_pattern_constant(iseq, scope_node, cast->constant, ret, match_failed_label, in_single_pattern, base_index));
2694 }
2695
2696 CHECK(pm_compile_pattern_deconstruct(iseq, scope_node, node, ret, deconstruct_label, match_failed_label, deconstructed_label, type_error_label, in_single_pattern, use_deconstructed_cache, base_index));
2697
2698 PUSH_INSN(ret, location, dup);
2699 PUSH_SEND(ret, location, idLength, INT2FIX(0));
2700 PUSH_INSN1(ret, location, putobject, INT2FIX(size));
2701 PUSH_SEND(ret, location, idGE, INT2FIX(1));
2702 if (in_single_pattern) {
2703 CHECK(pm_compile_pattern_length_error(iseq, scope_node, node, ret, rb_fstring_lit("%p length mismatch (given %p, expected %p+)"), INT2FIX(size), base_index + 1));
2704 }
2705 PUSH_INSNL(ret, location, branchunless, match_failed_label);
2706
2707 {
2708 LABEL *while_begin_label = NEW_LABEL(location.line);
2709 LABEL *next_loop_label = NEW_LABEL(location.line);
2710 LABEL *find_succeeded_label = NEW_LABEL(location.line);
2711 LABEL *find_failed_label = NEW_LABEL(location.line);
2712
2713 PUSH_INSN(ret, location, dup);
2714 PUSH_SEND(ret, location, idLength, INT2FIX(0));
2715
2716 PUSH_INSN(ret, location, dup);
2717 PUSH_INSN1(ret, location, putobject, INT2FIX(size));
2718 PUSH_SEND(ret, location, idMINUS, INT2FIX(1));
2719 PUSH_INSN1(ret, location, putobject, INT2FIX(0));
2720 PUSH_LABEL(ret, while_begin_label);
2721
2722 PUSH_INSN(ret, location, dup);
2723 PUSH_INSN1(ret, location, topn, INT2FIX(2));
2724 PUSH_SEND(ret, location, idLE, INT2FIX(1));
2725 PUSH_INSNL(ret, location, branchunless, find_failed_label);
2726
2727 for (size_t index = 0; index < size; index++) {
2728 PUSH_INSN1(ret, location, topn, INT2FIX(3));
2729 PUSH_INSN1(ret, location, topn, INT2FIX(1));
2730
2731 if (index != 0) {
2732 PUSH_INSN1(ret, location, putobject, INT2FIX(index));
2733 PUSH_SEND(ret, location, idPLUS, INT2FIX(1));
2734 }
2735
2736 PUSH_SEND(ret, location, idAREF, INT2FIX(1));
2737 CHECK(pm_compile_pattern_match(iseq, scope_node, cast->requireds.nodes[index], ret, next_loop_label, in_single_pattern, false, base_index + 4));
2738 }
2739
2740 const pm_splat_node_t *left = cast->left;
2741
2742 if (left->expression != NULL) {
2743 PUSH_INSN1(ret, location, topn, INT2FIX(3));
2744 PUSH_INSN1(ret, location, putobject, INT2FIX(0));
2745 PUSH_INSN1(ret, location, topn, INT2FIX(2));
2746 PUSH_SEND(ret, location, idAREF, INT2FIX(2));
2747 CHECK(pm_compile_pattern_match(iseq, scope_node, left->expression, ret, find_failed_label, in_single_pattern, false, base_index + 4));
2748 }
2749
2750 RUBY_ASSERT(PM_NODE_TYPE_P(cast->right, PM_SPLAT_NODE));
2751 const pm_splat_node_t *right = (const pm_splat_node_t *) cast->right;
2752
2753 if (right->expression != NULL) {
2754 PUSH_INSN1(ret, location, topn, INT2FIX(3));
2755 PUSH_INSN1(ret, location, topn, INT2FIX(1));
2756 PUSH_INSN1(ret, location, putobject, INT2FIX(size));
2757 PUSH_SEND(ret, location, idPLUS, INT2FIX(1));
2758 PUSH_INSN1(ret, location, topn, INT2FIX(3));
2759 PUSH_SEND(ret, location, idAREF, INT2FIX(2));
2760 pm_compile_pattern_match(iseq, scope_node, right->expression, ret, find_failed_label, in_single_pattern, false, base_index + 4);
2761 }
2762
2763 PUSH_INSNL(ret, location, jump, find_succeeded_label);
2764
2765 PUSH_LABEL(ret, next_loop_label);
2766 PUSH_INSN1(ret, location, putobject, INT2FIX(1));
2767 PUSH_SEND(ret, location, idPLUS, INT2FIX(1));
2768 PUSH_INSNL(ret, location, jump, while_begin_label);
2769
2770 PUSH_LABEL(ret, find_failed_label);
2771 PUSH_INSN1(ret, location, adjuststack, INT2FIX(3));
2772 if (in_single_pattern) {
2773 PUSH_INSN1(ret, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
2774
2775 {
2776 VALUE operand = rb_fstring_lit("%p does not match to find pattern");
2777 PUSH_INSN1(ret, location, putobject, operand);
2778 }
2779
2780 PUSH_INSN1(ret, location, topn, INT2FIX(2));
2781 PUSH_SEND(ret, location, id_core_sprintf, INT2FIX(2));
2782 PUSH_INSN1(ret, location, setn, INT2FIX(base_index + PM_PATTERN_BASE_INDEX_OFFSET_ERROR_STRING + 1));
2783
2784 PUSH_INSN1(ret, location, putobject, Qfalse);
2785 PUSH_INSN1(ret, location, setn, INT2FIX(base_index + PM_PATTERN_BASE_INDEX_OFFSET_KEY_ERROR_P + 2));
2786
2787 PUSH_INSN(ret, location, pop);
2788 PUSH_INSN(ret, location, pop);
2789 }
2790 PUSH_INSNL(ret, location, jump, match_failed_label);
2791 PUSH_INSN1(ret, location, dupn, INT2FIX(3));
2792
2793 PUSH_LABEL(ret, find_succeeded_label);
2794 PUSH_INSN1(ret, location, adjuststack, INT2FIX(3));
2795 }
2796
2797 PUSH_INSN(ret, location, pop);
2798 PUSH_INSNL(ret, location, jump, matched_label);
2799 PUSH_INSN(ret, location, putnil);
2800
2801 PUSH_LABEL(ret, type_error_label);
2802 PUSH_INSN1(ret, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
2803 PUSH_INSN1(ret, location, putobject, rb_eTypeError);
2804
2805 {
2806 VALUE operand = rb_fstring_lit("deconstruct must return Array");
2807 PUSH_INSN1(ret, location, putobject, operand);
2808 }
2809
2810 PUSH_SEND(ret, location, id_core_raise, INT2FIX(2));
2811 PUSH_INSN(ret, location, pop);
2812
2813 PUSH_LABEL(ret, match_failed_label);
2814 PUSH_INSN(ret, location, pop);
2815 PUSH_INSNL(ret, location, jump, unmatched_label);
2816
2817 break;
2818 }
2819 case PM_HASH_PATTERN_NODE: {
2820 // Hash patterns in pattern matching are triggered by using labels and
2821 // values in a pattern or by using the ** operator. They are represented
2822 // by the HashPatternNode. This looks like:
2823 //
2824 // foo => { a: 1, b: 2, **bar }
2825 //
2826 // It can optionally have an assoc splat at the end of it, which can
2827 // optionally have a name.
2828 const pm_hash_pattern_node_t *cast = (const pm_hash_pattern_node_t *) node;
2829
2830 // We don't consider it a "rest" parameter if it's a ** that is unnamed.
2831 bool has_rest = cast->rest != NULL && !(PM_NODE_TYPE_P(cast->rest, PM_ASSOC_SPLAT_NODE) && ((const pm_assoc_splat_node_t *) cast->rest)->value == NULL);
2832 bool has_keys = cast->elements.size > 0 || cast->rest != NULL;
2833
2834 LABEL *match_failed_label = NEW_LABEL(location.line);
2835 LABEL *type_error_label = NEW_LABEL(location.line);
2836 VALUE keys = Qnil;
2837
2838 if (has_keys && !has_rest) {
2839 keys = rb_ary_new_capa(cast->elements.size);
2840
2841 for (size_t index = 0; index < cast->elements.size; index++) {
2842 const pm_node_t *element = cast->elements.nodes[index];
2843 RUBY_ASSERT(PM_NODE_TYPE_P(element, PM_ASSOC_NODE));
2844
2845 const pm_node_t *key = ((const pm_assoc_node_t *) element)->key;
2846 RUBY_ASSERT(PM_NODE_TYPE_P(key, PM_SYMBOL_NODE));
2847
2848 VALUE symbol = ID2SYM(parse_string_symbol(scope_node, (const pm_symbol_node_t *) key));
2849 rb_ary_push(keys, symbol);
2850 }
2851 }
2852
2853 if (cast->constant) {
2854 CHECK(pm_compile_pattern_constant(iseq, scope_node, cast->constant, ret, match_failed_label, in_single_pattern, base_index));
2855 }
2856
2857 PUSH_INSN(ret, location, dup);
2858
2859 {
2860 VALUE operand = ID2SYM(rb_intern("deconstruct_keys"));
2861 PUSH_INSN1(ret, location, putobject, operand);
2862 }
2863
2864 PUSH_SEND(ret, location, idRespond_to, INT2FIX(1));
2865 if (in_single_pattern) {
2866 CHECK(pm_compile_pattern_generic_error(iseq, scope_node, node, ret, rb_fstring_lit("%p does not respond to #deconstruct_keys"), base_index + 1));
2867 }
2868 PUSH_INSNL(ret, location, branchunless, match_failed_label);
2869
2870 if (NIL_P(keys)) {
2871 PUSH_INSN(ret, location, putnil);
2872 }
2873 else {
2874 rb_obj_hide(keys);
2875 RB_OBJ_SET_FROZEN_SHAREABLE(keys);
2876 PUSH_INSN1(ret, location, duparray, keys);
2877 RB_OBJ_WRITTEN(iseq, Qundef, keys);
2878 }
2879 PUSH_SEND(ret, location, rb_intern("deconstruct_keys"), INT2FIX(1));
2880
2881 PUSH_INSN(ret, location, dup);
2882 PUSH_INSN1(ret, location, checktype, INT2FIX(T_HASH));
2883 PUSH_INSNL(ret, location, branchunless, type_error_label);
2884
2885 if (has_rest) {
2886 PUSH_SEND(ret, location, rb_intern("dup"), INT2FIX(0));
2887 }
2888
2889 if (has_keys) {
2890 DECL_ANCHOR(match_values);
2891
2892 for (size_t index = 0; index < cast->elements.size; index++) {
2893 const pm_node_t *element = cast->elements.nodes[index];
2894 RUBY_ASSERT(PM_NODE_TYPE_P(element, PM_ASSOC_NODE));
2895
2896 const pm_assoc_node_t *assoc = (const pm_assoc_node_t *) element;
2897 const pm_node_t *key = assoc->key;
2898 RUBY_ASSERT(PM_NODE_TYPE_P(key, PM_SYMBOL_NODE));
2899
2900 VALUE symbol = ID2SYM(parse_string_symbol(scope_node, (const pm_symbol_node_t *) key));
2901 PUSH_INSN(ret, location, dup);
2902 PUSH_INSN1(ret, location, putobject, symbol);
2903 PUSH_SEND(ret, location, rb_intern("key?"), INT2FIX(1));
2904
2905 if (in_single_pattern) {
2906 LABEL *match_succeeded_label = NEW_LABEL(location.line);
2907
2908 PUSH_INSN(ret, location, dup);
2909 PUSH_INSNL(ret, location, branchif, match_succeeded_label);
2910
2911 {
2912 VALUE operand = rb_str_freeze(rb_sprintf("key not found: %+"PRIsVALUE, symbol));
2913 RB_OBJ_SET_SHAREABLE(operand);
2914 PUSH_INSN1(ret, location, putobject, operand);
2915 }
2916
2917 PUSH_INSN1(ret, location, setn, INT2FIX(base_index + PM_PATTERN_BASE_INDEX_OFFSET_ERROR_STRING + 2));
2918 PUSH_INSN1(ret, location, putobject, Qtrue);
2919 PUSH_INSN1(ret, location, setn, INT2FIX(base_index + PM_PATTERN_BASE_INDEX_OFFSET_KEY_ERROR_P + 3));
2920 PUSH_INSN1(ret, location, topn, INT2FIX(3));
2921 PUSH_INSN1(ret, location, setn, INT2FIX(base_index + PM_PATTERN_BASE_INDEX_OFFSET_KEY_ERROR_MATCHEE + 4));
2922 PUSH_INSN1(ret, location, putobject, symbol);
2923 PUSH_INSN1(ret, location, setn, INT2FIX(base_index + PM_PATTERN_BASE_INDEX_OFFSET_KEY_ERROR_KEY + 5));
2924
2925 PUSH_INSN1(ret, location, adjuststack, INT2FIX(4));
2926 PUSH_LABEL(ret, match_succeeded_label);
2927 }
2928
2929 PUSH_INSNL(ret, location, branchunless, match_failed_label);
2930 PUSH_INSN(match_values, location, dup);
2931 PUSH_INSN1(match_values, location, putobject, symbol);
2932 PUSH_SEND(match_values, location, has_rest ? rb_intern("delete") : idAREF, INT2FIX(1));
2933
2934 const pm_node_t *value = assoc->value;
2935 if (PM_NODE_TYPE_P(value, PM_IMPLICIT_NODE)) {
2936 value = ((const pm_implicit_node_t *) value)->value;
2937 }
2938
2939 CHECK(pm_compile_pattern_match(iseq, scope_node, value, match_values, match_failed_label, in_single_pattern, false, base_index + 1));
2940 }
2941
2942 PUSH_SEQ(ret, match_values);
2943 }
2944 else {
2945 PUSH_INSN(ret, location, dup);
2946 PUSH_SEND(ret, location, idEmptyP, INT2FIX(0));
2947 if (in_single_pattern) {
2948 CHECK(pm_compile_pattern_generic_error(iseq, scope_node, node, ret, rb_fstring_lit("%p is not empty"), base_index + 1));
2949 }
2950 PUSH_INSNL(ret, location, branchunless, match_failed_label);
2951 }
2952
2953 if (has_rest) {
2954 switch (PM_NODE_TYPE(cast->rest)) {
2955 case PM_NO_KEYWORDS_PARAMETER_NODE: {
2956 PUSH_INSN(ret, location, dup);
2957 PUSH_SEND(ret, location, idEmptyP, INT2FIX(0));
2958 if (in_single_pattern) {
2959 pm_compile_pattern_generic_error(iseq, scope_node, node, ret, rb_fstring_lit("rest of %p is not empty"), base_index + 1);
2960 }
2961 PUSH_INSNL(ret, location, branchunless, match_failed_label);
2962 break;
2963 }
2964 case PM_ASSOC_SPLAT_NODE: {
2965 const pm_assoc_splat_node_t *splat = (const pm_assoc_splat_node_t *) cast->rest;
2966 PUSH_INSN(ret, location, dup);
2967 pm_compile_pattern_match(iseq, scope_node, splat->value, ret, match_failed_label, in_single_pattern, false, base_index + 1);
2968 break;
2969 }
2970 default:
2971 rb_bug("unreachable");
2972 break;
2973 }
2974 }
2975
2976 PUSH_INSN(ret, location, pop);
2977 PUSH_INSNL(ret, location, jump, matched_label);
2978 PUSH_INSN(ret, location, putnil);
2979
2980 PUSH_LABEL(ret, type_error_label);
2981 PUSH_INSN1(ret, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
2982 PUSH_INSN1(ret, location, putobject, rb_eTypeError);
2983
2984 {
2985 VALUE operand = rb_fstring_lit("deconstruct_keys must return Hash");
2986 PUSH_INSN1(ret, location, putobject, operand);
2987 }
2988
2989 PUSH_SEND(ret, location, id_core_raise, INT2FIX(2));
2990 PUSH_INSN(ret, location, pop);
2991
2992 PUSH_LABEL(ret, match_failed_label);
2993 PUSH_INSN(ret, location, pop);
2994 PUSH_INSNL(ret, location, jump, unmatched_label);
2995 break;
2996 }
2997 case PM_CAPTURE_PATTERN_NODE: {
2998 // Capture patterns allow you to pattern match against an element in a
2999 // pattern and also capture the value into a local variable. This looks
3000 // like:
3001 //
3002 // [1] => [Integer => foo]
3003 //
3004 // In this case the `Integer => foo` will be represented by a
3005 // CapturePatternNode, which has both a value (the pattern to match
3006 // against) and a target (the place to write the variable into).
3007 const pm_capture_pattern_node_t *cast = (const pm_capture_pattern_node_t *) node;
3008
3009 LABEL *match_failed_label = NEW_LABEL(location.line);
3010
3011 PUSH_INSN(ret, location, dup);
3012 CHECK(pm_compile_pattern_match(iseq, scope_node, cast->value, ret, match_failed_label, in_single_pattern, use_deconstructed_cache, base_index + 1));
3013 CHECK(pm_compile_pattern(iseq, scope_node, (const pm_node_t *) cast->target, ret, matched_label, match_failed_label, in_single_pattern, false, base_index));
3014 PUSH_INSN(ret, location, putnil);
3015
3016 PUSH_LABEL(ret, match_failed_label);
3017 PUSH_INSN(ret, location, pop);
3018 PUSH_INSNL(ret, location, jump, unmatched_label);
3019
3020 break;
3021 }
3022 case PM_LOCAL_VARIABLE_TARGET_NODE: {
3023 // Local variables can be targeted by placing identifiers in the place
3024 // of a pattern. For example, foo in bar. This results in the value
3025 // being matched being written to that local variable.
3026 const pm_local_variable_target_node_t *cast = (const pm_local_variable_target_node_t *) node;
3027 pm_local_index_t index = pm_lookup_local_index(iseq, scope_node, cast->name, cast->depth);
3028
3029 PUSH_SETLOCAL(ret, location, index.index, index.level);
3030 PUSH_INSNL(ret, location, jump, matched_label);
3031 break;
3032 }
3033 case PM_ALTERNATION_PATTERN_NODE: {
3034 // Alternation patterns allow you to specify multiple patterns in a
3035 // single expression using the | operator.
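// This looks like:
//
//     foo => Integer | Float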
3036 const pm_alternation_pattern_node_t *cast = (const pm_alternation_pattern_node_t *) node;
3037
3038 LABEL *matched_left_label = NEW_LABEL(location.line);
3039 LABEL *unmatched_left_label = NEW_LABEL(location.line);
3040
3041 // First, we're going to attempt to match against the left pattern. If
3042 // that pattern matches, then we'll skip matching the right pattern.
3043 PUSH_INSN(ret, location, dup);
3044 CHECK(pm_compile_pattern(iseq, scope_node, cast->left, ret, matched_left_label, unmatched_left_label, in_single_pattern, use_deconstructed_cache, base_index + 1));
3045
3046 // If we get here, then we matched on the left pattern. In this case we
3047 // should pop out the duplicate value that we preemptively added to
3048 // match against the right pattern and then jump to the match label.
3049 PUSH_LABEL(ret, matched_left_label);
3050 PUSH_INSN(ret, location, pop);
3051 PUSH_INSNL(ret, location, jump, matched_label);
3052 PUSH_INSN(ret, location, putnil);
3053
3054 // If we get here, then we didn't match on the left pattern. In this
3055 // case we attempt to match against the right pattern.
3056 PUSH_LABEL(ret, unmatched_left_label);
3057 CHECK(pm_compile_pattern(iseq, scope_node, cast->right, ret, matched_label, unmatched_label, in_single_pattern, use_deconstructed_cache, base_index));
3058 break;
3059 }
3060 case PM_PARENTHESES_NODE:
3061 // Parentheses are allowed to wrap expressions in pattern matching and
3062 // they do nothing since they can only wrap individual expressions and
3063 // not groups. In this case we'll recurse back into this same function
3064 // with the body of the parentheses.
3065 return pm_compile_pattern(iseq, scope_node, ((const pm_parentheses_node_t *) node)->body, ret, matched_label, unmatched_label, in_single_pattern, use_deconstructed_cache, base_index);
3066 case PM_PINNED_EXPRESSION_NODE:
3067 // Pinned expressions are a way to match against the value of an
3068 // expression that should be evaluated at runtime. This looks like:
3069 // foo in ^(bar). To compile these, we compile the expression as if it
3070 // were a literal value by falling through to the literal case.
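    //
    // For illustration (arbitrary values), the pinned expression is
    // evaluated and then matched like any other value pattern via the
    // checkmatch handling below:
    //
    //     5 in ^(2 + 3) # => true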
3071 node = ((const pm_pinned_expression_node_t *) node)->expression;
3072 /* fallthrough */
3073 case PM_ARRAY_NODE:
3074 case PM_CLASS_VARIABLE_READ_NODE:
3075 case PM_CONSTANT_PATH_NODE:
3076 case PM_CONSTANT_READ_NODE:
3077 case PM_FALSE_NODE:
3078 case PM_FLOAT_NODE:
3079 case PM_GLOBAL_VARIABLE_READ_NODE:
3080 case PM_IMAGINARY_NODE:
3081 case PM_INSTANCE_VARIABLE_READ_NODE:
3082 case PM_IT_LOCAL_VARIABLE_READ_NODE:
3083 case PM_INTEGER_NODE:
3084 case PM_INTERPOLATED_REGULAR_EXPRESSION_NODE:
3085 case PM_INTERPOLATED_STRING_NODE:
3086 case PM_INTERPOLATED_SYMBOL_NODE:
3087 case PM_INTERPOLATED_X_STRING_NODE:
3088 case PM_LAMBDA_NODE:
3089 case PM_LOCAL_VARIABLE_READ_NODE:
3090 case PM_NIL_NODE:
3091 case PM_SOURCE_ENCODING_NODE:
3092 case PM_SOURCE_FILE_NODE:
3093 case PM_SOURCE_LINE_NODE:
3094 case PM_RANGE_NODE:
3095 case PM_RATIONAL_NODE:
3096 case PM_REGULAR_EXPRESSION_NODE:
3097 case PM_SELF_NODE:
3098 case PM_STRING_NODE:
3099 case PM_SYMBOL_NODE:
3100 case PM_TRUE_NODE:
3101 case PM_X_STRING_NODE: {
3102 // These nodes are all simple patterns, which means we'll use the
3103 // checkmatch instruction to match against them, which is effectively a
3104 // VM-level === operator.
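    //
    // For illustration (arbitrary values), both of these clauses delegate
    // to the pattern's === method:
    //
    //     case 5
    //     in 1..3    then :small   # (1..3) === 5
    //     in Integer then :integer # Integer === 5
    //     end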
3105 PM_COMPILE_NOT_POPPED(node);
3106 if (in_single_pattern) {
3107 PUSH_INSN1(ret, location, dupn, INT2FIX(2));
3108 }
3109
3110 PUSH_INSN1(ret, location, checkmatch, INT2FIX(VM_CHECKMATCH_TYPE_CASE));
3111
3112 if (in_single_pattern) {
3113 pm_compile_pattern_eqq_error(iseq, scope_node, node, ret, base_index + 2);
3114 }
3115
3116 PUSH_INSNL(ret, location, branchif, matched_label);
3117 PUSH_INSNL(ret, location, jump, unmatched_label);
3118 break;
3119 }
3120 case PM_PINNED_VARIABLE_NODE: {
3121 // Pinned variables are a way to match against the value of a variable
3122 // without it looking like you're trying to write to the variable. This
3123 // looks like: foo in ^@bar. To compile these, we compile the variable
3124 // that they hold.
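    //
    // For illustration (arbitrary names), the pinned variable's current
    // value becomes the pattern to match against:
    //
    //     expected = 5
    //     5 in ^expected # => true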
3125 const pm_pinned_variable_node_t *cast = (const pm_pinned_variable_node_t *) node;
3126 CHECK(pm_compile_pattern(iseq, scope_node, cast->variable, ret, matched_label, unmatched_label, in_single_pattern, true, base_index));
3127 break;
3128 }
3129 case PM_IF_NODE:
3130 case PM_UNLESS_NODE: {
3131 // If and unless nodes can show up here as guards on `in` clauses. This
3132 // looks like:
3133 //
3134 // case foo
3135 // in bar if baz?
3136 // qux
3137 // end
3138 //
3139 // Because we know they're in the modifier form and they can't have any
3140 // variation on this pattern, we compile them differently (more simply)
3141 // here than we would in the normal compilation path.
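    //
    // Roughly, the pattern itself is matched first and then the guard
    // predicate is evaluated: an `if` guard branches to the unmatched
    // label when the predicate is falsy, and an `unless` guard branches
    // there when it is truthy. For illustration (arbitrary names):
    //
    //     case 0
    //     in Integer => n unless n.negative?
    //       n
    //     end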
3142 const pm_node_t *predicate;
3143 const pm_node_t *statement;
3144
3145 if (PM_NODE_TYPE_P(node, PM_IF_NODE)) {
3146 const pm_if_node_t *cast = (const pm_if_node_t *) node;
3147 predicate = cast->predicate;
3148
3149 RUBY_ASSERT(cast->statements != NULL && cast->statements->body.size == 1);
3150 statement = cast->statements->body.nodes[0];
3151 }
3152 else {
3153 const pm_unless_node_t *cast = (const pm_unless_node_t *) node;
3154 predicate = cast->predicate;
3155
3156 RUBY_ASSERT(cast->statements != NULL && cast->statements->body.size == 1);
3157 statement = cast->statements->body.nodes[0];
3158 }
3159
3160 CHECK(pm_compile_pattern_match(iseq, scope_node, statement, ret, unmatched_label, in_single_pattern, use_deconstructed_cache, base_index));
3161 PM_COMPILE_NOT_POPPED(predicate);
3162
3163 if (in_single_pattern) {
3164 LABEL *match_succeeded_label = NEW_LABEL(location.line);
3165
3166 PUSH_INSN(ret, location, dup);
3167 if (PM_NODE_TYPE_P(node, PM_IF_NODE)) {
3168 PUSH_INSNL(ret, location, branchif, match_succeeded_label);
3169 }
3170 else {
3171 PUSH_INSNL(ret, location, branchunless, match_succeeded_label);
3172 }
3173
3174 {
3175 VALUE operand = rb_fstring_lit("guard clause does not return true");
3176 PUSH_INSN1(ret, location, putobject, operand);
3177 }
3178
3179 PUSH_INSN1(ret, location, setn, INT2FIX(base_index + PM_PATTERN_BASE_INDEX_OFFSET_ERROR_STRING + 1));
3180 PUSH_INSN1(ret, location, putobject, Qfalse);
3181 PUSH_INSN1(ret, location, setn, INT2FIX(base_index + PM_PATTERN_BASE_INDEX_OFFSET_KEY_ERROR_P + 2));
3182
3183 PUSH_INSN(ret, location, pop);
3184 PUSH_INSN(ret, location, pop);
3185
3186 PUSH_LABEL(ret, match_succeeded_label);
3187 }
3188
3189 if (PM_NODE_TYPE_P(node, PM_IF_NODE)) {
3190 PUSH_INSNL(ret, location, branchunless, unmatched_label);
3191 }
3192 else {
3193 PUSH_INSNL(ret, location, branchif, unmatched_label);
3194 }
3195
3196 PUSH_INSNL(ret, location, jump, matched_label);
3197 break;
3198 }
3199 default:
3200 // If we get here, then we have a node type that should not be in this
3201 // position. This would be a bug in the parser, because a different node
3202 // type should never have been created in this position in the tree.
3203 rb_bug("Unexpected node type in pattern matching expression: %s", pm_node_type_to_str(PM_NODE_TYPE(node)));
3204 break;
3205 }
3206
3207 return COMPILE_OK;
3208}
3209
3210#undef PM_PATTERN_BASE_INDEX_OFFSET_DECONSTRUCTED_CACHE
3211#undef PM_PATTERN_BASE_INDEX_OFFSET_ERROR_STRING
3212#undef PM_PATTERN_BASE_INDEX_OFFSET_KEY_ERROR_P
3213#undef PM_PATTERN_BASE_INDEX_OFFSET_KEY_ERROR_MATCHEE
3214#undef PM_PATTERN_BASE_INDEX_OFFSET_KEY_ERROR_KEY
3215
3216// Generate a scope node from the given node.
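//
// Callers are expected to pair this with pm_scope_node_destroy once the
// scope node has been used. A rough sketch of the typical usage found
// throughout this file:
//
//     pm_scope_node_t next_scope_node;
//     pm_scope_node_init(node, &next_scope_node, scope_node);
//     // ... build a child iseq from &next_scope_node ...
//     pm_scope_node_destroy(&next_scope_node);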
3217void
3218pm_scope_node_init(const pm_node_t *node, pm_scope_node_t *scope, pm_scope_node_t *previous)
3219{
3220 // This is very important, otherwise the scope node could be seen as having
3221 // certain flags set that _should not_ be set.
3222 memset(scope, 0, sizeof(pm_scope_node_t));
3223
3224 scope->base.type = PM_SCOPE_NODE;
3225 scope->base.location.start = node->location.start;
3226 scope->base.location.end = node->location.end;
3227
3228 scope->previous = previous;
3229 scope->ast_node = (pm_node_t *) node;
3230
3231 if (previous) {
3232 scope->parser = previous->parser;
3233 scope->encoding = previous->encoding;
3234 scope->filepath_encoding = previous->filepath_encoding;
3235 scope->constants = previous->constants;
3236 scope->coverage_enabled = previous->coverage_enabled;
3237 scope->script_lines = previous->script_lines;
3238 }
3239
3240 switch (PM_NODE_TYPE(node)) {
3241 case PM_BLOCK_NODE: {
3242 const pm_block_node_t *cast = (const pm_block_node_t *) node;
3243 scope->body = cast->body;
3244 scope->locals = cast->locals;
3245 scope->parameters = cast->parameters;
3246 break;
3247 }
3248 case PM_CLASS_NODE: {
3249 const pm_class_node_t *cast = (const pm_class_node_t *) node;
3250 scope->body = cast->body;
3251 scope->locals = cast->locals;
3252 break;
3253 }
3254 case PM_DEF_NODE: {
3255 const pm_def_node_t *cast = (const pm_def_node_t *) node;
3256 scope->parameters = (pm_node_t *) cast->parameters;
3257 scope->body = cast->body;
3258 scope->locals = cast->locals;
3259 break;
3260 }
3261 case PM_ENSURE_NODE: {
3262 const pm_ensure_node_t *cast = (const pm_ensure_node_t *) node;
3263 scope->body = (pm_node_t *) node;
3264
3265 if (cast->statements != NULL) {
3266 scope->base.location.start = cast->statements->base.location.start;
3267 scope->base.location.end = cast->statements->base.location.end;
3268 }
3269
3270 break;
3271 }
3272 case PM_FOR_NODE: {
3273 const pm_for_node_t *cast = (const pm_for_node_t *) node;
3274 scope->body = (pm_node_t *) cast->statements;
3275 break;
3276 }
3277 case PM_INTERPOLATED_REGULAR_EXPRESSION_NODE: {
3278 RUBY_ASSERT(node->flags & PM_REGULAR_EXPRESSION_FLAGS_ONCE);
3279 scope->body = (pm_node_t *) node;
3280 break;
3281 }
3282 case PM_LAMBDA_NODE: {
3283 const pm_lambda_node_t *cast = (const pm_lambda_node_t *) node;
3284 scope->parameters = cast->parameters;
3285 scope->body = cast->body;
3286 scope->locals = cast->locals;
3287
3288 if (cast->parameters != NULL) {
3289 scope->base.location.start = cast->parameters->location.start;
3290 }
3291 else {
3292 scope->base.location.start = cast->operator_loc.end;
3293 }
3294 break;
3295 }
3296 case PM_MODULE_NODE: {
3297 const pm_module_node_t *cast = (const pm_module_node_t *) node;
3298 scope->body = cast->body;
3299 scope->locals = cast->locals;
3300 break;
3301 }
3302 case PM_POST_EXECUTION_NODE: {
3303 const pm_post_execution_node_t *cast = (const pm_post_execution_node_t *) node;
3304 scope->body = (pm_node_t *) cast->statements;
3305 break;
3306 }
3307 case PM_PROGRAM_NODE: {
3308 const pm_program_node_t *cast = (const pm_program_node_t *) node;
3309 scope->body = (pm_node_t *) cast->statements;
3310 scope->locals = cast->locals;
3311 break;
3312 }
3313 case PM_RESCUE_NODE: {
3314 const pm_rescue_node_t *cast = (const pm_rescue_node_t *) node;
3315 scope->body = (pm_node_t *) cast->statements;
3316 break;
3317 }
3318 case PM_RESCUE_MODIFIER_NODE: {
3319 const pm_rescue_modifier_node_t *cast = (const pm_rescue_modifier_node_t *) node;
3320 scope->body = (pm_node_t *) cast->rescue_expression;
3321 break;
3322 }
3323 case PM_SINGLETON_CLASS_NODE: {
3324 const pm_singleton_class_node_t *cast = (const pm_singleton_class_node_t *) node;
3325 scope->body = cast->body;
3326 scope->locals = cast->locals;
3327 break;
3328 }
3329 case PM_STATEMENTS_NODE: {
3330 const pm_statements_node_t *cast = (const pm_statements_node_t *) node;
3331 scope->body = (pm_node_t *) cast;
3332 break;
3333 }
3334 default:
3335 rb_bug("unreachable");
3336 break;
3337 }
3338}
3339
3340void
3341pm_scope_node_destroy(pm_scope_node_t *scope_node)
3342{
3343 if (scope_node->index_lookup_table) {
3344 st_free_table(scope_node->index_lookup_table);
3345 }
3346}
3347
3359static void
3360pm_compile_retry_end_label(rb_iseq_t *iseq, LINK_ANCHOR *const ret, LABEL *retry_end_l)
3361{
3362 INSN *iobj;
3363 LINK_ELEMENT *last_elem = LAST_ELEMENT(ret);
3364 iobj = IS_INSN(last_elem) ? (INSN*) last_elem : (INSN*) get_prev_insn((INSN*) last_elem);
3365 while (!IS_INSN_ID(iobj, send) && !IS_INSN_ID(iobj, invokesuper) && !IS_INSN_ID(iobj, sendforward) && !IS_INSN_ID(iobj, invokesuperforward)) {
3366 iobj = (INSN*) get_prev_insn(iobj);
3367 }
3368 ELEM_INSERT_NEXT(&iobj->link, (LINK_ELEMENT*) retry_end_l);
3369
3370 // LINK_ANCHOR keeps a pointer to its last element, but
3371 // ELEM_INSERT_NEXT does not update it even when we insert an insn at
3372 // the end of the LINK_ANCHOR, so we update it manually here.
3373 if (&iobj->link == LAST_ELEMENT(ret)) {
3374 ret->last = (LINK_ELEMENT*) retry_end_l;
3375 }
3376}
3377
3378static const char *
3379pm_iseq_builtin_function_name(const pm_scope_node_t *scope_node, const pm_node_t *receiver, ID method_id)
3380{
3381 const char *name = rb_id2name(method_id);
3382 static const char prefix[] = "__builtin_";
3383 const size_t prefix_len = sizeof(prefix) - 1;
3384
3385 if (receiver == NULL) {
3386 if (UNLIKELY(strncmp(prefix, name, prefix_len) == 0)) {
3387 // __builtin_foo
3388 return &name[prefix_len];
3389 }
3390 }
3391 else if (PM_NODE_TYPE_P(receiver, PM_CALL_NODE)) {
3392 if (PM_NODE_FLAG_P(receiver, PM_CALL_NODE_FLAGS_VARIABLE_CALL)) {
3393 const pm_call_node_t *cast = (const pm_call_node_t *) receiver;
3394 if (pm_constant_id_lookup(scope_node, cast->name) == rb_intern_const("__builtin")) {
3395 // __builtin.foo
3396 return name;
3397 }
3398 }
3399 }
3400 else if (PM_NODE_TYPE_P(receiver, PM_CONSTANT_READ_NODE)) {
3401 const pm_constant_read_node_t *cast = (const pm_constant_read_node_t *) receiver;
3402 if (pm_constant_id_lookup(scope_node, cast->name) == rb_intern_const("Primitive")) {
3403 // Primitive.foo
3404 return name;
3405 }
3406 }
3407
3408 return NULL;
3409}
3410
3411// Compile Primitive.attr! :leaf, ...
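//
// The accepted attribute names are the symbols handled below (:leaf,
// :inline_block, :use_block, and :c_trace), so an illustrative call in a
// builtin Ruby file looks like:
//
//     Primitive.attr! :leaf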
3412static int
3413pm_compile_builtin_attr(rb_iseq_t *iseq, const pm_scope_node_t *scope_node, const pm_arguments_node_t *arguments, const pm_node_location_t *node_location)
3414{
3415 if (arguments == NULL) {
3416 COMPILE_ERROR(iseq, node_location->line, "attr!: no argument");
3417 return COMPILE_NG;
3418 }
3419
3420 const pm_node_t *argument;
3421 PM_NODE_LIST_FOREACH(&arguments->arguments, index, argument) {
3422 if (!PM_NODE_TYPE_P(argument, PM_SYMBOL_NODE)) {
3423 COMPILE_ERROR(iseq, node_location->line, "non symbol argument to attr!: %s", pm_node_type_to_str(PM_NODE_TYPE(argument)));
3424 return COMPILE_NG;
3425 }
3426
3427 VALUE symbol = pm_static_literal_value(iseq, argument, scope_node);
3428 VALUE string = rb_sym2str(symbol);
3429
3430 if (strcmp(RSTRING_PTR(string), "leaf") == 0) {
3431 ISEQ_BODY(iseq)->builtin_attrs |= BUILTIN_ATTR_LEAF;
3432 }
3433 else if (strcmp(RSTRING_PTR(string), "inline_block") == 0) {
3434 ISEQ_BODY(iseq)->builtin_attrs |= BUILTIN_ATTR_INLINE_BLOCK;
3435 }
3436 else if (strcmp(RSTRING_PTR(string), "use_block") == 0) {
3437 iseq_set_use_block(iseq);
3438 }
3439 else if (strcmp(RSTRING_PTR(string), "c_trace") == 0) {
3440 // Let the iseq act like a C method in backtraces
3441 ISEQ_BODY(iseq)->builtin_attrs |= BUILTIN_ATTR_C_TRACE;
3442 }
3443 else {
3444 COMPILE_ERROR(iseq, node_location->line, "unknown argument to attr!: %s", RSTRING_PTR(string));
3445 return COMPILE_NG;
3446 }
3447 }
3448
3449 return COMPILE_OK;
3450}
3451
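// Compile Primitive.arg! :name, which (unless the result is popped) pushes
// the value of the local variable with that name onto the stack. An
// illustrative call, where `name` is an arbitrary parameter of the
// surrounding method:
//
//     Primitive.arg!(:name)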
3452static int
3453pm_compile_builtin_arg(rb_iseq_t *iseq, LINK_ANCHOR *const ret, const pm_scope_node_t *scope_node, const pm_arguments_node_t *arguments, const pm_node_location_t *node_location, int popped)
3454{
3455 if (arguments == NULL) {
3456 COMPILE_ERROR(iseq, node_location->line, "arg!: no argument");
3457 return COMPILE_NG;
3458 }
3459
3460 if (arguments->arguments.size != 1) {
3461 COMPILE_ERROR(iseq, node_location->line, "arg!: too many arguments");
3462 return COMPILE_NG;
3463 }
3464
3465 const pm_node_t *argument = arguments->arguments.nodes[0];
3466 if (!PM_NODE_TYPE_P(argument, PM_SYMBOL_NODE)) {
3467 COMPILE_ERROR(iseq, node_location->line, "non symbol argument to arg!: %s", pm_node_type_to_str(PM_NODE_TYPE(argument)));
3468 return COMPILE_NG;
3469 }
3470
3471 if (!popped) {
3472 ID name = parse_string_symbol(scope_node, ((const pm_symbol_node_t *) argument));
3473 int index = ISEQ_BODY(ISEQ_BODY(iseq)->local_iseq)->local_table_size - get_local_var_idx(iseq, name);
3474
3475 debugs("id: %s idx: %d\n", rb_id2name(name), index);
3476 PUSH_GETLOCAL(ret, *node_location, index, get_lvar_level(iseq));
3477 }
3478
3479 return COMPILE_OK;
3480}
3481
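// Compile the Primitive.mandatory_only? pseudo-call. As the checks below
// enforce, it may only appear as the predicate of the single `if` that
// makes up a method body; when it does, we build a second iseq for the
// method that accepts only the required parameters. A rough sketch of the
// expected shape (names are illustrative):
//
//     def foo(a, b = nil)
//       if Primitive.mandatory_only?
//         # fast path compiled into the mandatory-only iseq
//       else
//         # general path
//       end
//     end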
3482static int
3483pm_compile_builtin_mandatory_only_method(rb_iseq_t *iseq, pm_scope_node_t *scope_node, const pm_call_node_t *call_node, const pm_node_location_t *node_location)
3484{
3485 const pm_node_t *ast_node = scope_node->ast_node;
3486 if (!PM_NODE_TYPE_P(ast_node, PM_DEF_NODE)) {
3487 rb_bug("mandatory_only?: not in method definition");
3488 return COMPILE_NG;
3489 }
3490
3491 const pm_def_node_t *def_node = (const pm_def_node_t *) ast_node;
3492 const pm_parameters_node_t *parameters_node = def_node->parameters;
3493 if (parameters_node == NULL) {
3494 rb_bug("mandatory_only?: in method definition with no parameters");
3495 return COMPILE_NG;
3496 }
3497
3498 const pm_node_t *body_node = def_node->body;
3499 if (body_node == NULL || !PM_NODE_TYPE_P(body_node, PM_STATEMENTS_NODE) || (((const pm_statements_node_t *) body_node)->body.size != 1) || !PM_NODE_TYPE_P(((const pm_statements_node_t *) body_node)->body.nodes[0], PM_IF_NODE)) {
3500 rb_bug("mandatory_only?: not in method definition with plain statements");
3501 return COMPILE_NG;
3502 }
3503
3504 const pm_if_node_t *if_node = (const pm_if_node_t *) ((const pm_statements_node_t *) body_node)->body.nodes[0];
3505 if (if_node->predicate != ((const pm_node_t *) call_node)) {
3506 rb_bug("mandatory_only?: can't find mandatory node");
3507 return COMPILE_NG;
3508 }
3509
3510 pm_parameters_node_t parameters = {
3511 .base = parameters_node->base,
3512 .requireds = parameters_node->requireds
3513 };
3514
3515 const pm_def_node_t def = {
3516 .base = def_node->base,
3517 .name = def_node->name,
3518 .receiver = def_node->receiver,
3519 .parameters = &parameters,
3520 .body = (pm_node_t *) if_node->statements,
3521 .locals = {
3522 .ids = def_node->locals.ids,
3523 .size = parameters_node->requireds.size,
3524 .capacity = def_node->locals.capacity
3525 }
3526 };
3527
3528 pm_scope_node_t next_scope_node;
3529 pm_scope_node_init(&def.base, &next_scope_node, scope_node);
3530
3531 int error_state;
3532 const rb_iseq_t *mandatory_only_iseq = pm_iseq_new_with_opt(
3533 &next_scope_node,
3534 rb_iseq_base_label(iseq),
3535 rb_iseq_path(iseq),
3536 rb_iseq_realpath(iseq),
3537 node_location->line,
3538 NULL,
3539 0,
3540 ISEQ_TYPE_METHOD,
3541 ISEQ_COMPILE_DATA(iseq)->option,
3542 &error_state
3543 );
3544 RB_OBJ_WRITE(iseq, &ISEQ_BODY(iseq)->mandatory_only_iseq, (VALUE)mandatory_only_iseq);
3545
3546 if (error_state) {
3547 RUBY_ASSERT(ISEQ_BODY(iseq)->mandatory_only_iseq == NULL);
3548 rb_jump_tag(error_state);
3549 }
3550
3551 pm_scope_node_destroy(&next_scope_node);
3552 return COMPILE_OK;
3553}
3554
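// Compile a call to a builtin function. When the method name does not
// resolve to a registered builtin, the pseudo-builtins cstmt!, cexpr!,
// cconst!, cinit!, attr!, arg!, and mandatory_only? are handled specially
// by the branches below.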
3555static int
3556pm_compile_builtin_function_call(rb_iseq_t *iseq, LINK_ANCHOR *const ret, pm_scope_node_t *scope_node, const pm_call_node_t *call_node, const pm_node_location_t *node_location, int popped, const rb_iseq_t *parent_block, const char *builtin_func)
3557{
3558 const pm_arguments_node_t *arguments = call_node->arguments;
3559
3560 if (parent_block != NULL) {
3561 COMPILE_ERROR(iseq, node_location->line, "should not call builtins here.");
3562 return COMPILE_NG;
3563 }
3564
3565#define BUILTIN_INLINE_PREFIX "_bi"
3566 char inline_func[sizeof(BUILTIN_INLINE_PREFIX) + DECIMAL_SIZE_OF(int)];
3567 bool cconst = false;
3568retry:;
3569 const struct rb_builtin_function *bf = iseq_builtin_function_lookup(iseq, builtin_func);
3570
3571 if (bf == NULL) {
3572 if (strcmp("cstmt!", builtin_func) == 0 || strcmp("cexpr!", builtin_func) == 0) {
3573 // ok
3574 }
3575 else if (strcmp("cconst!", builtin_func) == 0) {
3576 cconst = true;
3577 }
3578 else if (strcmp("cinit!", builtin_func) == 0) {
3579 // ignore
3580 return COMPILE_OK;
3581 }
3582 else if (strcmp("attr!", builtin_func) == 0) {
3583 return pm_compile_builtin_attr(iseq, scope_node, arguments, node_location);
3584 }
3585 else if (strcmp("arg!", builtin_func) == 0) {
3586 return pm_compile_builtin_arg(iseq, ret, scope_node, arguments, node_location, popped);
3587 }
3588 else if (strcmp("mandatory_only?", builtin_func) == 0) {
3589 if (popped) {
3590 rb_bug("mandatory_only? should be in if condition");
3591 }
3592 else if (!LIST_INSN_SIZE_ZERO(ret)) {
3593 rb_bug("mandatory_only? should be put on top");
3594 }
3595
3596 PUSH_INSN1(ret, *node_location, putobject, Qfalse);
3597 return pm_compile_builtin_mandatory_only_method(iseq, scope_node, call_node, node_location);
3598 }
3599 else if (1) {
3600 rb_bug("can't find builtin function:%s", builtin_func);
3601 }
3602 else {
3603 COMPILE_ERROR(iseq, node_location->line, "can't find builtin function:%s", builtin_func);
3604 return COMPILE_NG;
3605 }
3606
3607 int inline_index = node_location->line;
3608 snprintf(inline_func, sizeof(inline_func), BUILTIN_INLINE_PREFIX "%d", inline_index);
3609 builtin_func = inline_func;
3610 arguments = NULL;
3611 goto retry;
3612 }
3613
3614 if (cconst) {
3615 typedef VALUE(*builtin_func0)(void *, VALUE);
3616 VALUE const_val = (*(builtin_func0)(uintptr_t)bf->func_ptr)(NULL, Qnil);
3617 PUSH_INSN1(ret, *node_location, putobject, const_val);
3618 return COMPILE_OK;
3619 }
3620
3621 // fprintf(stderr, "func_name:%s -> %p\n", builtin_func, bf->func_ptr);
3622
3623 DECL_ANCHOR(args_seq);
3624
3625 int flags = 0;
3626 struct rb_callinfo_kwarg *keywords = NULL;
3627 int argc = pm_setup_args(arguments, call_node->block, &flags, &keywords, iseq, args_seq, scope_node, node_location);
3628
3629 if (argc != bf->argc) {
3630 COMPILE_ERROR(iseq, node_location->line, "argc does not match for builtin function:%s (expect %d but %d)", builtin_func, bf->argc, argc);
3631 return COMPILE_NG;
3632 }
3633
3634 unsigned int start_index;
3635 if (delegate_call_p(iseq, argc, args_seq, &start_index)) {
3636 PUSH_INSN2(ret, *node_location, opt_invokebuiltin_delegate, bf, INT2FIX(start_index));
3637 }
3638 else {
3639 PUSH_SEQ(ret, args_seq);
3640 PUSH_INSN1(ret, *node_location, invokebuiltin, bf);
3641 }
3642
3643 if (popped) PUSH_INSN(ret, *node_location, pop);
3644 return COMPILE_OK;
3645}
3646
3650static void
3651pm_compile_call(rb_iseq_t *iseq, const pm_call_node_t *call_node, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node, ID method_id, LABEL *start)
3652{
3653 const pm_location_t *message_loc = &call_node->message_loc;
3654 if (message_loc->start == NULL) message_loc = &call_node->base.location;
3655
3656 const pm_node_location_t location = PM_LOCATION_START_LOCATION(scope_node->parser, message_loc, call_node->base.node_id);
3657
3658 LABEL *else_label = NEW_LABEL(location.line);
3659 LABEL *end_label = NEW_LABEL(location.line);
3660 LABEL *retry_end_l = NEW_LABEL(location.line);
3661
3662 VALUE branches = Qfalse;
3663 rb_code_location_t code_location = { 0 };
3664 int node_id = location.node_id;
3665
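    // For a safe navigation call such as `foo&.bar` (names illustrative),
    // we duplicate the receiver and branch to else_label when it is nil,
    // recording "then"/"else" branch coverage events when coverage is
    // enabled.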
3666 if (PM_NODE_FLAG_P(call_node, PM_CALL_NODE_FLAGS_SAFE_NAVIGATION)) {
3667 if (PM_BRANCH_COVERAGE_P(iseq)) {
3668 const uint8_t *cursors[3] = {
3669 call_node->closing_loc.end,
3670 call_node->arguments == NULL ? NULL : call_node->arguments->base.location.end,
3671 call_node->message_loc.end
3672 };
3673
3674 const uint8_t *end_cursor = cursors[0];
3675 end_cursor = (end_cursor == NULL || cursors[1] == NULL) ? cursors[1] : (end_cursor > cursors[1] ? end_cursor : cursors[1]);
3676 end_cursor = (end_cursor == NULL || cursors[2] == NULL) ? cursors[2] : (end_cursor > cursors[2] ? end_cursor : cursors[2]);
3677 if (!end_cursor) end_cursor = call_node->closing_loc.end;
3678
3679 const pm_line_column_t start_location = PM_NODE_START_LINE_COLUMN(scope_node->parser, call_node);
3680 const pm_line_column_t end_location = pm_newline_list_line_column(&scope_node->parser->newline_list, end_cursor, scope_node->parser->start_line);
3681
3682 code_location = (rb_code_location_t) {
3683 .beg_pos = { .lineno = start_location.line, .column = start_location.column },
3684 .end_pos = { .lineno = end_location.line, .column = end_location.column }
3685 };
3686
3687 branches = decl_branch_base(iseq, PTR2NUM(call_node), &code_location, "&.");
3688 }
3689
3690 PUSH_INSN(ret, location, dup);
3691 PUSH_INSNL(ret, location, branchnil, else_label);
3692
3693 add_trace_branch_coverage(iseq, ret, &code_location, node_id, 0, "then", branches);
3694 }
3695
3696 LINK_ELEMENT *opt_new_prelude = LAST_ELEMENT(ret);
3697
3698 int flags = 0;
3699 struct rb_callinfo_kwarg *kw_arg = NULL;
3700
3701 int orig_argc = pm_setup_args(call_node->arguments, call_node->block, &flags, &kw_arg, iseq, ret, scope_node, &location);
3702 const rb_iseq_t *previous_block = ISEQ_COMPILE_DATA(iseq)->current_block;
3703 const rb_iseq_t *block_iseq = NULL;
3704
3705 if (call_node->block != NULL && PM_NODE_TYPE_P(call_node->block, PM_BLOCK_NODE)) {
3706 // Scope associated with the block
3707 pm_scope_node_t next_scope_node;
3708 pm_scope_node_init(call_node->block, &next_scope_node, scope_node);
3709
3710 block_iseq = NEW_CHILD_ISEQ(&next_scope_node, make_name_for_block(iseq), ISEQ_TYPE_BLOCK, pm_node_line_number(scope_node->parser, call_node->block));
3711 pm_scope_node_destroy(&next_scope_node);
3712 ISEQ_COMPILE_DATA(iseq)->current_block = block_iseq;
3713 }
3714 else {
3715 if (PM_NODE_FLAG_P(call_node, PM_CALL_NODE_FLAGS_VARIABLE_CALL)) {
3716 flags |= VM_CALL_VCALL;
3717 }
3718
3719 if (!flags) {
3720 flags |= VM_CALL_ARGS_SIMPLE;
3721 }
3722 }
3723
3724 if (PM_NODE_FLAG_P(call_node, PM_CALL_NODE_FLAGS_IGNORE_VISIBILITY)) {
3725 flags |= VM_CALL_FCALL;
3726 }
3727
3728 if (!popped && PM_NODE_FLAG_P(call_node, PM_CALL_NODE_FLAGS_ATTRIBUTE_WRITE)) {
3729 if (flags & VM_CALL_ARGS_BLOCKARG) {
3730 PUSH_INSN1(ret, location, topn, INT2FIX(1));
3731 if (flags & VM_CALL_ARGS_SPLAT) {
3732 PUSH_INSN1(ret, location, putobject, INT2FIX(-1));
3733 PUSH_SEND_WITH_FLAG(ret, location, idAREF, INT2FIX(1), INT2FIX(0));
3734 }
3735 PUSH_INSN1(ret, location, setn, INT2FIX(orig_argc + 3));
3736 PUSH_INSN(ret, location, pop);
3737 }
3738 else if (flags & VM_CALL_ARGS_SPLAT) {
3739 PUSH_INSN(ret, location, dup);
3740 PUSH_INSN1(ret, location, putobject, INT2FIX(-1));
3741 PUSH_SEND_WITH_FLAG(ret, location, idAREF, INT2FIX(1), INT2FIX(0));
3742 PUSH_INSN1(ret, location, setn, INT2FIX(orig_argc + 2));
3743 PUSH_INSN(ret, location, pop);
3744 }
3745 else {
3746 PUSH_INSN1(ret, location, setn, INT2FIX(orig_argc + 1));
3747 }
3748 }
3749
3750 if ((flags & VM_CALL_KW_SPLAT) && (flags & VM_CALL_ARGS_BLOCKARG) && !(flags & VM_CALL_KW_SPLAT_MUT)) {
3751 PUSH_INSN(ret, location, splatkw);
3752 }
3753
3754 LABEL *not_basic_new = NEW_LABEL(location.line);
3755 LABEL *not_basic_new_finish = NEW_LABEL(location.line);
3756
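    // When the specialized instruction option is enabled and this is a
    // `new` call with no block, we emit an opt_new fast path that calls
    // #initialize directly and falls back to a plain send when the
    // receiver does not use the basic implementation of `new`.
    // Illustratively, `Foo.new(1)` takes the fast path only as long as
    // `new` has not been redefined for Foo.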
3757 bool inline_new = ISEQ_COMPILE_DATA(iseq)->option->specialized_instruction &&
3758 method_id == rb_intern("new") &&
3759 call_node->block == NULL &&
3760 (flags & VM_CALL_ARGS_BLOCKARG) == 0;
3761
3762 if (inline_new) {
3763 if (LAST_ELEMENT(ret) == opt_new_prelude) {
3764 PUSH_INSN(ret, location, putnil);
3765 PUSH_INSN(ret, location, swap);
3766 }
3767 else {
3768 ELEM_INSERT_NEXT(opt_new_prelude, &new_insn_body(iseq, location.line, location.node_id, BIN(swap), 0)->link);
3769 ELEM_INSERT_NEXT(opt_new_prelude, &new_insn_body(iseq, location.line, location.node_id, BIN(putnil), 0)->link);
3770 }
3771
3772 // Jump unless the receiver uses the "basic" implementation of "new"
3773 VALUE ci;
3774 if (flags & VM_CALL_FORWARDING) {
3775 ci = (VALUE)new_callinfo(iseq, method_id, orig_argc + 1, flags, kw_arg, 0);
3776 }
3777 else {
3778 ci = (VALUE)new_callinfo(iseq, method_id, orig_argc, flags, kw_arg, 0);
3779 }
3780
3781 PUSH_INSN2(ret, location, opt_new, ci, not_basic_new);
3782 LABEL_REF(not_basic_new);
3783 // optimized path
3784 PUSH_SEND_R(ret, location, rb_intern("initialize"), INT2FIX(orig_argc), block_iseq, INT2FIX(flags | VM_CALL_FCALL), kw_arg);
3785 PUSH_INSNL(ret, location, jump, not_basic_new_finish);
3786
3787 PUSH_LABEL(ret, not_basic_new);
3788 // Fall back to normal send
3789 PUSH_SEND_R(ret, location, method_id, INT2FIX(orig_argc), block_iseq, INT2FIX(flags), kw_arg);
3790 PUSH_INSN(ret, location, swap);
3791
3792 PUSH_LABEL(ret, not_basic_new_finish);
3793 PUSH_INSN(ret, location, pop);
3794 }
3795 else {
3796 PUSH_SEND_R(ret, location, method_id, INT2FIX(orig_argc), block_iseq, INT2FIX(flags), kw_arg);
3797 }
3798
3799 if (block_iseq && ISEQ_BODY(block_iseq)->catch_table) {
3800 pm_compile_retry_end_label(iseq, ret, retry_end_l);
3801 PUSH_CATCH_ENTRY(CATCH_TYPE_BREAK, start, retry_end_l, block_iseq, retry_end_l);
3802 }
3803
3804 if (PM_NODE_FLAG_P(call_node, PM_CALL_NODE_FLAGS_SAFE_NAVIGATION)) {
3805 PUSH_INSNL(ret, location, jump, end_label);
3806 PUSH_LABEL(ret, else_label);
3807 add_trace_branch_coverage(iseq, ret, &code_location, node_id, 1, "else", branches);
3808 PUSH_LABEL(ret, end_label);
3809 }
3810
3811 if (PM_NODE_FLAG_P(call_node, PM_CALL_NODE_FLAGS_ATTRIBUTE_WRITE) && !popped) {
3812 PUSH_INSN(ret, location, pop);
3813 }
3814
3815 if (popped) PUSH_INSN(ret, location, pop);
3816 ISEQ_COMPILE_DATA(iseq)->current_block = previous_block;
3817}
3818
3823static inline VALUE
3824pm_compile_back_reference_ref(const pm_back_reference_read_node_t *node)
3825{
3826 const char *type = (const char *) (node->base.location.start + 1);
3827
3828 // Since a back reference is `$<char>`, Ruby represents the ID as an
3829 // rb_intern on the value after the `$`.
3830 return INT2FIX(rb_intern2(type, 1)) << 1 | 1;
3831}
3832
3837static inline VALUE
3838pm_compile_numbered_reference_ref(const pm_numbered_reference_read_node_t *node)
3839{
3840 return INT2FIX(node->number << 1);
3841}
3842
3843static void
3844pm_compile_defined_expr0(rb_iseq_t *iseq, const pm_node_t *node, const pm_node_location_t *node_location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node, bool in_condition, LABEL **lfinish, bool explicit_receiver)
3845{
3846#define PUSH_VAL(type) (in_condition ? Qtrue : rb_iseq_defined_string(type))
3847
3848 // in_condition is the same as compile.c's needstr
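    // When in_condition is true, defined? is only being used as a boolean
    // (for example `if defined?(a)`), so PUSH_VAL pushes Qtrue instead of
    // the descriptive string (such as "expression" or "method") that
    // rb_iseq_defined_string would return.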
3849 enum defined_type dtype = DEFINED_NOT_DEFINED;
3850 const pm_node_location_t location = *node_location;
3851
3852 switch (PM_NODE_TYPE(node)) {
3853/* DEFINED_NIL ****************************************************************/
3854 case PM_NIL_NODE:
3855 // defined?(nil)
3856 // ^^^
3857 dtype = DEFINED_NIL;
3858 break;
3859/* DEFINED_IVAR ***************************************************************/
3860 case PM_INSTANCE_VARIABLE_READ_NODE: {
3861 // defined?(@a)
3862 // ^^
3863 const pm_instance_variable_read_node_t *cast = (const pm_instance_variable_read_node_t *) node;
3864 ID name = pm_constant_id_lookup(scope_node, cast->name);
3865
3866 PUSH_INSN3(ret, location, definedivar, ID2SYM(name), get_ivar_ic_value(iseq, name), PUSH_VAL(DEFINED_IVAR));
3867
3868 return;
3869 }
3870/* DEFINED_LVAR ***************************************************************/
3871 case PM_LOCAL_VARIABLE_READ_NODE:
3872 // a = 1; defined?(a)
3873 // ^
3874 case PM_IT_LOCAL_VARIABLE_READ_NODE:
3875 // 1.then { defined?(it) }
3876 // ^^
3877 dtype = DEFINED_LVAR;
3878 break;
3879/* DEFINED_GVAR ***************************************************************/
3880 case PM_GLOBAL_VARIABLE_READ_NODE: {
3881 // defined?($a)
3882 // ^^
3883 const pm_global_variable_read_node_t *cast = (const pm_global_variable_read_node_t *) node;
3884 ID name = pm_constant_id_lookup(scope_node, cast->name);
3885
3886 PUSH_INSN(ret, location, putnil);
3887 PUSH_INSN3(ret, location, defined, INT2FIX(DEFINED_GVAR), ID2SYM(name), PUSH_VAL(DEFINED_GVAR));
3888
3889 return;
3890 }
3891/* DEFINED_CVAR ***************************************************************/
3892 case PM_CLASS_VARIABLE_READ_NODE: {
3893 // defined?(@@a)
3894 // ^^^
3895 const pm_class_variable_read_node_t *cast = (const pm_class_variable_read_node_t *) node;
3896 ID name = pm_constant_id_lookup(scope_node, cast->name);
3897
3898 PUSH_INSN(ret, location, putnil);
3899 PUSH_INSN3(ret, location, defined, INT2FIX(DEFINED_CVAR), ID2SYM(name), PUSH_VAL(DEFINED_CVAR));
3900
3901 return;
3902 }
3903/* DEFINED_CONST **************************************************************/
3904 case PM_CONSTANT_READ_NODE: {
3905 // defined?(A)
3906 // ^
3907 const pm_constant_read_node_t *cast = (const pm_constant_read_node_t *) node;
3908 ID name = pm_constant_id_lookup(scope_node, cast->name);
3909
3910 PUSH_INSN(ret, location, putnil);
3911 PUSH_INSN3(ret, location, defined, INT2FIX(DEFINED_CONST), ID2SYM(name), PUSH_VAL(DEFINED_CONST));
3912
3913 return;
3914 }
3915/* DEFINED_YIELD **************************************************************/
3916 case PM_YIELD_NODE:
3917 // defined?(yield)
3918 // ^^^^^
3919 iseq_set_use_block(ISEQ_BODY(iseq)->local_iseq);
3920
3921 PUSH_INSN(ret, location, putnil);
3922 PUSH_INSN3(ret, location, defined, INT2FIX(DEFINED_YIELD), 0, PUSH_VAL(DEFINED_YIELD));
3923
3924 return;
3925/* DEFINED_ZSUPER *************************************************************/
3926 case PM_SUPER_NODE: {
3927 // defined?(super 1, 2)
3928 // ^^^^^^^^^^
3929 const pm_super_node_t *cast = (const pm_super_node_t *) node;
3930
3931 if (cast->block != NULL && !PM_NODE_TYPE_P(cast->block, PM_BLOCK_ARGUMENT_NODE)) {
3932 dtype = DEFINED_EXPR;
3933 break;
3934 }
3935
3936 PUSH_INSN(ret, location, putnil);
3937 PUSH_INSN3(ret, location, defined, INT2FIX(DEFINED_ZSUPER), 0, PUSH_VAL(DEFINED_ZSUPER));
3938 return;
3939 }
3940 case PM_FORWARDING_SUPER_NODE: {
3941 // defined?(super)
3942 // ^^^^^
3943 const pm_forwarding_super_node_t *cast = (const pm_forwarding_super_node_t *) node;
3944
3945 if (cast->block != NULL) {
3946 dtype = DEFINED_EXPR;
3947 break;
3948 }
3949
3950 PUSH_INSN(ret, location, putnil);
3951 PUSH_INSN3(ret, location, defined, INT2FIX(DEFINED_ZSUPER), 0, PUSH_VAL(DEFINED_ZSUPER));
3952 return;
3953 }
3954/* DEFINED_SELF ***************************************************************/
3955 case PM_SELF_NODE:
3956 // defined?(self)
3957 // ^^^^
3958 dtype = DEFINED_SELF;
3959 break;
3960/* DEFINED_TRUE ***************************************************************/
3961 case PM_TRUE_NODE:
3962 // defined?(true)
3963 // ^^^^
3964 dtype = DEFINED_TRUE;
3965 break;
3966/* DEFINED_FALSE **************************************************************/
3967 case PM_FALSE_NODE:
3968 // defined?(false)
3969 // ^^^^^
3970 dtype = DEFINED_FALSE;
3971 break;
3972/* DEFINED_ASGN ***************************************************************/
3973 case PM_CALL_AND_WRITE_NODE:
3974 // defined?(a.a &&= 1)
3975 // ^^^^^^^^^
3976 case PM_CALL_OPERATOR_WRITE_NODE:
3977 // defined?(a.a += 1)
3978 // ^^^^^^^^
3979 case PM_CALL_OR_WRITE_NODE:
3980 // defined?(a.a ||= 1)
3981 // ^^^^^^^^^
3982 case PM_CLASS_VARIABLE_AND_WRITE_NODE:
3983 // defined?(@@a &&= 1)
3984 // ^^^^^^^^^
3985 case PM_CLASS_VARIABLE_OPERATOR_WRITE_NODE:
3986 // defined?(@@a += 1)
3987 // ^^^^^^^^
3988 case PM_CLASS_VARIABLE_OR_WRITE_NODE:
3989 // defined?(@@a ||= 1)
3990 // ^^^^^^^^^
3991 case PM_CLASS_VARIABLE_WRITE_NODE:
3992 // defined?(@@a = 1)
3993 // ^^^^^^^
3994 case PM_CONSTANT_AND_WRITE_NODE:
3995 // defined?(A &&= 1)
3996 // ^^^^^^^
3997 case PM_CONSTANT_OPERATOR_WRITE_NODE:
3998 // defined?(A += 1)
3999 // ^^^^^^
4000 case PM_CONSTANT_OR_WRITE_NODE:
4001 // defined?(A ||= 1)
4002 // ^^^^^^^
4003 case PM_CONSTANT_PATH_AND_WRITE_NODE:
4004 // defined?(A::A &&= 1)
4005 // ^^^^^^^^^^
4006 case PM_CONSTANT_PATH_OPERATOR_WRITE_NODE:
4007 // defined?(A::A += 1)
4008 // ^^^^^^^^^
4009 case PM_CONSTANT_PATH_OR_WRITE_NODE:
4010 // defined?(A::A ||= 1)
4011 // ^^^^^^^^^^
4012 case PM_CONSTANT_PATH_WRITE_NODE:
4013 // defined?(A::A = 1)
4014 // ^^^^^^^^
4015 case PM_CONSTANT_WRITE_NODE:
4016 // defined?(A = 1)
4017 // ^^^^^
4018 case PM_GLOBAL_VARIABLE_AND_WRITE_NODE:
4019 // defined?($a &&= 1)
4020 // ^^^^^^^^
4021 case PM_GLOBAL_VARIABLE_OPERATOR_WRITE_NODE:
4022 // defined?($a += 1)
4023 // ^^^^^^^
4024 case PM_GLOBAL_VARIABLE_OR_WRITE_NODE:
4025 // defined?($a ||= 1)
4026 // ^^^^^^^^
4027 case PM_GLOBAL_VARIABLE_WRITE_NODE:
4028 // defined?($a = 1)
4029 // ^^^^^^
4030 case PM_INDEX_AND_WRITE_NODE:
4031 // defined?(a[1] &&= 1)
4032 // ^^^^^^^^^^
4033 case PM_INDEX_OPERATOR_WRITE_NODE:
4034 // defined?(a[1] += 1)
4035 // ^^^^^^^^^
4036 case PM_INDEX_OR_WRITE_NODE:
4037 // defined?(a[1] ||= 1)
4038 // ^^^^^^^^^^
4039 case PM_INSTANCE_VARIABLE_AND_WRITE_NODE:
4040 // defined?(@a &&= 1)
4041 // ^^^^^^^^
4042 case PM_INSTANCE_VARIABLE_OPERATOR_WRITE_NODE:
4043 // defined?(@a += 1)
4044 // ^^^^^^^
4045 case PM_INSTANCE_VARIABLE_OR_WRITE_NODE:
4046 // defined?(@a ||= 1)
4047 // ^^^^^^^^
4048 case PM_INSTANCE_VARIABLE_WRITE_NODE:
4049 // defined?(@a = 1)
4050 // ^^^^^^
4051 case PM_LOCAL_VARIABLE_AND_WRITE_NODE:
4052 // defined?(a &&= 1)
4053 // ^^^^^^^
4054 case PM_LOCAL_VARIABLE_OPERATOR_WRITE_NODE:
4055 // defined?(a += 1)
4056 // ^^^^^^
4057 case PM_LOCAL_VARIABLE_OR_WRITE_NODE:
4058 // defined?(a ||= 1)
4059 // ^^^^^^^
4060 case PM_LOCAL_VARIABLE_WRITE_NODE:
4061 // defined?(a = 1)
4062 // ^^^^^
4063 case PM_MULTI_WRITE_NODE:
4064 // defined?((a, = 1))
4065 // ^^^^^^
4066 dtype = DEFINED_ASGN;
4067 break;
4068/* DEFINED_EXPR ***************************************************************/
4069 case PM_ALIAS_GLOBAL_VARIABLE_NODE:
4070 // defined?((alias $a $b))
4071 // ^^^^^^^^^^^
4072 case PM_ALIAS_METHOD_NODE:
4073 // defined?((alias a b))
4074 // ^^^^^^^^^
4075 case PM_AND_NODE:
4076 // defined?(a and b)
4077 // ^^^^^^^
4078 case PM_BREAK_NODE:
4079 // defined?(break 1)
4080 // ^^^^^^^
4081 case PM_CASE_MATCH_NODE:
4082 // defined?(case 1; in 1; end)
4083 // ^^^^^^^^^^^^^^^^^
4084 case PM_CASE_NODE:
4085 // defined?(case 1; when 1; end)
4086 // ^^^^^^^^^^^^^^^^^^^
4087 case PM_CLASS_NODE:
4088 // defined?(class Foo; end)
4089 // ^^^^^^^^^^^^^^
4090 case PM_DEF_NODE:
4091 // defined?(def a() end)
4092 // ^^^^^^^^^^^
4093 case PM_DEFINED_NODE:
4094 // defined?(defined?(a))
4095 // ^^^^^^^^^^^
4096 case PM_FLIP_FLOP_NODE:
4097 // defined?(not (a .. b))
4098 // ^^^^^^
4099 case PM_FLOAT_NODE:
4100 // defined?(1.0)
4101 // ^^^
4102 case PM_FOR_NODE:
4103 // defined?(for a in 1 do end)
4104 // ^^^^^^^^^^^^^^^^^
4105 case PM_IF_NODE:
4106 // defined?(if a then end)
4107 // ^^^^^^^^^^^^^
4108 case PM_IMAGINARY_NODE:
4109 // defined?(1i)
4110 // ^^
4111 case PM_INTEGER_NODE:
4112 // defined?(1)
4113 // ^
4114 case PM_INTERPOLATED_MATCH_LAST_LINE_NODE:
4115 // defined?(not /#{1}/)
4116 // ^^^^^^
4117 case PM_INTERPOLATED_REGULAR_EXPRESSION_NODE:
4118 // defined?(/#{1}/)
4119 // ^^^^^^
4120 case PM_INTERPOLATED_STRING_NODE:
4121 // defined?("#{1}")
4122 // ^^^^^^
4123 case PM_INTERPOLATED_SYMBOL_NODE:
4124 // defined?(:"#{1}")
4125 // ^^^^^^^
4126 case PM_INTERPOLATED_X_STRING_NODE:
4127 // defined?(`#{1}`)
4128 // ^^^^^^
4129 case PM_LAMBDA_NODE:
4130 // defined?(-> {})
4131 // ^^^^^
4132 case PM_MATCH_LAST_LINE_NODE:
4133 // defined?(not //)
4134 // ^^^^^^
4135 case PM_MATCH_PREDICATE_NODE:
4136 // defined?(1 in 1)
4137 // ^^^^^^
4138 case PM_MATCH_REQUIRED_NODE:
4139 // defined?(1 => 1)
4140 // ^^^^^^
4141 case PM_MATCH_WRITE_NODE:
4142 // defined?(/(?<a>)/ =~ "")
4143 // ^^^^^^^^^^^^^^
4144 case PM_MODULE_NODE:
4145 // defined?(module A end)
4146 // ^^^^^^^^^^^^
4147 case PM_NEXT_NODE:
4148 // defined?(next 1)
4149 // ^^^^^^
4150 case PM_OR_NODE:
4151 // defined?(a or b)
4152 // ^^^^^^
4153 case PM_POST_EXECUTION_NODE:
4154 // defined?((END {}))
4155 // ^^^^^^^^
4156 case PM_RANGE_NODE:
4157 // defined?(1..1)
4158 // ^^^^
4159 case PM_RATIONAL_NODE:
4160 // defined?(1r)
4161 // ^^
4162 case PM_REDO_NODE:
4163 // defined?(redo)
4164 // ^^^^
4165 case PM_REGULAR_EXPRESSION_NODE:
4166 // defined?(//)
4167 // ^^
4168 case PM_RESCUE_MODIFIER_NODE:
4169 // defined?(a rescue b)
4170 // ^^^^^^^^^^
4171 case PM_RETRY_NODE:
4172 // defined?(retry)
4173 // ^^^^^
4174 case PM_RETURN_NODE:
4175 // defined?(return)
4176 // ^^^^^^
4177 case PM_SINGLETON_CLASS_NODE:
4178 // defined?(class << self; end)
4179 // ^^^^^^^^^^^^^^^^^^
4180 case PM_SOURCE_ENCODING_NODE:
4181 // defined?(__ENCODING__)
4182 // ^^^^^^^^^^^^
4183 case PM_SOURCE_FILE_NODE:
4184 // defined?(__FILE__)
4185 // ^^^^^^^^
4186 case PM_SOURCE_LINE_NODE:
4187 // defined?(__LINE__)
4188 // ^^^^^^^^
4189 case PM_STRING_NODE:
4190 // defined?("")
4191 // ^^
4192 case PM_SYMBOL_NODE:
4193 // defined?(:a)
4194 // ^^
4195 case PM_UNDEF_NODE:
4196 // defined?((undef a))
4197 // ^^^^^^^
4198 case PM_UNLESS_NODE:
4199 // defined?(unless a then end)
4200 // ^^^^^^^^^^^^^^^^^
4201 case PM_UNTIL_NODE:
4202 // defined?(until a do end)
4203 // ^^^^^^^^^^^^^^
4204 case PM_WHILE_NODE:
4205 // defined?(while a do end)
4206 // ^^^^^^^^^^^^^^
4207 case PM_X_STRING_NODE:
4208 // defined?(``)
4209 // ^^
4210 dtype = DEFINED_EXPR;
4211 break;
4212/* DEFINED_REF ****************************************************************/
4213 case PM_BACK_REFERENCE_READ_NODE: {
4214 // defined?($+)
4215 // ^^
4216 const pm_back_reference_read_node_t *cast = (const pm_back_reference_read_node_t *) node;
4217 VALUE ref = pm_compile_back_reference_ref(cast);
4218
4219 PUSH_INSN(ret, location, putnil);
4220 PUSH_INSN3(ret, location, defined, INT2FIX(DEFINED_REF), ref, PUSH_VAL(DEFINED_GVAR));
4221
4222 return;
4223 }
4224 case PM_NUMBERED_REFERENCE_READ_NODE: {
4225 // defined?($1)
4226 // ^^
4227 const pm_numbered_reference_read_node_t *cast = (const pm_numbered_reference_read_node_t *) node;
4228 VALUE ref = pm_compile_numbered_reference_ref(cast);
4229
4230 PUSH_INSN(ret, location, putnil);
4231 PUSH_INSN3(ret, location, defined, INT2FIX(DEFINED_REF), ref, PUSH_VAL(DEFINED_GVAR));
4232
4233 return;
4234 }
4235/* DEFINED_CONST_FROM *********************************************************/
4236 case PM_CONSTANT_PATH_NODE: {
4237 // defined?(A::A)
4238 // ^^^^
4239 const pm_constant_path_node_t *cast = (const pm_constant_path_node_t *) node;
4240 ID name = pm_constant_id_lookup(scope_node, cast->name);
4241
4242 if (cast->parent != NULL) {
4243 if (!lfinish[1]) lfinish[1] = NEW_LABEL(location.line);
4244 pm_compile_defined_expr0(iseq, cast->parent, node_location, ret, popped, scope_node, true, lfinish, false);
4245
4246 PUSH_INSNL(ret, location, branchunless, lfinish[1]);
4247 PM_COMPILE(cast->parent);
4248 }
4249 else {
4250 PUSH_INSN1(ret, location, putobject, rb_cObject);
4251 }
4252
4253 PUSH_INSN3(ret, location, defined, INT2FIX(DEFINED_CONST_FROM), ID2SYM(name), PUSH_VAL(DEFINED_CONST));
4254 return;
4255 }
4256/* Containers *****************************************************************/
4257 case PM_BEGIN_NODE: {
4258 // defined?(begin end)
4259 // ^^^^^^^^^
4260 const pm_begin_node_t *cast = (const pm_begin_node_t *) node;
4261
4262 if (cast->rescue_clause == NULL && cast->ensure_clause == NULL && cast->else_clause == NULL) {
4263 if (cast->statements == NULL) {
4264 // If we have empty statements, then we want to return "nil".
4265 dtype = DEFINED_NIL;
4266 }
4267 else if (cast->statements->body.size == 1) {
4268 // If we have a begin node that is wrapping a single statement
4269 // then we want to recurse down to that statement and compile
4270 // it.
4271 pm_compile_defined_expr0(iseq, cast->statements->body.nodes[0], node_location, ret, popped, scope_node, in_condition, lfinish, false);
4272 return;
4273 }
4274 else {
4275 // Otherwise, we have a begin wrapping multiple statements, in
4276 // which case this is defined as "expression".
4277 dtype = DEFINED_EXPR;
4278 }
4279 } else {
4280 // If we have any of the other clauses besides the main begin/end,
4281 // this is defined as "expression".
4282 dtype = DEFINED_EXPR;
4283 }
4284
4285 break;
4286 }
4287 case PM_PARENTHESES_NODE: {
4288 // defined?(())
4289 // ^^
4290 const pm_parentheses_node_t *cast = (const pm_parentheses_node_t *) node;
4291
4292 if (cast->body == NULL) {
4293 // If we have empty parentheses, then we want to return "nil".
4294 dtype = DEFINED_NIL;
4295 }
4296 else if (PM_NODE_TYPE_P(cast->body, PM_STATEMENTS_NODE) && !PM_NODE_FLAG_P(cast, PM_PARENTHESES_NODE_FLAGS_MULTIPLE_STATEMENTS)) {
4297 // If we have a parentheses node that is wrapping a single statement
4298 // then we want to recurse down to that statement and compile it.
4299 pm_compile_defined_expr0(iseq, ((const pm_statements_node_t *) cast->body)->body.nodes[0], node_location, ret, popped, scope_node, in_condition, lfinish, false);
4300 return;
4301 }
4302 else {
4303 // Otherwise, we have parentheses wrapping multiple statements, in
4304 // which case this is defined as "expression".
4305 dtype = DEFINED_EXPR;
4306 }
4307
4308 break;
4309 }
4310 case PM_ARRAY_NODE: {
4311 // defined?([])
4312 // ^^
4313 const pm_array_node_t *cast = (const pm_array_node_t *) node;
4314
4315 if (cast->elements.size > 0 && !lfinish[1]) {
4316 lfinish[1] = NEW_LABEL(location.line);
4317 }
4318
4319 for (size_t index = 0; index < cast->elements.size; index++) {
4320 pm_compile_defined_expr0(iseq, cast->elements.nodes[index], node_location, ret, popped, scope_node, true, lfinish, false);
4321 PUSH_INSNL(ret, location, branchunless, lfinish[1]);
4322 }
4323
4324 dtype = DEFINED_EXPR;
4325 break;
4326 }
4327 case PM_HASH_NODE:
4328 // defined?({ a: 1 })
4329 // ^^^^^^^^
4330 case PM_KEYWORD_HASH_NODE: {
4331 // defined?(a(a: 1))
4332 // ^^^^
4333 const pm_node_list_t *elements;
4334
4335 if (PM_NODE_TYPE_P(node, PM_HASH_NODE)) {
4336 elements = &((const pm_hash_node_t *) node)->elements;
4337 }
4338 else {
4339 elements = &((const pm_keyword_hash_node_t *) node)->elements;
4340 }
4341
4342 if (elements->size > 0 && !lfinish[1]) {
4343 lfinish[1] = NEW_LABEL(location.line);
4344 }
4345
4346 for (size_t index = 0; index < elements->size; index++) {
4347 pm_compile_defined_expr0(iseq, elements->nodes[index], node_location, ret, popped, scope_node, true, lfinish, false);
4348 PUSH_INSNL(ret, location, branchunless, lfinish[1]);
4349 }
4350
4351 dtype = DEFINED_EXPR;
4352 break;
4353 }
4354 case PM_ASSOC_NODE: {
4355 // defined?({ a: 1 })
4356 // ^^^^
4357 const pm_assoc_node_t *cast = (const pm_assoc_node_t *) node;
4358
4359 pm_compile_defined_expr0(iseq, cast->key, node_location, ret, popped, scope_node, true, lfinish, false);
4360 PUSH_INSNL(ret, location, branchunless, lfinish[1]);
4361 pm_compile_defined_expr0(iseq, cast->value, node_location, ret, popped, scope_node, true, lfinish, false);
4362
4363 return;
4364 }
4365 case PM_ASSOC_SPLAT_NODE: {
4366 // defined?({ **a })
4367 // ^^^^
4368 const pm_assoc_splat_node_t *cast = (const pm_assoc_splat_node_t *) node;
4369
4370 if (cast->value == NULL) {
4371 dtype = DEFINED_EXPR;
4372 break;
4373 }
4374
4375 pm_compile_defined_expr0(iseq, cast->value, node_location, ret, popped, scope_node, true, lfinish, false);
4376 return;
4377 }
4378 case PM_IMPLICIT_NODE: {
4379 // defined?({ a: })
4380 // ^^
4381 const pm_implicit_node_t *cast = (const pm_implicit_node_t *) node;
4382 pm_compile_defined_expr0(iseq, cast->value, node_location, ret, popped, scope_node, in_condition, lfinish, false);
4383 return;
4384 }
4385 case PM_CALL_NODE: {
4386#define BLOCK_P(cast) ((cast)->block != NULL && PM_NODE_TYPE_P((cast)->block, PM_BLOCK_NODE))
4387
4388 // defined?(a(1, 2, 3))
4389 // ^^^^^^^^^^
4390 const pm_call_node_t *cast = ((const pm_call_node_t *) node);
4391
4392 if (BLOCK_P(cast)) {
4393 dtype = DEFINED_EXPR;
4394 break;
4395 }
4396
4397 if (cast->receiver || cast->arguments || (cast->block && PM_NODE_TYPE_P(cast->block, PM_BLOCK_ARGUMENT_NODE))) {
4398 if (!lfinish[1]) lfinish[1] = NEW_LABEL(location.line);
4399 if (!lfinish[2]) lfinish[2] = NEW_LABEL(location.line);
4400 }
4401
4402 if (cast->arguments) {
4403 pm_compile_defined_expr0(iseq, (const pm_node_t *) cast->arguments, node_location, ret, popped, scope_node, true, lfinish, false);
4404 PUSH_INSNL(ret, location, branchunless, lfinish[1]);
4405 }
4406
4407 if (cast->block && PM_NODE_TYPE_P(cast->block, PM_BLOCK_ARGUMENT_NODE)) {
4408 pm_compile_defined_expr0(iseq, cast->block, node_location, ret, popped, scope_node, true, lfinish, false);
4409 PUSH_INSNL(ret, location, branchunless, lfinish[1]);
4410 }
4411
4412 if (cast->receiver) {
4413 if (PM_NODE_TYPE_P(cast->receiver, PM_CALL_NODE) && !BLOCK_P((const pm_call_node_t *) cast->receiver)) {
4414 // Special behavior here where we chain calls together. This is
4415 // the only path that sets explicit_receiver to true.
4416 pm_compile_defined_expr0(iseq, cast->receiver, node_location, ret, popped, scope_node, true, lfinish, true);
4417 PUSH_INSNL(ret, location, branchunless, lfinish[2]);
4418
4419 const pm_call_node_t *receiver = (const pm_call_node_t *) cast->receiver;
4420 ID method_id = pm_constant_id_lookup(scope_node, receiver->name);
4421
4422 pm_compile_call(iseq, receiver, ret, popped, scope_node, method_id, NULL);
4423 }
4424 else {
4425 pm_compile_defined_expr0(iseq, cast->receiver, node_location, ret, popped, scope_node, true, lfinish, false);
4426 PUSH_INSNL(ret, location, branchunless, lfinish[1]);
4427 PM_COMPILE(cast->receiver);
4428 }
4429
4430 ID method_id = pm_constant_id_lookup(scope_node, cast->name);
4431
4432 if (explicit_receiver) PUSH_INSN(ret, location, dup);
4433 PUSH_INSN3(ret, location, defined, INT2FIX(DEFINED_METHOD), rb_id2sym(method_id), PUSH_VAL(DEFINED_METHOD));
4434 }
4435 else {
4436 ID method_id = pm_constant_id_lookup(scope_node, cast->name);
4437
4438 PUSH_INSN(ret, location, putself);
4439 if (explicit_receiver) PUSH_INSN(ret, location, dup);
4440
4441 PUSH_INSN3(ret, location, defined, INT2FIX(DEFINED_FUNC), rb_id2sym(method_id), PUSH_VAL(DEFINED_METHOD));
4442 }
4443
4444 return;
4445
4446#undef BLOCK_P
4447 }
4448 case PM_ARGUMENTS_NODE: {
4449 // defined?(a(1, 2, 3))
4450 // ^^^^^^^
4451 const pm_arguments_node_t *cast = (const pm_arguments_node_t *) node;
4452
4453 for (size_t index = 0; index < cast->arguments.size; index++) {
4454 pm_compile_defined_expr0(iseq, cast->arguments.nodes[index], node_location, ret, popped, scope_node, in_condition, lfinish, false);
4455 PUSH_INSNL(ret, location, branchunless, lfinish[1]);
4456 }
4457
4458 dtype = DEFINED_EXPR;
4459 break;
4460 }
4461 case PM_BLOCK_ARGUMENT_NODE:
4462 // defined?(a(&b))
4463 // ^^
4464 dtype = DEFINED_EXPR;
4465 break;
4466 case PM_FORWARDING_ARGUMENTS_NODE:
4467 // def a(...) = defined?(a(...))
4468 // ^^^
4469 dtype = DEFINED_EXPR;
4470 break;
4471 case PM_SPLAT_NODE: {
4472 // def a(*) = defined?(a(*))
4473 // ^
4474 const pm_splat_node_t *cast = (const pm_splat_node_t *) node;
4475
4476 if (cast->expression == NULL) {
4477 dtype = DEFINED_EXPR;
4478 break;
4479 }
4480
4481 pm_compile_defined_expr0(iseq, cast->expression, node_location, ret, popped, scope_node, in_condition, lfinish, false);
4482
4483 if (!lfinish[1]) lfinish[1] = NEW_LABEL(location.line);
4484 PUSH_INSNL(ret, location, branchunless, lfinish[1]);
4485
4486 dtype = DEFINED_EXPR;
4487 break;
4488 }
4489 case PM_SHAREABLE_CONSTANT_NODE:
4490 // # shareable_constant_value: literal
4491 // defined?(A = 1)
4492 // ^^^^^
4493 pm_compile_defined_expr0(iseq, ((const pm_shareable_constant_node_t *) node)->write, node_location, ret, popped, scope_node, in_condition, lfinish, explicit_receiver);
4494 return;
4495/* Unreachable (parameters) ***************************************************/
4496 case PM_BLOCK_LOCAL_VARIABLE_NODE:
4497 case PM_BLOCK_PARAMETER_NODE:
4498 case PM_BLOCK_PARAMETERS_NODE:
4499 case PM_FORWARDING_PARAMETER_NODE:
4500 case PM_IMPLICIT_REST_NODE:
4501 case PM_IT_PARAMETERS_NODE:
4502 case PM_PARAMETERS_NODE:
4503 case PM_KEYWORD_REST_PARAMETER_NODE:
4504 case PM_NO_KEYWORDS_PARAMETER_NODE:
4505 case PM_NUMBERED_PARAMETERS_NODE:
4506 case PM_OPTIONAL_KEYWORD_PARAMETER_NODE:
4507 case PM_OPTIONAL_PARAMETER_NODE:
4508 case PM_REQUIRED_KEYWORD_PARAMETER_NODE:
4509 case PM_REQUIRED_PARAMETER_NODE:
4510 case PM_REST_PARAMETER_NODE:
4511/* Unreachable (pattern matching) *********************************************/
4512 case PM_ALTERNATION_PATTERN_NODE:
4513 case PM_ARRAY_PATTERN_NODE:
4514 case PM_CAPTURE_PATTERN_NODE:
4515 case PM_FIND_PATTERN_NODE:
4516 case PM_HASH_PATTERN_NODE:
4517 case PM_PINNED_EXPRESSION_NODE:
4518 case PM_PINNED_VARIABLE_NODE:
4519/* Unreachable (indirect writes) **********************************************/
4520 case PM_CALL_TARGET_NODE:
4521 case PM_CLASS_VARIABLE_TARGET_NODE:
4522 case PM_CONSTANT_PATH_TARGET_NODE:
4523 case PM_CONSTANT_TARGET_NODE:
4524 case PM_GLOBAL_VARIABLE_TARGET_NODE:
4525 case PM_INDEX_TARGET_NODE:
4526 case PM_INSTANCE_VARIABLE_TARGET_NODE:
4527 case PM_LOCAL_VARIABLE_TARGET_NODE:
4528 case PM_MULTI_TARGET_NODE:
4529/* Unreachable (clauses) ******************************************************/
4530 case PM_ELSE_NODE:
4531 case PM_ENSURE_NODE:
4532 case PM_IN_NODE:
4533 case PM_RESCUE_NODE:
4534 case PM_WHEN_NODE:
4535/* Unreachable (miscellaneous) ************************************************/
4536 case PM_BLOCK_NODE:
4537 case PM_EMBEDDED_STATEMENTS_NODE:
4538 case PM_EMBEDDED_VARIABLE_NODE:
4539 case PM_MISSING_NODE:
4540 case PM_PRE_EXECUTION_NODE:
4541 case PM_PROGRAM_NODE:
4542 case PM_SCOPE_NODE:
4543 case PM_STATEMENTS_NODE:
4544 rb_bug("Unreachable node in defined?: %s", pm_node_type_to_str(PM_NODE_TYPE(node)));
4545 }
4546
4547 RUBY_ASSERT(dtype != DEFINED_NOT_DEFINED);
4548 PUSH_INSN1(ret, location, putobject, PUSH_VAL(dtype));
4549
4550#undef PUSH_VAL
4551}
4552
4553static void
4554pm_defined_expr(rb_iseq_t *iseq, const pm_node_t *node, const pm_node_location_t *node_location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node, bool in_condition, LABEL **lfinish)
4555{
4556 LINK_ELEMENT *lcur = ret->last;
4557 pm_compile_defined_expr0(iseq, node, node_location, ret, popped, scope_node, in_condition, lfinish, false);
4558
4559 if (lfinish[1]) {
4560 LABEL *lstart = NEW_LABEL(node_location->line);
4561 LABEL *lend = NEW_LABEL(node_location->line);
4562
4563 const struct rb_iseq_new_with_callback_callback_func *ifunc =
4564 rb_iseq_new_with_callback_new_callback(build_defined_rescue_iseq, NULL);
4565
4566 const rb_iseq_t *rescue = new_child_iseq_with_callback(
4567 iseq,
4568 ifunc,
4569 rb_str_concat(rb_str_new2("defined guard in "), ISEQ_BODY(iseq)->location.label),
4570 iseq,
4571 ISEQ_TYPE_RESCUE,
4572 0
4573 );
4574
4575 lstart->rescued = LABEL_RESCUE_BEG;
4576 lend->rescued = LABEL_RESCUE_END;
4577
4578 APPEND_LABEL(ret, lcur, lstart);
4579 PUSH_LABEL(ret, lend);
4580 PUSH_CATCH_ENTRY(CATCH_TYPE_RESCUE, lstart, lend, rescue, lfinish[1]);
4581 }
4582}
4583
4584static void
4585pm_compile_defined_expr(rb_iseq_t *iseq, const pm_node_t *node, const pm_node_location_t *node_location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node, bool in_condition)
4586{
4587 LABEL *lfinish[3];
4588 LINK_ELEMENT *last = ret->last;
4589
4590 lfinish[0] = NEW_LABEL(node_location->line);
4591 lfinish[1] = 0;
4592 lfinish[2] = 0;
4593
4594 if (!popped) {
4595 pm_defined_expr(iseq, node, node_location, ret, popped, scope_node, in_condition, lfinish);
4596 }
4597
4598 if (lfinish[1]) {
4599 ELEM_INSERT_NEXT(last, &new_insn_body(iseq, node_location->line, node_location->node_id, BIN(putnil), 0)->link);
4600 PUSH_INSN(ret, *node_location, swap);
4601
4602 if (lfinish[2]) PUSH_LABEL(ret, lfinish[2]);
4603 PUSH_INSN(ret, *node_location, pop);
4604 PUSH_LABEL(ret, lfinish[1]);
4605
4606 }
4607
4608 PUSH_LABEL(ret, lfinish[0]);
4609}
4610
4611// This is exactly the same as add_ensure_iseq, except that it compiles
4612// the node as a Prism node rather than a CRuby NODE.
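//
// For illustration (arbitrary method names), when compiling the `return`
// below, the enclosing ensure body has to be emitted inline before the
// return instruction, which is what walking the ensure_node_stack here
// accomplishes:
//
//     begin
//       return compute
//     ensure
//       cleanup
//     end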
4613static void
4614pm_add_ensure_iseq(LINK_ANCHOR *const ret, rb_iseq_t *iseq, int is_return, pm_scope_node_t *scope_node)
4615{
4616 RUBY_ASSERT(can_add_ensure_iseq(iseq));
4617
4618 struct iseq_compile_data_ensure_node_stack *enlp =
4619 ISEQ_COMPILE_DATA(iseq)->ensure_node_stack;
4620 struct iseq_compile_data_ensure_node_stack *prev_enlp = enlp;
4621 DECL_ANCHOR(ensure);
4622
4623 while (enlp) {
4624 if (enlp->erange != NULL) {
4625 DECL_ANCHOR(ensure_part);
4626 LABEL *lstart = NEW_LABEL(0);
4627 LABEL *lend = NEW_LABEL(0);
4628
4629 add_ensure_range(iseq, enlp->erange, lstart, lend);
4630
4631 ISEQ_COMPILE_DATA(iseq)->ensure_node_stack = enlp->prev;
4632 PUSH_LABEL(ensure_part, lstart);
4633 bool popped = true;
4634 PM_COMPILE_INTO_ANCHOR(ensure_part, (const pm_node_t *) enlp->ensure_node);
4635 PUSH_LABEL(ensure_part, lend);
4636 PUSH_SEQ(ensure, ensure_part);
4637 }
4638 else {
4639 if (!is_return) {
4640 break;
4641 }
4642 }
4643 enlp = enlp->prev;
4644 }
4645 ISEQ_COMPILE_DATA(iseq)->ensure_node_stack = prev_enlp;
4646 PUSH_SEQ(ret, ensure);
4647}
4648
4649struct pm_local_table_insert_ctx {
4650 pm_scope_node_t *scope_node;
4651 rb_ast_id_table_t *local_table_for_iseq;
4652 int local_index;
4653};
4654
4655static int
4656pm_local_table_insert_func(st_data_t *key, st_data_t *value, st_data_t arg, int existing)
4657{
4658 if (!existing) {
4659 pm_constant_id_t constant_id = (pm_constant_id_t) *key;
4660 struct pm_local_table_insert_ctx * ctx = (struct pm_local_table_insert_ctx *) arg;
4661
4662 pm_scope_node_t *scope_node = ctx->scope_node;
4663 rb_ast_id_table_t *local_table_for_iseq = ctx->local_table_for_iseq;
4664 int local_index = ctx->local_index;
4665
4666 ID local = pm_constant_id_lookup(scope_node, constant_id);
4667 local_table_for_iseq->ids[local_index] = local;
4668
4669 *value = (st_data_t)local_index;
4670
4671 ctx->local_index++;
4672 }
4673
4674 return ST_CONTINUE;
4675}
4676
4682static void
4683pm_insert_local_index(pm_constant_id_t constant_id, int local_index, st_table *index_lookup_table, rb_ast_id_table_t *local_table_for_iseq, pm_scope_node_t *scope_node)
4684{
4685 RUBY_ASSERT((constant_id & PM_SPECIAL_CONSTANT_FLAG) == 0);
4686
4687 ID local = pm_constant_id_lookup(scope_node, constant_id);
4688 local_table_for_iseq->ids[local_index] = local;
4689 st_insert(index_lookup_table, (st_data_t) constant_id, (st_data_t) local_index);
4690}
4691
4696static void
4697pm_insert_local_special(ID local_name, int local_index, st_table *index_lookup_table, rb_ast_id_table_t *local_table_for_iseq)
4698{
4699 local_table_for_iseq->ids[local_index] = local_name;
4700 st_insert(index_lookup_table, (st_data_t) (local_name | PM_SPECIAL_CONSTANT_FLAG), (st_data_t) local_index);
4701}
4702
4709static int
4710pm_compile_destructured_param_locals(const pm_multi_target_node_t *node, st_table *index_lookup_table, rb_ast_id_table_t *local_table_for_iseq, pm_scope_node_t *scope_node, int local_index)
4711{
4712 for (size_t index = 0; index < node->lefts.size; index++) {
4713 const pm_node_t *left = node->lefts.nodes[index];
4714
4715 if (PM_NODE_TYPE_P(left, PM_REQUIRED_PARAMETER_NODE)) {
4716 if (!PM_NODE_FLAG_P(left, PM_PARAMETER_FLAGS_REPEATED_PARAMETER)) {
4717 pm_insert_local_index(((const pm_required_parameter_node_t *) left)->name, local_index, index_lookup_table, local_table_for_iseq, scope_node);
4718 local_index++;
4719 }
4720 }
4721 else {
4722 RUBY_ASSERT(PM_NODE_TYPE_P(left, PM_MULTI_TARGET_NODE));
4723 local_index = pm_compile_destructured_param_locals((const pm_multi_target_node_t *) left, index_lookup_table, local_table_for_iseq, scope_node, local_index);
4724 }
4725 }
4726
4727 if (node->rest != NULL && PM_NODE_TYPE_P(node->rest, PM_SPLAT_NODE)) {
4728 const pm_splat_node_t *rest = (const pm_splat_node_t *) node->rest;
4729
4730 if (rest->expression != NULL) {
4731 RUBY_ASSERT(PM_NODE_TYPE_P(rest->expression, PM_REQUIRED_PARAMETER_NODE));
4732
4733 if (!PM_NODE_FLAG_P(rest->expression, PM_PARAMETER_FLAGS_REPEATED_PARAMETER)) {
4734 pm_insert_local_index(((const pm_required_parameter_node_t *) rest->expression)->name, local_index, index_lookup_table, local_table_for_iseq, scope_node);
4735 local_index++;
4736 }
4737 }
4738 }
4739
4740 for (size_t index = 0; index < node->rights.size; index++) {
4741 const pm_node_t *right = node->rights.nodes[index];
4742
4743 if (PM_NODE_TYPE_P(right, PM_REQUIRED_PARAMETER_NODE)) {
4744 if (!PM_NODE_FLAG_P(right, PM_PARAMETER_FLAGS_REPEATED_PARAMETER)) {
4745 pm_insert_local_index(((const pm_required_parameter_node_t *) right)->name, local_index, index_lookup_table, local_table_for_iseq, scope_node);
4746 local_index++;
4747 }
4748 }
4749 else {
4750 RUBY_ASSERT(PM_NODE_TYPE_P(right, PM_MULTI_TARGET_NODE));
4751 local_index = pm_compile_destructured_param_locals((const pm_multi_target_node_t *) right, index_lookup_table, local_table_for_iseq, scope_node, local_index);
4752 }
4753 }
4754
4755 return local_index;
4756}
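
// As a sketch of the recursion above, a block signature like
//
//     each { |(a, (b, *c), d)| }
//
// walks the nested multi-target nodes and reserves local table slots for a,
// b, c, and d in that order.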
4757
4762static inline void
4763pm_compile_destructured_param_write(rb_iseq_t *iseq, const pm_required_parameter_node_t *node, LINK_ANCHOR *const ret, const pm_scope_node_t *scope_node)
4764{
4765 const pm_node_location_t location = PM_NODE_START_LOCATION(scope_node->parser, node);
4766 pm_local_index_t index = pm_lookup_local_index(iseq, scope_node, node->name, 0);
4767 PUSH_SETLOCAL(ret, location, index.index, index.level);
4768}
4769
4778static void
4779pm_compile_destructured_param_writes(rb_iseq_t *iseq, const pm_multi_target_node_t *node, LINK_ANCHOR *const ret, const pm_scope_node_t *scope_node)
4780{
4781 const pm_node_location_t location = PM_NODE_START_LOCATION(scope_node->parser, node);
4782 bool has_rest = (node->rest && PM_NODE_TYPE_P(node->rest, PM_SPLAT_NODE) && (((const pm_splat_node_t *) node->rest)->expression) != NULL);
4783 bool has_rights = node->rights.size > 0;
4784
4785 int flag = (has_rest || has_rights) ? 1 : 0;
4786 PUSH_INSN2(ret, location, expandarray, INT2FIX(node->lefts.size), INT2FIX(flag));
4787
4788 for (size_t index = 0; index < node->lefts.size; index++) {
4789 const pm_node_t *left = node->lefts.nodes[index];
4790
4791 if (PM_NODE_TYPE_P(left, PM_REQUIRED_PARAMETER_NODE)) {
4792 pm_compile_destructured_param_write(iseq, (const pm_required_parameter_node_t *) left, ret, scope_node);
4793 }
4794 else {
4795 RUBY_ASSERT(PM_NODE_TYPE_P(left, PM_MULTI_TARGET_NODE));
4796 pm_compile_destructured_param_writes(iseq, (const pm_multi_target_node_t *) left, ret, scope_node);
4797 }
4798 }
4799
4800 if (has_rest) {
4801 if (has_rights) {
4802 PUSH_INSN2(ret, location, expandarray, INT2FIX(node->rights.size), INT2FIX(3));
4803 }
4804
4805 const pm_node_t *rest = ((const pm_splat_node_t *) node->rest)->expression;
4806 RUBY_ASSERT(PM_NODE_TYPE_P(rest, PM_REQUIRED_PARAMETER_NODE));
4807
4808 pm_compile_destructured_param_write(iseq, (const pm_required_parameter_node_t *) rest, ret, scope_node);
4809 }
4810
4811 if (has_rights) {
4812 if (!has_rest) {
4813 PUSH_INSN2(ret, location, expandarray, INT2FIX(node->rights.size), INT2FIX(2));
4814 }
4815
4816 for (size_t index = 0; index < node->rights.size; index++) {
4817 const pm_node_t *right = node->rights.nodes[index];
4818
4819 if (PM_NODE_TYPE_P(right, PM_REQUIRED_PARAMETER_NODE)) {
4820 pm_compile_destructured_param_write(iseq, (const pm_required_parameter_node_t *) right, ret, scope_node);
4821 }
4822 else {
4823 RUBY_ASSERT(PM_NODE_TYPE_P(right, PM_MULTI_TARGET_NODE));
4824 pm_compile_destructured_param_writes(iseq, (const pm_multi_target_node_t *) right, ret, scope_node);
4825 }
4826 }
4827 }
4828}
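
// A note on the expandarray operands used above: the first operand is the
// number of values to expand to and the second is a flag bitmask where, per
// CRuby's insns.def, 0x1 requests the rest array and 0x2 marks expansion for
// trailing (post) targets, so INT2FIX(3) asks for both at once.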
4829
4834typedef struct pm_multi_target_state_node {
4835    // The pointer to the topn instruction that will need to be modified after
4836 // we know the total stack size of all of the targets.
4837 INSN *topn;
4838
4839 // The index of the stack from the base of the entire multi target at which
4840 // the parent expression is located.
4841 size_t stack_index;
4842
4843 // The number of slots in the stack that this node occupies.
4844 size_t stack_size;
4845
4846 // The position of the node in the list of targets.
4847 size_t position;
4848
4849 // A pointer to the next node in this linked list.
4850 struct pm_multi_target_state_node *next;
4851} pm_multi_target_state_node_t;
4852
4860typedef struct {
4861 // The total number of slots in the stack that this multi target occupies.
4862 size_t stack_size;
4863
4864 // The position of the current node being compiled. This is forwarded to
4865 // nodes when they are allocated.
4866 size_t position;
4867
4868 // A pointer to the head of this linked list.
4869    pm_multi_target_state_node_t *head;
4870
4871 // A pointer to the tail of this linked list.
4872    pm_multi_target_state_node_t *tail;
4873} pm_multi_target_state_t;
4874
4878static void
4879pm_multi_target_state_push(pm_multi_target_state_t *state, INSN *topn, size_t stack_size)
4880{
4881    pm_multi_target_state_node_t *node = ALLOC(pm_multi_target_state_node_t);
4882    node->topn = topn;
4883 node->stack_index = state->stack_size + 1;
4884 node->stack_size = stack_size;
4885 node->position = state->position;
4886 node->next = NULL;
4887
4888 if (state->head == NULL) {
4889 state->head = node;
4890 state->tail = node;
4891 }
4892 else {
4893 state->tail->next = node;
4894 state->tail = node;
4895 }
4896
4897 state->stack_size += stack_size;
4898}
4899
4905static void
4906pm_multi_target_state_update(pm_multi_target_state_t *state)
4907{
4908 // If nothing was ever pushed onto the stack, then we don't need to do any
4909 // kind of updates.
4910 if (state->stack_size == 0) return;
4911
4912 pm_multi_target_state_node_t *current = state->head;
4913    pm_multi_target_state_node_t *previous;
4914
4915 while (current != NULL) {
4916 VALUE offset = INT2FIX(state->stack_size - current->stack_index + current->position);
4917 current->topn->operands[0] = offset;
4918
4919 // stack_size will be > 1 in the case that we compiled an index target
4920 // and it had arguments. In this case, we use multiple topn instructions
4921 // to grab up all of the arguments as well, so those offsets need to be
4922 // updated as well.
4923 if (current->stack_size > 1) {
4924 INSN *insn = current->topn;
4925
4926 for (size_t index = 1; index < current->stack_size; index += 1) {
4927 LINK_ELEMENT *element = get_next_insn(insn);
4928 RUBY_ASSERT(IS_INSN(element));
4929
4930 insn = (INSN *) element;
4931 RUBY_ASSERT(insn->insn_id == BIN(topn));
4932
4933 insn->operands[0] = offset;
4934 }
4935 }
4936
4937 previous = current;
4938 current = current->next;
4939
4940 xfree(previous);
4941 }
4942}
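
// A small worked example of the fixup above: in
//
//     foo.bar, baz[0] = 1, 2
//
// the receivers (and the index argument) are pushed before the assigned
// values exist on the stack, so their topn offsets are recorded while
// compiling the targets and only patched here once the final stack size is
// known.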
4943
4944static void
4945pm_compile_multi_target_node(rb_iseq_t *iseq, const pm_node_t *node, LINK_ANCHOR *const parents, LINK_ANCHOR *const writes, LINK_ANCHOR *const cleanup, pm_scope_node_t *scope_node, pm_multi_target_state_t *state);
4946
4975static void
4976pm_compile_target_node(rb_iseq_t *iseq, const pm_node_t *node, LINK_ANCHOR *const parents, LINK_ANCHOR *const writes, LINK_ANCHOR *const cleanup, pm_scope_node_t *scope_node, pm_multi_target_state_t *state)
4977{
4978 const pm_node_location_t location = PM_NODE_START_LOCATION(scope_node->parser, node);
4979
4980 switch (PM_NODE_TYPE(node)) {
4981 case PM_LOCAL_VARIABLE_TARGET_NODE: {
4982 // Local variable targets have no parent expression, so they only need
4983 // to compile the write.
4984 //
4985 // for i in []; end
4986 //
4987        const pm_local_variable_target_node_t *cast = (const pm_local_variable_target_node_t *) node;
4988        pm_local_index_t index = pm_lookup_local_index(iseq, scope_node, cast->name, cast->depth);
4989
4990 PUSH_SETLOCAL(writes, location, index.index, index.level);
4991 break;
4992 }
4993 case PM_CLASS_VARIABLE_TARGET_NODE: {
4994 // Class variable targets have no parent expression, so they only need
4995 // to compile the write.
4996 //
4997 // for @@i in []; end
4998 //
4999        const pm_class_variable_target_node_t *cast = (const pm_class_variable_target_node_t *) node;
5000        ID name = pm_constant_id_lookup(scope_node, cast->name);
5001
5002 VALUE operand = ID2SYM(name);
5003 PUSH_INSN2(writes, location, setclassvariable, operand, get_cvar_ic_value(iseq, name));
5004 break;
5005 }
5006 case PM_CONSTANT_TARGET_NODE: {
5007 // Constant targets have no parent expression, so they only need to
5008 // compile the write.
5009 //
5010 // for I in []; end
5011 //
5012 const pm_constant_target_node_t *cast = (const pm_constant_target_node_t *) node;
5013 ID name = pm_constant_id_lookup(scope_node, cast->name);
5014
5015 VALUE operand = ID2SYM(name);
5016 PUSH_INSN1(writes, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_CONST_BASE));
5017 PUSH_INSN1(writes, location, setconstant, operand);
5018 break;
5019 }
5020 case PM_GLOBAL_VARIABLE_TARGET_NODE: {
5021 // Global variable targets have no parent expression, so they only need
5022 // to compile the write.
5023 //
5024 // for $i in []; end
5025 //
5026        const pm_global_variable_target_node_t *cast = (const pm_global_variable_target_node_t *) node;
5027        ID name = pm_constant_id_lookup(scope_node, cast->name);
5028
5029 VALUE operand = ID2SYM(name);
5030 PUSH_INSN1(writes, location, setglobal, operand);
5031 break;
5032 }
5033 case PM_INSTANCE_VARIABLE_TARGET_NODE: {
5034 // Instance variable targets have no parent expression, so they only
5035 // need to compile the write.
5036 //
5037 // for @i in []; end
5038 //
5039        const pm_instance_variable_target_node_t *cast = (const pm_instance_variable_target_node_t *) node;
5040        ID name = pm_constant_id_lookup(scope_node, cast->name);
5041
5042 VALUE operand = ID2SYM(name);
5043 PUSH_INSN2(writes, location, setinstancevariable, operand, get_ivar_ic_value(iseq, name));
5044 break;
5045 }
5046 case PM_CONSTANT_PATH_TARGET_NODE: {
5047 // Constant path targets have a parent expression that is the object
5048 // that owns the constant. This needs to be compiled first into the
5049 // parents sequence. If no parent is found, then it represents using the
5050 // unary :: operator to indicate a top-level constant. In that case we
5051 // need to push Object onto the stack.
5052 //
5053 // for I::J in []; end
5054 //
5055        const pm_constant_path_target_node_t *cast = (const pm_constant_path_target_node_t *) node;
5056        ID name = pm_constant_id_lookup(scope_node, cast->name);
5057
5058 if (cast->parent != NULL) {
5059 pm_compile_node(iseq, cast->parent, parents, false, scope_node);
5060 }
5061 else {
5062 PUSH_INSN1(parents, location, putobject, rb_cObject);
5063 }
5064
5065 if (state == NULL) {
5066 PUSH_INSN(writes, location, swap);
5067 }
5068 else {
5069 PUSH_INSN1(writes, location, topn, INT2FIX(1));
5070 pm_multi_target_state_push(state, (INSN *) LAST_ELEMENT(writes), 1);
5071 }
5072
5073 VALUE operand = ID2SYM(name);
5074 PUSH_INSN1(writes, location, setconstant, operand);
5075
5076 if (state != NULL) {
5077 PUSH_INSN(cleanup, location, pop);
5078 }
5079
5080 break;
5081 }
5082 case PM_CALL_TARGET_NODE: {
5083 // Call targets have a parent expression that is the receiver of the
5084 // method being called. This needs to be compiled first into the parents
5085 // sequence. These nodes cannot have arguments, so the method call is
5086 // compiled with a single argument which represents the value being
5087 // written.
5088 //
5089 // for i.j in []; end
5090 //
5091 const pm_call_target_node_t *cast = (const pm_call_target_node_t *) node;
5092 ID method_id = pm_constant_id_lookup(scope_node, cast->name);
5093
5094 pm_compile_node(iseq, cast->receiver, parents, false, scope_node);
5095
5096 LABEL *safe_label = NULL;
5097 if (PM_NODE_FLAG_P(cast, PM_CALL_NODE_FLAGS_SAFE_NAVIGATION)) {
5098 safe_label = NEW_LABEL(location.line);
5099 PUSH_INSN(parents, location, dup);
5100 PUSH_INSNL(parents, location, branchnil, safe_label);
5101 }
5102
5103 if (state != NULL) {
5104 PUSH_INSN1(writes, location, topn, INT2FIX(1));
5105 pm_multi_target_state_push(state, (INSN *) LAST_ELEMENT(writes), 1);
5106 PUSH_INSN(writes, location, swap);
5107 }
5108
5109 int flags = VM_CALL_ARGS_SIMPLE;
5110 if (PM_NODE_FLAG_P(cast, PM_CALL_NODE_FLAGS_IGNORE_VISIBILITY)) flags |= VM_CALL_FCALL;
5111
5112 PUSH_SEND_WITH_FLAG(writes, location, method_id, INT2FIX(1), INT2FIX(flags));
5113 if (safe_label != NULL && state == NULL) PUSH_LABEL(writes, safe_label);
5114 PUSH_INSN(writes, location, pop);
5115 if (safe_label != NULL && state != NULL) PUSH_LABEL(writes, safe_label);
5116
5117 if (state != NULL) {
5118 PUSH_INSN(cleanup, location, pop);
5119 }
5120
5121 break;
5122 }
5123 case PM_INDEX_TARGET_NODE: {
5124 // Index targets have a parent expression that is the receiver of the
5125 // method being called and any additional arguments that are being
5126 // passed along with the value being written. The receiver and arguments
5127 // both need to be on the stack. Note that this is even more complicated
5128 // by the fact that these nodes can hold a block using the unary &
5129 // operator.
5130 //
5131 // for i[:j] in []; end
5132 //
5133 const pm_index_target_node_t *cast = (const pm_index_target_node_t *) node;
5134
5135 pm_compile_node(iseq, cast->receiver, parents, false, scope_node);
5136
5137 int flags = 0;
5138 struct rb_callinfo_kwarg *kwargs = NULL;
5139 int argc = pm_setup_args(cast->arguments, (const pm_node_t *) cast->block, &flags, &kwargs, iseq, parents, scope_node, &location);
5140
5141 if (state != NULL) {
5142 PUSH_INSN1(writes, location, topn, INT2FIX(argc + 1));
5143 pm_multi_target_state_push(state, (INSN *) LAST_ELEMENT(writes), argc + 1);
5144
5145 if (argc == 0) {
5146 PUSH_INSN(writes, location, swap);
5147 }
5148 else {
5149 for (int index = 0; index < argc; index++) {
5150 PUSH_INSN1(writes, location, topn, INT2FIX(argc + 1));
5151 }
5152 PUSH_INSN1(writes, location, topn, INT2FIX(argc + 1));
5153 }
5154 }
5155
5156 // The argc that we're going to pass to the send instruction is the
5157 // number of arguments + 1 for the value being written. If there's a
5158 // splat, then we need to insert newarray and concatarray instructions
5159 // after the arguments have been written.
5160 int ci_argc = argc + 1;
5161 if (flags & VM_CALL_ARGS_SPLAT) {
5162 ci_argc--;
5163 PUSH_INSN1(writes, location, newarray, INT2FIX(1));
5164 PUSH_INSN(writes, location, concatarray);
5165 }
5166
5167 PUSH_SEND_R(writes, location, idASET, INT2NUM(ci_argc), NULL, INT2FIX(flags), kwargs);
5168 PUSH_INSN(writes, location, pop);
5169
5170 if (state != NULL) {
5171 if (argc != 0) {
5172 PUSH_INSN(writes, location, pop);
5173 }
5174
5175 for (int index = 0; index < argc + 1; index++) {
5176 PUSH_INSN(cleanup, location, pop);
5177 }
5178 }
5179
5180 break;
5181 }
5182 case PM_MULTI_TARGET_NODE: {
5183 // Multi target nodes represent a set of writes to multiple variables.
5184 // The parent expressions are the combined set of the parent expressions
5185 // of its inner target nodes.
5186 //
5187 // for i, j in []; end
5188 //
5189 size_t before_position;
5190 if (state != NULL) {
5191 before_position = state->position;
5192 state->position--;
5193 }
5194
5195 pm_compile_multi_target_node(iseq, node, parents, writes, cleanup, scope_node, state);
5196 if (state != NULL) state->position = before_position;
5197
5198 break;
5199 }
5200 case PM_SPLAT_NODE: {
5201 // Splat nodes capture all values into an array. They can be used
5202 // as targets in assignments or for loops.
5203 //
5204 // for *x in []; end
5205 //
5206 const pm_splat_node_t *cast = (const pm_splat_node_t *) node;
5207
5208 if (cast->expression != NULL) {
5209 pm_compile_target_node(iseq, cast->expression, parents, writes, cleanup, scope_node, state);
5210 }
5211
5212 break;
5213 }
5214 default:
5215 rb_bug("Unexpected node type: %s", pm_node_type_to_str(PM_NODE_TYPE(node)));
5216 break;
5217 }
5218}
5219
5225static void
5226pm_compile_multi_target_node(rb_iseq_t *iseq, const pm_node_t *node, LINK_ANCHOR *const parents, LINK_ANCHOR *const writes, LINK_ANCHOR *const cleanup, pm_scope_node_t *scope_node, pm_multi_target_state_t *state)
5227{
5228 const pm_node_location_t location = PM_NODE_START_LOCATION(scope_node->parser, node);
5229 const pm_node_list_t *lefts;
5230 const pm_node_t *rest;
5231 const pm_node_list_t *rights;
5232
5233 switch (PM_NODE_TYPE(node)) {
5234 case PM_MULTI_TARGET_NODE: {
5235 const pm_multi_target_node_t *cast = (const pm_multi_target_node_t *) node;
5236 lefts = &cast->lefts;
5237 rest = cast->rest;
5238 rights = &cast->rights;
5239 break;
5240 }
5241 case PM_MULTI_WRITE_NODE: {
5242 const pm_multi_write_node_t *cast = (const pm_multi_write_node_t *) node;
5243 lefts = &cast->lefts;
5244 rest = cast->rest;
5245 rights = &cast->rights;
5246 break;
5247 }
5248 default:
5249 rb_bug("Unsupported node %s", pm_node_type_to_str(PM_NODE_TYPE(node)));
5250 break;
5251 }
5252
5253 bool has_rest = (rest != NULL) && PM_NODE_TYPE_P(rest, PM_SPLAT_NODE) && ((const pm_splat_node_t *) rest)->expression != NULL;
5254 bool has_posts = rights->size > 0;
5255
5256 // The first instruction in the writes sequence is going to spread the
5257 // top value of the stack onto the number of values that we're going to
5258 // write.
5259 PUSH_INSN2(writes, location, expandarray, INT2FIX(lefts->size), INT2FIX((has_rest || has_posts) ? 1 : 0));
5260
5261 // We need to keep track of some additional state information as we're
5262 // going through the targets because we will need to revisit them once
5263 // we know how many values are being pushed onto the stack.
5264 pm_multi_target_state_t target_state = { 0 };
5265 if (state == NULL) state = &target_state;
5266
5267 size_t base_position = state->position;
5268 size_t splat_position = (has_rest || has_posts) ? 1 : 0;
5269
5270 // Next, we'll iterate through all of the leading targets.
5271 for (size_t index = 0; index < lefts->size; index++) {
5272 const pm_node_t *target = lefts->nodes[index];
5273 state->position = lefts->size - index + splat_position + base_position;
5274 pm_compile_target_node(iseq, target, parents, writes, cleanup, scope_node, state);
5275 }
5276
5277 // Next, we'll compile the rest target if there is one.
5278 if (has_rest) {
5279 const pm_node_t *target = ((const pm_splat_node_t *) rest)->expression;
5280 state->position = 1 + rights->size + base_position;
5281
5282 if (has_posts) {
5283 PUSH_INSN2(writes, location, expandarray, INT2FIX(rights->size), INT2FIX(3));
5284 }
5285
5286 pm_compile_target_node(iseq, target, parents, writes, cleanup, scope_node, state);
5287 }
5288
5289 // Finally, we'll compile the trailing targets.
5290 if (has_posts) {
5291 if (!has_rest && rest != NULL) {
5292 PUSH_INSN2(writes, location, expandarray, INT2FIX(rights->size), INT2FIX(2));
5293 }
5294
5295 for (size_t index = 0; index < rights->size; index++) {
5296 const pm_node_t *target = rights->nodes[index];
5297 state->position = rights->size - index + base_position;
5298 pm_compile_target_node(iseq, target, parents, writes, cleanup, scope_node, state);
5299 }
5300 }
5301}
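
// For instance, a multiple write such as
//
//     a, b, *c, d = value
//
// flows through the loops above: the leading targets (a, b) first, then the
// splat (c), then the trailing target (d), with expandarray instructions
// splitting the value between the groups.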
5302
5308static void
5309pm_compile_for_node_index(rb_iseq_t *iseq, const pm_node_t *node, LINK_ANCHOR *const ret, pm_scope_node_t *scope_node)
5310{
5311 const pm_node_location_t location = PM_NODE_START_LOCATION(scope_node->parser, node);
5312
5313 switch (PM_NODE_TYPE(node)) {
5314 case PM_LOCAL_VARIABLE_TARGET_NODE: {
5315 // For local variables, all we have to do is retrieve the value and then
5316 // compile the index node.
5317 PUSH_GETLOCAL(ret, location, 1, 0);
5318 pm_compile_target_node(iseq, node, ret, ret, ret, scope_node, NULL);
5319 break;
5320 }
5321 case PM_CLASS_VARIABLE_TARGET_NODE:
5322 case PM_CONSTANT_TARGET_NODE:
5323 case PM_GLOBAL_VARIABLE_TARGET_NODE:
5324 case PM_INSTANCE_VARIABLE_TARGET_NODE:
5325 case PM_CONSTANT_PATH_TARGET_NODE:
5326 case PM_CALL_TARGET_NODE:
5327 case PM_INDEX_TARGET_NODE: {
5328 // For other targets, we need to potentially compile the parent or
5329 // owning expression of this target, then retrieve the value, expand it,
5330 // and then compile the necessary writes.
5331 DECL_ANCHOR(writes);
5332 DECL_ANCHOR(cleanup);
5333
5334 pm_multi_target_state_t state = { 0 };
5335 state.position = 1;
5336 pm_compile_target_node(iseq, node, ret, writes, cleanup, scope_node, &state);
5337
5338 PUSH_GETLOCAL(ret, location, 1, 0);
5339 PUSH_INSN2(ret, location, expandarray, INT2FIX(1), INT2FIX(0));
5340
5341 PUSH_SEQ(ret, writes);
5342 PUSH_SEQ(ret, cleanup);
5343
5344 pm_multi_target_state_update(&state);
5345 break;
5346 }
5347 case PM_SPLAT_NODE:
5348 case PM_MULTI_TARGET_NODE: {
5349 DECL_ANCHOR(writes);
5350 DECL_ANCHOR(cleanup);
5351
5352 pm_compile_target_node(iseq, node, ret, writes, cleanup, scope_node, NULL);
5353
5354 LABEL *not_single = NEW_LABEL(location.line);
5355 LABEL *not_ary = NEW_LABEL(location.line);
5356
5357 // When there are multiple targets, we'll do a bunch of work to convert
5358 // the value into an array before we expand it. Effectively we're trying
5359 // to accomplish:
5360 //
5361 // (args.length == 1 && Array.try_convert(args[0])) || args
5362 //
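        // For example, iterating `for a, b in [[1, 2], 3]` assigns a = 1 and
        // b = 2 on the first pass, and a = 3 with b = nil on the second,
        // because 3 does not convert to an Array.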
5363 PUSH_GETLOCAL(ret, location, 1, 0);
5364 PUSH_INSN(ret, location, dup);
5365 PUSH_CALL(ret, location, idLength, INT2FIX(0));
5366 PUSH_INSN1(ret, location, putobject, INT2FIX(1));
5367 PUSH_CALL(ret, location, idEq, INT2FIX(1));
5368 PUSH_INSNL(ret, location, branchunless, not_single);
5369 PUSH_INSN(ret, location, dup);
5370 PUSH_INSN1(ret, location, putobject, INT2FIX(0));
5371 PUSH_CALL(ret, location, idAREF, INT2FIX(1));
5372 PUSH_INSN1(ret, location, putobject, rb_cArray);
5373 PUSH_INSN(ret, location, swap);
5374 PUSH_CALL(ret, location, rb_intern("try_convert"), INT2FIX(1));
5375 PUSH_INSN(ret, location, dup);
5376 PUSH_INSNL(ret, location, branchunless, not_ary);
5377 PUSH_INSN(ret, location, swap);
5378
5379 PUSH_LABEL(ret, not_ary);
5380 PUSH_INSN(ret, location, pop);
5381
5382 PUSH_LABEL(ret, not_single);
5383
5384 if (PM_NODE_TYPE_P(node, PM_SPLAT_NODE)) {
5385 const pm_splat_node_t *cast = (const pm_splat_node_t *) node;
5386 PUSH_INSN2(ret, location, expandarray, INT2FIX(0), INT2FIX(cast->expression == NULL ? 0 : 1));
5387 }
5388
5389 PUSH_SEQ(ret, writes);
5390 PUSH_SEQ(ret, cleanup);
5391 break;
5392 }
5393 default:
5394 rb_bug("Unexpected node type for index in for node: %s", pm_node_type_to_str(PM_NODE_TYPE(node)));
5395 break;
5396 }
5397}
5398
5399static void
5400pm_compile_rescue(rb_iseq_t *iseq, const pm_begin_node_t *cast, const pm_node_location_t *node_location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
5401{
5402 const pm_parser_t *parser = scope_node->parser;
5403
5404 LABEL *lstart = NEW_LABEL(node_location->line);
5405 LABEL *lend = NEW_LABEL(node_location->line);
5406 LABEL *lcont = NEW_LABEL(node_location->line);
5407
5408 pm_scope_node_t rescue_scope_node;
5409 pm_scope_node_init((const pm_node_t *) cast->rescue_clause, &rescue_scope_node, scope_node);
5410
5411 rb_iseq_t *rescue_iseq = NEW_CHILD_ISEQ(
5412 &rescue_scope_node,
5413 rb_str_concat(rb_str_new2("rescue in "), ISEQ_BODY(iseq)->location.label),
5414 ISEQ_TYPE_RESCUE,
5415 pm_node_line_number(parser, (const pm_node_t *) cast->rescue_clause)
5416 );
5417
5418 pm_scope_node_destroy(&rescue_scope_node);
5419
5420 lstart->rescued = LABEL_RESCUE_BEG;
5421 lend->rescued = LABEL_RESCUE_END;
5422 PUSH_LABEL(ret, lstart);
5423
5424 bool prev_in_rescue = ISEQ_COMPILE_DATA(iseq)->in_rescue;
5425 ISEQ_COMPILE_DATA(iseq)->in_rescue = true;
5426
5427 if (cast->statements != NULL) {
5428 PM_COMPILE_NOT_POPPED((const pm_node_t *) cast->statements);
5429 }
5430 else {
5431 const pm_node_location_t location = PM_NODE_START_LOCATION(parser, cast->rescue_clause);
5432 PUSH_INSN(ret, location, putnil);
5433 }
5434
5435 ISEQ_COMPILE_DATA(iseq)->in_rescue = prev_in_rescue;
5436 PUSH_LABEL(ret, lend);
5437
5438 if (cast->else_clause != NULL) {
5439 if (!popped) PUSH_INSN(ret, *node_location, pop);
5440 PM_COMPILE((const pm_node_t *) cast->else_clause);
5441 }
5442
5443 PUSH_INSN(ret, *node_location, nop);
5444 PUSH_LABEL(ret, lcont);
5445
5446 if (popped) PUSH_INSN(ret, *node_location, pop);
5447 PUSH_CATCH_ENTRY(CATCH_TYPE_RESCUE, lstart, lend, rescue_iseq, lcont);
5448 PUSH_CATCH_ENTRY(CATCH_TYPE_RETRY, lend, lcont, NULL, lstart);
5449}
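
// A rough sketch of what the above produces for
//
//     begin
//       body
//     rescue
//       handler
//     end
//
// is: the handler is compiled into a child "rescue in ..." iseq, the RESCUE
// catch entry covers lstart..lend and continues at lcont, and the RETRY entry
// is what lets `retry` jump back to lstart.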
5450
5451static void
5452pm_compile_ensure(rb_iseq_t *iseq, const pm_begin_node_t *cast, const pm_node_location_t *node_location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
5453{
5454 const pm_parser_t *parser = scope_node->parser;
5455 const pm_statements_node_t *statements = cast->ensure_clause->statements;
5456
5457 pm_node_location_t location;
5458 if (statements != NULL) {
5459 location = PM_NODE_START_LOCATION(parser, statements);
5460 }
5461 else {
5462 location = *node_location;
5463 }
5464
5465 LABEL *lstart = NEW_LABEL(location.line);
5466 LABEL *lend = NEW_LABEL(location.line);
5467 LABEL *lcont = NEW_LABEL(location.line);
5468
5469 struct ensure_range er;
5470    struct iseq_compile_data_ensure_node_stack enl;
5471    struct ensure_range *erange;
5472
5473 DECL_ANCHOR(ensr);
5474 if (statements != NULL) {
5475 pm_compile_node(iseq, (const pm_node_t *) statements, ensr, true, scope_node);
5476 }
5477
5478 LINK_ELEMENT *last = ensr->last;
5479 bool last_leave = last && IS_INSN(last) && IS_INSN_ID(last, leave);
5480
5481 er.begin = lstart;
5482 er.end = lend;
5483 er.next = 0;
5484 push_ensure_entry(iseq, &enl, &er, (void *) cast->ensure_clause);
5485
5486 PUSH_LABEL(ret, lstart);
5487 if (cast->rescue_clause != NULL) {
5488 pm_compile_rescue(iseq, cast, node_location, ret, popped | last_leave, scope_node);
5489 }
5490 else if (cast->statements != NULL) {
5491 pm_compile_node(iseq, (const pm_node_t *) cast->statements, ret, popped | last_leave, scope_node);
5492 }
5493 else if (!(popped | last_leave)) {
5494 PUSH_SYNTHETIC_PUTNIL(ret, iseq);
5495 }
5496
5497 PUSH_LABEL(ret, lend);
5498 PUSH_SEQ(ret, ensr);
5499 if (!popped && last_leave) PUSH_INSN(ret, *node_location, putnil);
5500 PUSH_LABEL(ret, lcont);
5501 if (last_leave) PUSH_INSN(ret, *node_location, pop);
5502
5503 pm_scope_node_t next_scope_node;
5504 pm_scope_node_init((const pm_node_t *) cast->ensure_clause, &next_scope_node, scope_node);
5505
5506 rb_iseq_t *child_iseq = NEW_CHILD_ISEQ(
5507 &next_scope_node,
5508 rb_str_concat(rb_str_new2("ensure in "), ISEQ_BODY(iseq)->location.label),
5509 ISEQ_TYPE_ENSURE,
5510 location.line
5511 );
5512
5513 pm_scope_node_destroy(&next_scope_node);
5514
5515 erange = ISEQ_COMPILE_DATA(iseq)->ensure_node_stack->erange;
5516 if (lstart->link.next != &lend->link) {
5517 while (erange) {
5518 PUSH_CATCH_ENTRY(CATCH_TYPE_ENSURE, erange->begin, erange->end, child_iseq, lcont);
5519 erange = erange->next;
5520 }
5521 }
5522 ISEQ_COMPILE_DATA(iseq)->ensure_node_stack = enl.prev;
5523}
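
// For a statement like
//
//     begin
//       body
//     ensure
//       cleanup
//     end
//
// the cleanup statements end up compiled twice: once inline (the ensr
// sequence appended after lend for the normal fall-through path) and once
// into a child "ensure in ..." iseq that the ENSURE catch table entries point
// at for the exceptional path.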
5524
5529static inline bool
5530pm_opt_str_freeze_p(const rb_iseq_t *iseq, const pm_call_node_t *node)
5531{
5532 return (
5533 !PM_NODE_FLAG_P(node, PM_CALL_NODE_FLAGS_SAFE_NAVIGATION) &&
5534 node->receiver != NULL &&
5535 PM_NODE_TYPE_P(node->receiver, PM_STRING_NODE) &&
5536 node->arguments == NULL &&
5537 node->block == NULL &&
5538 ISEQ_COMPILE_DATA(iseq)->option->specialized_instruction
5539 );
5540}
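
// For example, this predicate is what allows an argument-less call on a
// string literal such as
//
//     "foo".freeze
//
// to be emitted as a specialized instruction by its callers (rather than a
// generic send) when specialized_instruction is enabled.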
5541
5546static void
5547pm_compile_constant_read(rb_iseq_t *iseq, VALUE name, const pm_location_t *name_loc, uint32_t node_id, LINK_ANCHOR *const ret, const pm_scope_node_t *scope_node)
5548{
5549 const pm_node_location_t location = PM_LOCATION_START_LOCATION(scope_node->parser, name_loc, node_id);
5550
5551 if (ISEQ_COMPILE_DATA(iseq)->option->inline_const_cache) {
5552 ISEQ_BODY(iseq)->ic_size++;
5553 VALUE segments = rb_ary_new_from_args(1, name);
5554 RB_OBJ_SET_SHAREABLE(segments);
5555 PUSH_INSN1(ret, location, opt_getconstant_path, segments);
5556 }
5557 else {
5558 PUSH_INSN(ret, location, putnil);
5559 PUSH_INSN1(ret, location, putobject, Qtrue);
5560 PUSH_INSN1(ret, location, getconstant, name);
5561 }
5562}
5563
5568static VALUE
5569pm_constant_path_parts(const pm_node_t *node, const pm_scope_node_t *scope_node)
5570{
5571 VALUE parts = rb_ary_new();
5572
5573 while (true) {
5574 switch (PM_NODE_TYPE(node)) {
5575 case PM_CONSTANT_READ_NODE: {
5576 const pm_constant_read_node_t *cast = (const pm_constant_read_node_t *) node;
5577 VALUE name = ID2SYM(pm_constant_id_lookup(scope_node, cast->name));
5578
5579 rb_ary_unshift(parts, name);
5580 return parts;
5581 }
5582 case PM_CONSTANT_PATH_NODE: {
5583 const pm_constant_path_node_t *cast = (const pm_constant_path_node_t *) node;
5584 VALUE name = ID2SYM(pm_constant_id_lookup(scope_node, cast->name));
5585
5586 rb_ary_unshift(parts, name);
5587 if (cast->parent == NULL) {
5588 rb_ary_unshift(parts, ID2SYM(idNULL));
5589 return parts;
5590 }
5591
5592 node = cast->parent;
5593 break;
5594 }
5595 default:
5596 return Qnil;
5597 }
5598 }
5599}
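
// For example, Foo::Bar::Baz collapses to [:Foo, :Bar, :Baz], an anchored
// path such as ::Foo::Bar is additionally rooted with idNULL so the caching
// instruction knows it is an absolute lookup, and a dynamic parent (for
// example foo::Bar) returns Qnil, which callers treat as "not expressible as
// a simple path".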
5600
5606static void
5607pm_compile_constant_path(rb_iseq_t *iseq, const pm_node_t *node, LINK_ANCHOR *const prefix, LINK_ANCHOR *const body, bool popped, pm_scope_node_t *scope_node)
5608{
5609 const pm_node_location_t location = PM_NODE_START_LOCATION(scope_node->parser, node);
5610
5611 switch (PM_NODE_TYPE(node)) {
5612 case PM_CONSTANT_READ_NODE: {
5613 const pm_constant_read_node_t *cast = (const pm_constant_read_node_t *) node;
5614 VALUE name = ID2SYM(pm_constant_id_lookup(scope_node, cast->name));
5615
5616 PUSH_INSN1(body, location, putobject, Qtrue);
5617 PUSH_INSN1(body, location, getconstant, name);
5618 break;
5619 }
5620 case PM_CONSTANT_PATH_NODE: {
5621 const pm_constant_path_node_t *cast = (const pm_constant_path_node_t *) node;
5622 VALUE name = ID2SYM(pm_constant_id_lookup(scope_node, cast->name));
5623
5624 if (cast->parent == NULL) {
5625 PUSH_INSN(body, location, pop);
5626 PUSH_INSN1(body, location, putobject, rb_cObject);
5627 PUSH_INSN1(body, location, putobject, Qtrue);
5628 PUSH_INSN1(body, location, getconstant, name);
5629 }
5630 else {
5631 pm_compile_constant_path(iseq, cast->parent, prefix, body, false, scope_node);
5632 PUSH_INSN1(body, location, putobject, Qfalse);
5633 PUSH_INSN1(body, location, getconstant, name);
5634 }
5635 break;
5636 }
5637 default:
5638 PM_COMPILE_INTO_ANCHOR(prefix, node);
5639 break;
5640 }
5641}
5642
5646static VALUE
5647pm_compile_shareable_constant_literal(rb_iseq_t *iseq, const pm_node_t *node, const pm_scope_node_t *scope_node)
5648{
5649 switch (PM_NODE_TYPE(node)) {
5650 case PM_TRUE_NODE:
5651 case PM_FALSE_NODE:
5652 case PM_NIL_NODE:
5653 case PM_SYMBOL_NODE:
5654 case PM_REGULAR_EXPRESSION_NODE:
5655 case PM_SOURCE_LINE_NODE:
5656 case PM_INTEGER_NODE:
5657 case PM_FLOAT_NODE:
5658 case PM_RATIONAL_NODE:
5659 case PM_IMAGINARY_NODE:
5660 case PM_SOURCE_ENCODING_NODE:
5661 return pm_static_literal_value(iseq, node, scope_node);
5662 case PM_STRING_NODE:
5663 return parse_static_literal_string(iseq, scope_node, node, &((const pm_string_node_t *) node)->unescaped);
5664 case PM_SOURCE_FILE_NODE:
5665 return pm_source_file_value((const pm_source_file_node_t *) node, scope_node);
5666 case PM_ARRAY_NODE: {
5667 const pm_array_node_t *cast = (const pm_array_node_t *) node;
5668 VALUE result = rb_ary_new_capa(cast->elements.size);
5669
5670 for (size_t index = 0; index < cast->elements.size; index++) {
5671 VALUE element = pm_compile_shareable_constant_literal(iseq, cast->elements.nodes[index], scope_node);
5672 if (element == Qundef) return Qundef;
5673
5674 rb_ary_push(result, element);
5675 }
5676
5677 return rb_ractor_make_shareable(result);
5678 }
5679 case PM_HASH_NODE: {
5680 const pm_hash_node_t *cast = (const pm_hash_node_t *) node;
5681 VALUE result = rb_hash_new_capa(cast->elements.size);
5682
5683 for (size_t index = 0; index < cast->elements.size; index++) {
5684 const pm_node_t *element = cast->elements.nodes[index];
5685 if (!PM_NODE_TYPE_P(element, PM_ASSOC_NODE)) return Qundef;
5686
5687 const pm_assoc_node_t *assoc = (const pm_assoc_node_t *) element;
5688
5689 VALUE key = pm_compile_shareable_constant_literal(iseq, assoc->key, scope_node);
5690 if (key == Qundef) return Qundef;
5691
5692 VALUE value = pm_compile_shareable_constant_literal(iseq, assoc->value, scope_node);
5693 if (value == Qundef) return Qundef;
5694
5695 rb_hash_aset(result, key, value);
5696 }
5697
5698 return rb_ractor_make_shareable(result);
5699 }
5700 default:
5701 return Qundef;
5702 }
5703}
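
// As an example (assuming a `# shareable_constant_value: literal` pragma is
// in effect), an assignment like
//
//     CONST = [1, { two: "three" }]
//
// can be folded here into a single frozen, Ractor-shareable object at compile
// time, while anything containing a non-literal element (a method call, an
// interpolation with dynamic parts, and so on) makes this function return
// Qundef and falls back to building the value at runtime.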
5704
5709static void
5710pm_compile_shareable_constant_value(rb_iseq_t *iseq, const pm_node_t *node, const pm_node_flags_t shareability, VALUE path, LINK_ANCHOR *const ret, pm_scope_node_t *scope_node, bool top)
5711{
5712 VALUE literal = pm_compile_shareable_constant_literal(iseq, node, scope_node);
5713 if (literal != Qundef) {
5714 const pm_node_location_t location = PM_NODE_START_LOCATION(scope_node->parser, node);
5715 PUSH_INSN1(ret, location, putobject, literal);
5716 return;
5717 }
5718
5719 const pm_node_location_t location = PM_NODE_START_LOCATION(scope_node->parser, node);
5720 switch (PM_NODE_TYPE(node)) {
5721 case PM_ARRAY_NODE: {
5722 const pm_array_node_t *cast = (const pm_array_node_t *) node;
5723
5724 if (top) {
5725 PUSH_INSN1(ret, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
5726 }
5727
5728 for (size_t index = 0; index < cast->elements.size; index++) {
5729 pm_compile_shareable_constant_value(iseq, cast->elements.nodes[index], shareability, path, ret, scope_node, false);
5730 }
5731
5732 PUSH_INSN1(ret, location, newarray, INT2FIX(cast->elements.size));
5733
5734 if (top) {
5735 ID method_id = (shareability & PM_SHAREABLE_CONSTANT_NODE_FLAGS_EXPERIMENTAL_COPY) ? rb_intern("make_shareable_copy") : rb_intern("make_shareable");
5736 PUSH_SEND_WITH_FLAG(ret, location, method_id, INT2FIX(1), INT2FIX(VM_CALL_ARGS_SIMPLE));
5737 }
5738
5739 return;
5740 }
5741 case PM_HASH_NODE: {
5742 const pm_hash_node_t *cast = (const pm_hash_node_t *) node;
5743
5744 if (top) {
5745 PUSH_INSN1(ret, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
5746 }
5747
5748 pm_compile_hash_elements(iseq, (const pm_node_t *) cast, &cast->elements, shareability, path, false, ret, scope_node);
5749
5750 if (top) {
5751 ID method_id = (shareability & PM_SHAREABLE_CONSTANT_NODE_FLAGS_EXPERIMENTAL_COPY) ? rb_intern("make_shareable_copy") : rb_intern("make_shareable");
5752 PUSH_SEND_WITH_FLAG(ret, location, method_id, INT2FIX(1), INT2FIX(VM_CALL_ARGS_SIMPLE));
5753 }
5754
5755 return;
5756 }
5757 default: {
5758 DECL_ANCHOR(value_seq);
5759
5760 pm_compile_node(iseq, node, value_seq, false, scope_node);
5761 if (PM_NODE_TYPE_P(node, PM_INTERPOLATED_STRING_NODE)) {
5762 PUSH_SEND_WITH_FLAG(value_seq, location, idUMinus, INT2FIX(0), INT2FIX(VM_CALL_ARGS_SIMPLE));
5763 }
5764
5765 if (shareability & PM_SHAREABLE_CONSTANT_NODE_FLAGS_LITERAL) {
5766 PUSH_INSN1(ret, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
5767 PUSH_SEQ(ret, value_seq);
5768 if (!RB_OBJ_SHAREABLE_P(path)) {
5769 RB_OBJ_SET_SHAREABLE(path);
5770 }
5771 PUSH_INSN1(ret, location, putobject, path);
5772 PUSH_SEND_WITH_FLAG(ret, location, rb_intern("ensure_shareable"), INT2FIX(2), INT2FIX(VM_CALL_ARGS_SIMPLE));
5773 }
5774 else if (shareability & PM_SHAREABLE_CONSTANT_NODE_FLAGS_EXPERIMENTAL_COPY) {
5775 if (top) PUSH_INSN1(ret, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
5776 PUSH_SEQ(ret, value_seq);
5777 if (top) PUSH_SEND_WITH_FLAG(ret, location, rb_intern("make_shareable_copy"), INT2FIX(1), INT2FIX(VM_CALL_ARGS_SIMPLE));
5778 }
5779 else if (shareability & PM_SHAREABLE_CONSTANT_NODE_FLAGS_EXPERIMENTAL_EVERYTHING) {
5780 if (top) PUSH_INSN1(ret, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
5781 PUSH_SEQ(ret, value_seq);
5782 if (top) PUSH_SEND_WITH_FLAG(ret, location, rb_intern("make_shareable"), INT2FIX(1), INT2FIX(VM_CALL_ARGS_SIMPLE));
5783 }
5784
5785 break;
5786 }
5787 }
5788}
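
// Roughly, the shareability flags correspond to the
// `# shareable_constant_value:` magic comment modes: the literal mode checks
// the value with ensure_shareable, experimental_everything wraps it in
// make_shareable, and experimental_copy uses make_shareable_copy, with each
// helper invoked on the VM's frozen core object pushed via putspecialobject.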
5789
5794static void
5795pm_compile_constant_write_node(rb_iseq_t *iseq, const pm_constant_write_node_t *node, const pm_node_flags_t shareability, const pm_node_location_t *node_location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
5796{
5797 const pm_node_location_t location = *node_location;
5798 ID name_id = pm_constant_id_lookup(scope_node, node->name);
5799
5800 if (shareability != 0) {
5801 pm_compile_shareable_constant_value(iseq, node->value, shareability, rb_id2str(name_id), ret, scope_node, true);
5802 }
5803 else {
5804 PM_COMPILE_NOT_POPPED(node->value);
5805 }
5806
5807 if (!popped) PUSH_INSN(ret, location, dup);
5808 PUSH_INSN1(ret, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_CONST_BASE));
5809
5810 VALUE operand = ID2SYM(name_id);
5811 PUSH_INSN1(ret, location, setconstant, operand);
5812}
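
// Sketch of the sequence emitted above for a plain, non-popped `FOO = expr`:
//
//     <compile expr>
//     dup
//     putspecialobject VM_SPECIAL_OBJECT_CONST_BASE
//     setconstant :FOO
//
// leaving the assigned value on the stack as the result of the expression.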
5813
5818static void
5819pm_compile_constant_and_write_node(rb_iseq_t *iseq, const pm_constant_and_write_node_t *node, const pm_node_flags_t shareability, const pm_node_location_t *node_location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
5820{
5821 const pm_node_location_t location = *node_location;
5822
5823 VALUE name = ID2SYM(pm_constant_id_lookup(scope_node, node->name));
5824 LABEL *end_label = NEW_LABEL(location.line);
5825
5826 pm_compile_constant_read(iseq, name, &node->name_loc, location.node_id, ret, scope_node);
5827 if (!popped) PUSH_INSN(ret, location, dup);
5828
5829 PUSH_INSNL(ret, location, branchunless, end_label);
5830 if (!popped) PUSH_INSN(ret, location, pop);
5831
5832 if (shareability != 0) {
5833 pm_compile_shareable_constant_value(iseq, node->value, shareability, name, ret, scope_node, true);
5834 }
5835 else {
5836 PM_COMPILE_NOT_POPPED(node->value);
5837 }
5838
5839 if (!popped) PUSH_INSN(ret, location, dup);
5840 PUSH_INSN1(ret, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_CONST_BASE));
5841 PUSH_INSN1(ret, location, setconstant, name);
5842 PUSH_LABEL(ret, end_label);
5843}
5844
5849static void
5850pm_compile_constant_or_write_node(rb_iseq_t *iseq, const pm_constant_or_write_node_t *node, const pm_node_flags_t shareability, const pm_node_location_t *node_location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
5851{
5852 const pm_node_location_t location = *node_location;
5853 VALUE name = ID2SYM(pm_constant_id_lookup(scope_node, node->name));
5854
5855 LABEL *set_label = NEW_LABEL(location.line);
5856 LABEL *end_label = NEW_LABEL(location.line);
5857
5858 PUSH_INSN(ret, location, putnil);
5859 PUSH_INSN3(ret, location, defined, INT2FIX(DEFINED_CONST), name, Qtrue);
5860 PUSH_INSNL(ret, location, branchunless, set_label);
5861
5862 pm_compile_constant_read(iseq, name, &node->name_loc, location.node_id, ret, scope_node);
5863 if (!popped) PUSH_INSN(ret, location, dup);
5864
5865 PUSH_INSNL(ret, location, branchif, end_label);
5866 if (!popped) PUSH_INSN(ret, location, pop);
5867 PUSH_LABEL(ret, set_label);
5868
5869 if (shareability != 0) {
5870 pm_compile_shareable_constant_value(iseq, node->value, shareability, name, ret, scope_node, true);
5871 }
5872 else {
5873 PM_COMPILE_NOT_POPPED(node->value);
5874 }
5875
5876 if (!popped) PUSH_INSN(ret, location, dup);
5877 PUSH_INSN1(ret, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_CONST_BASE));
5878 PUSH_INSN1(ret, location, setconstant, name);
5879 PUSH_LABEL(ret, end_label);
5880}
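
// Note that for `FOO ||= expr` the code above first checks whether FOO is
// defined at all (the defined/DEFINED_CONST probe) so that an undefined
// constant jumps straight to set_label instead of raising on the read.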
5881
5886static void
5887pm_compile_constant_operator_write_node(rb_iseq_t *iseq, const pm_constant_operator_write_node_t *node, const pm_node_flags_t shareability, const pm_node_location_t *node_location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
5888{
5889 const pm_node_location_t location = *node_location;
5890
5891 VALUE name = ID2SYM(pm_constant_id_lookup(scope_node, node->name));
5892 ID method_id = pm_constant_id_lookup(scope_node, node->binary_operator);
5893
5894 pm_compile_constant_read(iseq, name, &node->name_loc, location.node_id, ret, scope_node);
5895
5896 if (shareability != 0) {
5897 pm_compile_shareable_constant_value(iseq, node->value, shareability, name, ret, scope_node, true);
5898 }
5899 else {
5900 PM_COMPILE_NOT_POPPED(node->value);
5901 }
5902
5903 PUSH_SEND_WITH_FLAG(ret, location, method_id, INT2NUM(1), INT2FIX(VM_CALL_ARGS_SIMPLE));
5904 if (!popped) PUSH_INSN(ret, location, dup);
5905
5906 PUSH_INSN1(ret, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_CONST_BASE));
5907 PUSH_INSN1(ret, location, setconstant, name);
5908}
5909
5914static VALUE
5915pm_constant_path_path(const pm_constant_path_node_t *node, const pm_scope_node_t *scope_node)
5916{
5917 VALUE parts = rb_ary_new();
5918 rb_ary_push(parts, rb_id2str(pm_constant_id_lookup(scope_node, node->name)));
5919
5920 const pm_node_t *current = node->parent;
5921 while (current != NULL && PM_NODE_TYPE_P(current, PM_CONSTANT_PATH_NODE)) {
5922 const pm_constant_path_node_t *cast = (const pm_constant_path_node_t *) current;
5923 rb_ary_unshift(parts, rb_id2str(pm_constant_id_lookup(scope_node, cast->name)));
5924 current = cast->parent;
5925 }
5926
5927 if (current == NULL) {
5928 rb_ary_unshift(parts, rb_id2str(idNULL));
5929 }
5930 else if (PM_NODE_TYPE_P(current, PM_CONSTANT_READ_NODE)) {
5931 rb_ary_unshift(parts, rb_id2str(pm_constant_id_lookup(scope_node, ((const pm_constant_read_node_t *) current)->name)));
5932 }
5933 else {
5934 rb_ary_unshift(parts, rb_str_new_cstr("..."));
5935 }
5936
5937 return rb_ary_join(parts, rb_str_new_cstr("::"));
5938}
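
// For example, Foo::Bar renders as "Foo::Bar" and a dynamic owner such as
// (expr)::Bar renders as "...::Bar"; the resulting string is only used as the
// constant's display name by the shareable-constant helpers (for example in
// their error messages).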
5939
5944static void
5945pm_compile_constant_path_write_node(rb_iseq_t *iseq, const pm_constant_path_write_node_t *node, const pm_node_flags_t shareability, const pm_node_location_t *node_location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
5946{
5947 const pm_node_location_t location = *node_location;
5948 const pm_constant_path_node_t *target = node->target;
5949 VALUE name = ID2SYM(pm_constant_id_lookup(scope_node, target->name));
5950
5951 if (target->parent) {
5952 PM_COMPILE_NOT_POPPED((const pm_node_t *) target->parent);
5953 }
5954 else {
5955 PUSH_INSN1(ret, location, putobject, rb_cObject);
5956 }
5957
5958 if (shareability != 0) {
5959 pm_compile_shareable_constant_value(iseq, node->value, shareability, pm_constant_path_path(node->target, scope_node), ret, scope_node, true);
5960 }
5961 else {
5962 PM_COMPILE_NOT_POPPED(node->value);
5963 }
5964
5965 if (!popped) {
5966 PUSH_INSN(ret, location, swap);
5967 PUSH_INSN1(ret, location, topn, INT2FIX(1));
5968 }
5969
5970 PUSH_INSN(ret, location, swap);
5971 PUSH_INSN1(ret, location, setconstant, name);
5972}
5973
5978static void
5979pm_compile_constant_path_and_write_node(rb_iseq_t *iseq, const pm_constant_path_and_write_node_t *node, const pm_node_flags_t shareability, const pm_node_location_t *node_location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
5980{
5981 const pm_node_location_t location = *node_location;
5982 const pm_constant_path_node_t *target = node->target;
5983
5984 VALUE name = ID2SYM(pm_constant_id_lookup(scope_node, target->name));
5985 LABEL *lfin = NEW_LABEL(location.line);
5986
5987 if (target->parent) {
5988 PM_COMPILE_NOT_POPPED(target->parent);
5989 }
5990 else {
5991 PUSH_INSN1(ret, location, putobject, rb_cObject);
5992 }
5993
5994 PUSH_INSN(ret, location, dup);
5995 PUSH_INSN1(ret, location, putobject, Qtrue);
5996 PUSH_INSN1(ret, location, getconstant, name);
5997
5998 if (!popped) PUSH_INSN(ret, location, dup);
5999 PUSH_INSNL(ret, location, branchunless, lfin);
6000
6001 if (!popped) PUSH_INSN(ret, location, pop);
6002
6003 if (shareability != 0) {
6004 pm_compile_shareable_constant_value(iseq, node->value, shareability, pm_constant_path_path(node->target, scope_node), ret, scope_node, true);
6005 }
6006 else {
6007 PM_COMPILE_NOT_POPPED(node->value);
6008 }
6009
6010 if (popped) {
6011 PUSH_INSN1(ret, location, topn, INT2FIX(1));
6012 }
6013 else {
6014 PUSH_INSN1(ret, location, dupn, INT2FIX(2));
6015 PUSH_INSN(ret, location, swap);
6016 }
6017
6018 PUSH_INSN1(ret, location, setconstant, name);
6019 PUSH_LABEL(ret, lfin);
6020
6021 if (!popped) PUSH_INSN(ret, location, swap);
6022 PUSH_INSN(ret, location, pop);
6023}
6024
6029static void
6030pm_compile_constant_path_or_write_node(rb_iseq_t *iseq, const pm_constant_path_or_write_node_t *node, const pm_node_flags_t shareability, const pm_node_location_t *node_location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
6031{
6032 const pm_node_location_t location = *node_location;
6033 const pm_constant_path_node_t *target = node->target;
6034
6035 VALUE name = ID2SYM(pm_constant_id_lookup(scope_node, target->name));
6036 LABEL *lassign = NEW_LABEL(location.line);
6037 LABEL *lfin = NEW_LABEL(location.line);
6038
6039 if (target->parent) {
6040 PM_COMPILE_NOT_POPPED(target->parent);
6041 }
6042 else {
6043 PUSH_INSN1(ret, location, putobject, rb_cObject);
6044 }
6045
6046 PUSH_INSN(ret, location, dup);
6047 PUSH_INSN3(ret, location, defined, INT2FIX(DEFINED_CONST_FROM), name, Qtrue);
6048 PUSH_INSNL(ret, location, branchunless, lassign);
6049
6050 PUSH_INSN(ret, location, dup);
6051 PUSH_INSN1(ret, location, putobject, Qtrue);
6052 PUSH_INSN1(ret, location, getconstant, name);
6053
6054 if (!popped) PUSH_INSN(ret, location, dup);
6055 PUSH_INSNL(ret, location, branchif, lfin);
6056
6057 if (!popped) PUSH_INSN(ret, location, pop);
6058 PUSH_LABEL(ret, lassign);
6059
6060 if (shareability != 0) {
6061 pm_compile_shareable_constant_value(iseq, node->value, shareability, pm_constant_path_path(node->target, scope_node), ret, scope_node, true);
6062 }
6063 else {
6064 PM_COMPILE_NOT_POPPED(node->value);
6065 }
6066
6067 if (popped) {
6068 PUSH_INSN1(ret, location, topn, INT2FIX(1));
6069 }
6070 else {
6071 PUSH_INSN1(ret, location, dupn, INT2FIX(2));
6072 PUSH_INSN(ret, location, swap);
6073 }
6074
6075 PUSH_INSN1(ret, location, setconstant, name);
6076 PUSH_LABEL(ret, lfin);
6077
6078 if (!popped) PUSH_INSN(ret, location, swap);
6079 PUSH_INSN(ret, location, pop);
6080}
6081
6086static void
6087pm_compile_constant_path_operator_write_node(rb_iseq_t *iseq, const pm_constant_path_operator_write_node_t *node, const pm_node_flags_t shareability, const pm_node_location_t *node_location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
6088{
6089 const pm_node_location_t location = *node_location;
6090 const pm_constant_path_node_t *target = node->target;
6091
6092 ID method_id = pm_constant_id_lookup(scope_node, node->binary_operator);
6093 VALUE name = ID2SYM(pm_constant_id_lookup(scope_node, target->name));
6094
6095 if (target->parent) {
6096 PM_COMPILE_NOT_POPPED(target->parent);
6097 }
6098 else {
6099 PUSH_INSN1(ret, location, putobject, rb_cObject);
6100 }
6101
6102 PUSH_INSN(ret, location, dup);
6103 PUSH_INSN1(ret, location, putobject, Qtrue);
6104 PUSH_INSN1(ret, location, getconstant, name);
6105
6106 if (shareability != 0) {
6107 pm_compile_shareable_constant_value(iseq, node->value, shareability, pm_constant_path_path(node->target, scope_node), ret, scope_node, true);
6108 }
6109 else {
6110 PM_COMPILE_NOT_POPPED(node->value);
6111 }
6112
6113 PUSH_CALL(ret, location, method_id, INT2FIX(1));
6114 PUSH_INSN(ret, location, swap);
6115
6116 if (!popped) {
6117 PUSH_INSN1(ret, location, topn, INT2FIX(1));
6118 PUSH_INSN(ret, location, swap);
6119 }
6120
6121 PUSH_INSN1(ret, location, setconstant, name);
6122}
6123
6130#define PM_CONTAINER_P(node) (PM_NODE_TYPE_P(node, PM_ARRAY_NODE) || PM_NODE_TYPE_P(node, PM_HASH_NODE) || PM_NODE_TYPE_P(node, PM_RANGE_NODE))
6131
6136static inline void
6137pm_compile_scope_node(rb_iseq_t *iseq, pm_scope_node_t *scope_node, const pm_node_location_t *node_location, LINK_ANCHOR *const ret, bool popped)
6138{
6139 const pm_node_location_t location = *node_location;
6140 struct rb_iseq_constant_body *body = ISEQ_BODY(iseq);
6141
6142 pm_constant_id_list_t *locals = &scope_node->locals;
6143 pm_parameters_node_t *parameters_node = NULL;
6144 pm_node_list_t *keywords_list = NULL;
6145 pm_node_list_t *optionals_list = NULL;
6146 pm_node_list_t *posts_list = NULL;
6147 pm_node_list_t *requireds_list = NULL;
6148 pm_node_list_t *block_locals = NULL;
6149 bool trailing_comma = false;
6150
6151 if (PM_NODE_TYPE_P(scope_node->ast_node, PM_CLASS_NODE) || PM_NODE_TYPE_P(scope_node->ast_node, PM_MODULE_NODE)) {
6152 PUSH_TRACE(ret, RUBY_EVENT_CLASS);
6153 }
6154
6155 if (scope_node->parameters != NULL) {
6156 switch (PM_NODE_TYPE(scope_node->parameters)) {
6157 case PM_BLOCK_PARAMETERS_NODE: {
6158 pm_block_parameters_node_t *cast = (pm_block_parameters_node_t *) scope_node->parameters;
6159 parameters_node = cast->parameters;
6160 block_locals = &cast->locals;
6161
6162 if (parameters_node) {
6163 if (parameters_node->rest && PM_NODE_TYPE_P(parameters_node->rest, PM_IMPLICIT_REST_NODE)) {
6164 trailing_comma = true;
6165 }
6166 }
6167 break;
6168 }
6169 case PM_PARAMETERS_NODE: {
6170 parameters_node = (pm_parameters_node_t *) scope_node->parameters;
6171 break;
6172 }
6173 case PM_NUMBERED_PARAMETERS_NODE: {
6174 uint32_t maximum = ((const pm_numbered_parameters_node_t *) scope_node->parameters)->maximum;
6175 body->param.lead_num = maximum;
6176 body->param.flags.ambiguous_param0 = maximum == 1;
6177 break;
6178 }
6179 case PM_IT_PARAMETERS_NODE:
6180 body->param.lead_num = 1;
6181 body->param.flags.ambiguous_param0 = true;
6182 break;
6183 default:
6184 rb_bug("Unexpected node type for parameters: %s", pm_node_type_to_str(PM_NODE_TYPE(scope_node->parameters)));
6185 }
6186 }
6187
6188 struct rb_iseq_param_keyword *keyword = NULL;
6189
6190 if (parameters_node) {
6191 optionals_list = &parameters_node->optionals;
6192 requireds_list = &parameters_node->requireds;
6193 keywords_list = &parameters_node->keywords;
6194 posts_list = &parameters_node->posts;
6195 }
6196 else if (scope_node->parameters && (PM_NODE_TYPE_P(scope_node->parameters, PM_NUMBERED_PARAMETERS_NODE) || PM_NODE_TYPE_P(scope_node->parameters, PM_IT_PARAMETERS_NODE))) {
6197 body->param.opt_num = 0;
6198 }
6199 else {
6200 body->param.lead_num = 0;
6201 body->param.opt_num = 0;
6202 }
6203
6204 //********STEP 1**********
6205 // Goal: calculate the table size for the locals, accounting for
6206 // hidden variables and multi target nodes
6207 size_t locals_size = locals->size;
6208
6209 // Index lookup table buffer size is only the number of the locals
6210 st_table *index_lookup_table = st_init_numtable();
6211
6212 int table_size = (int) locals_size;
6213
6214    // `for` nodes have a hidden iteration variable. We add that to the local
6215 // table size here.
6216 if (PM_NODE_TYPE_P(scope_node->ast_node, PM_FOR_NODE)) table_size++;
6217
6218 if (keywords_list && keywords_list->size) {
6219 table_size++;
6220 }
6221
6222 if (requireds_list) {
6223 for (size_t i = 0; i < requireds_list->size; i++) {
6224 // For each MultiTargetNode, we're going to have one
6225 // additional anonymous local not represented in the locals table
6226 // We want to account for this in our table size
6227 pm_node_t *required = requireds_list->nodes[i];
6228 if (PM_NODE_TYPE_P(required, PM_MULTI_TARGET_NODE)) {
6229 table_size++;
6230 }
6231 else if (PM_NODE_TYPE_P(required, PM_REQUIRED_PARAMETER_NODE)) {
6232 if (PM_NODE_FLAG_P(required, PM_PARAMETER_FLAGS_REPEATED_PARAMETER)) {
6233 table_size++;
6234 }
6235 }
6236 }
6237 }
6238
6239 // If we have the `it` implicit local variable, we need to account for
6240 // it in the local table size.
6241 if (scope_node->parameters != NULL && PM_NODE_TYPE_P(scope_node->parameters, PM_IT_PARAMETERS_NODE)) {
6242 table_size++;
6243 }
6244
6245 // Ensure there is enough room in the local table for any
6246 // parameters that have been repeated
6247 // ex: def underscore_parameters(_, _ = 1, _ = 2); _; end
6248 // ^^^^^^^^^^^^
6249 if (optionals_list && optionals_list->size) {
6250 for (size_t i = 0; i < optionals_list->size; i++) {
6251 pm_node_t * node = optionals_list->nodes[i];
6252 if (PM_NODE_FLAG_P(node, PM_PARAMETER_FLAGS_REPEATED_PARAMETER)) {
6253 table_size++;
6254 }
6255 }
6256 }
6257
6258 // If we have an anonymous "rest" node, we'll need to increase the local
6259    // table size to take it into account.
6260 // def m(foo, *, bar)
6261 // ^
6262 if (parameters_node) {
6263 if (parameters_node->rest) {
6264 if (!(PM_NODE_TYPE_P(parameters_node->rest, PM_IMPLICIT_REST_NODE))) {
6265 if (!((const pm_rest_parameter_node_t *) parameters_node->rest)->name || PM_NODE_FLAG_P(parameters_node->rest, PM_PARAMETER_FLAGS_REPEATED_PARAMETER)) {
6266 table_size++;
6267 }
6268 }
6269 }
6270
6271 // def foo(_, **_); _; end
6272 // ^^^
6273 if (parameters_node->keyword_rest) {
6274 // def foo(...); end
6275 // ^^^
6276 // When we have a `...` as the keyword_rest, it's a forwarding_parameter_node and
6277 // we need to leave space for 4 locals: *, **, &, ...
6278 if (PM_NODE_TYPE_P(parameters_node->keyword_rest, PM_FORWARDING_PARAMETER_NODE)) {
6279                // Only optimize methods that take `(...)` as their entire parameter list, e.g. `def foo(...)`.
6280 if (requireds_list->size == 0 && optionals_list->size == 0 && keywords_list->size == 0) {
6281 ISEQ_BODY(iseq)->param.flags.use_block = TRUE;
6282 ISEQ_BODY(iseq)->param.flags.forwardable = TRUE;
6283 table_size += 1;
6284 }
6285 else {
6286 table_size += 4;
6287 }
6288 }
6289 else {
6290 const pm_keyword_rest_parameter_node_t *kw_rest = (const pm_keyword_rest_parameter_node_t *) parameters_node->keyword_rest;
6291
6292 // If it's anonymous or repeated, then we need to allocate stack space
6293 if (!kw_rest->name || PM_NODE_FLAG_P(kw_rest, PM_PARAMETER_FLAGS_REPEATED_PARAMETER)) {
6294 table_size++;
6295 }
6296 }
6297 }
6298 }
6299
6300 if (posts_list) {
6301 for (size_t i = 0; i < posts_list->size; i++) {
6302 // For each MultiTargetNode, we're going to have one
6303 // additional anonymous local not represented in the locals table
6304 // We want to account for this in our table size
6305 pm_node_t *required = posts_list->nodes[i];
6306 if (PM_NODE_TYPE_P(required, PM_MULTI_TARGET_NODE) || PM_NODE_FLAG_P(required, PM_PARAMETER_FLAGS_REPEATED_PARAMETER)) {
6307 table_size++;
6308 }
6309 }
6310 }
6311
6312 if (keywords_list && keywords_list->size) {
6313 for (size_t i = 0; i < keywords_list->size; i++) {
6314 pm_node_t *keyword_parameter_node = keywords_list->nodes[i];
6315 if (PM_NODE_FLAG_P(keyword_parameter_node, PM_PARAMETER_FLAGS_REPEATED_PARAMETER)) {
6316 table_size++;
6317 }
6318 }
6319 }
6320
6321 if (parameters_node && parameters_node->block) {
6322 const pm_block_parameter_node_t *block_node = (const pm_block_parameter_node_t *) parameters_node->block;
6323
6324 if (PM_NODE_FLAG_P(block_node, PM_PARAMETER_FLAGS_REPEATED_PARAMETER) || !block_node->name) {
6325 table_size++;
6326 }
6327 }
6328
6329 // We can create local_table_for_iseq with the correct size
6330 VALUE idtmp = 0;
6331 rb_ast_id_table_t *local_table_for_iseq = ALLOCV(idtmp, sizeof(rb_ast_id_table_t) + table_size * sizeof(ID));
6332 local_table_for_iseq->size = table_size;
6333
6334 //********END OF STEP 1**********
6335
6336 //********STEP 2**********
6337 // Goal: populate iv index table as well as local table, keeping the
6338 // layout of the local table consistent with the layout of the
6339 // stack when calling the method
6340 //
6341 // Do a first pass on all of the parameters, setting their values in
6342 // the local_table_for_iseq, _except_ for Multis who get a hidden
6343 // variable in this step, and will get their names inserted in step 3
6344
6345 // local_index is a cursor that keeps track of the current
6346 // index into local_table_for_iseq. The local table is actually a list,
6347 // and the order of that list must match the order of the items pushed
6348    // on the stack. We need to take into account things pushed on the
6349 // stack that _might not have a name_ (for example array destructuring).
6350    // This index helps us know which item we're dealing with and also gives
6351 // those anonymous items temporary names (as below)
6352 int local_index = 0;
6353
6354 // Here we figure out local table indices and insert them in to the
6355 // index lookup table and local tables.
6356 //
6357 // def foo(a, (b, *c, d), e = 1, *f, g, (h, *i, j), k:, l: 1, **m, &n)
6358 // ^^^^^^^^^^^^^
6359 if (requireds_list && requireds_list->size) {
6360 for (size_t i = 0; i < requireds_list->size; i++, local_index++) {
6361 ID local;
6362
6363 // For each MultiTargetNode, we're going to have one additional
6364 // anonymous local not represented in the locals table. We want
6365 // to account for this in our table size.
6366 pm_node_t *required = requireds_list->nodes[i];
6367
6368 switch (PM_NODE_TYPE(required)) {
6369 case PM_MULTI_TARGET_NODE: {
6370 // def foo(a, (b, *c, d), e = 1, *f, g, (h, *i, j), k:, l: 1, **m, &n)
6371 // ^^^^^^^^^^
6372 local = rb_make_temporary_id(local_index);
6373 local_table_for_iseq->ids[local_index] = local;
6374 break;
6375 }
6376 case PM_REQUIRED_PARAMETER_NODE: {
6377 // def foo(a, (b, *c, d), e = 1, *f, g, (h, *i, j), k:, l: 1, **m, &n)
6378 // ^
6379 const pm_required_parameter_node_t *param = (const pm_required_parameter_node_t *) required;
6380
6381 if (PM_NODE_FLAG_P(required, PM_PARAMETER_FLAGS_REPEATED_PARAMETER)) {
6382 ID local = pm_constant_id_lookup(scope_node, param->name);
6383 local_table_for_iseq->ids[local_index] = local;
6384 }
6385 else {
6386 pm_insert_local_index(param->name, local_index, index_lookup_table, local_table_for_iseq, scope_node);
6387 }
6388
6389 break;
6390 }
6391 default:
6392 rb_bug("Unsupported node in requireds in parameters %s", pm_node_type_to_str(PM_NODE_TYPE(required)));
6393 }
6394 }
6395
6396 body->param.lead_num = (int) requireds_list->size;
6397 body->param.flags.has_lead = true;
6398 }
6399
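    // A block that uses the implicit `it` parameter reserves a local slot for
    // it here, e.g.:
    //
    //     [1, 2, 3].map { it * 2 }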
6400 if (scope_node->parameters != NULL && PM_NODE_TYPE_P(scope_node->parameters, PM_IT_PARAMETERS_NODE)) {
6401 local_table_for_iseq->ids[local_index++] = idItImplicit;
6402 }
6403
6404 // def foo(a, (b, *c, d), e = 1, *f, g, (h, *i, j), k:, l: 1, **m, &n)
6405 // ^^^^^
6406 if (optionals_list && optionals_list->size) {
6407 body->param.opt_num = (int) optionals_list->size;
6408 body->param.flags.has_opt = true;
6409
6410 for (size_t i = 0; i < optionals_list->size; i++, local_index++) {
6411 pm_node_t * node = optionals_list->nodes[i];
6412 pm_constant_id_t name = ((const pm_optional_parameter_node_t *) node)->name;
6413
6414 if (PM_NODE_FLAG_P(node, PM_PARAMETER_FLAGS_REPEATED_PARAMETER)) {
6415 ID local = pm_constant_id_lookup(scope_node, name);
6416 local_table_for_iseq->ids[local_index] = local;
6417 }
6418 else {
6419 pm_insert_local_index(name, local_index, index_lookup_table, local_table_for_iseq, scope_node);
6420 }
6421 }
6422 }
6423
6424 // def foo(a, (b, *c, d), e = 1, *f, g, (h, *i, j), k:, l: 1, **m, &n)
6425 // ^^
6426 if (parameters_node && parameters_node->rest) {
6427 body->param.rest_start = local_index;
6428
6429         // If there's a trailing comma, we'll have an implicit rest node,
6430         // and we don't want it to affect the rest-related fields on body->param.
6431 if (!(PM_NODE_TYPE_P(parameters_node->rest, PM_IMPLICIT_REST_NODE))) {
6432 body->param.flags.has_rest = true;
6433 RUBY_ASSERT(body->param.rest_start != -1);
6434
6435 pm_constant_id_t name = ((const pm_rest_parameter_node_t *) parameters_node->rest)->name;
6436
6437 if (name) {
6438 // def foo(a, (b, *c, d), e = 1, *f, g, (h, *i, j), k:, l: 1, **m, &n)
6439 // ^^
6440 if (PM_NODE_FLAG_P(parameters_node->rest, PM_PARAMETER_FLAGS_REPEATED_PARAMETER)) {
6441 ID local = pm_constant_id_lookup(scope_node, name);
6442 local_table_for_iseq->ids[local_index] = local;
6443 }
6444 else {
6445 pm_insert_local_index(name, local_index, index_lookup_table, local_table_for_iseq, scope_node);
6446 }
6447 }
6448 else {
6449 // def foo(a, (b, *c, d), e = 1, *, g, (h, *i, j), k:, l: 1, **m, &n)
6450 // ^
6451 body->param.flags.anon_rest = true;
6452 pm_insert_local_special(idMULT, local_index, index_lookup_table, local_table_for_iseq);
6453 }
6454
6455 local_index++;
6456 }
6457 }
6458
6459 // def foo(a, (b, *c, d), e = 1, *f, g, (h, *i, j), k:, l: 1, **m, &n)
6460 // ^^^^^^^^^^^^^
6461 if (posts_list && posts_list->size) {
6462 body->param.post_num = (int) posts_list->size;
6463 body->param.post_start = local_index;
6464 body->param.flags.has_post = true;
6465
6466 for (size_t i = 0; i < posts_list->size; i++, local_index++) {
6467 ID local;
6468
6469             // A MultiTargetNode is not represented by name in the locals
6470             // table, so it gets a hidden temporary local here; the names of
6471             // its destructured targets are filled in during step 3.
6472 const pm_node_t *post_node = posts_list->nodes[i];
6473
6474 switch (PM_NODE_TYPE(post_node)) {
6475 case PM_MULTI_TARGET_NODE: {
6476 // def foo(a, (b, *c, d), e = 1, *f, g, (h, *i, j), k:, l: 1, **m, &n)
6477 // ^^^^^^^^^^
6478 local = rb_make_temporary_id(local_index);
6479 local_table_for_iseq->ids[local_index] = local;
6480 break;
6481 }
6482 case PM_REQUIRED_PARAMETER_NODE: {
6483 // def foo(a, (b, *c, d), e = 1, *f, g, (h, *i, j), k:, l: 1, **m, &n)
6484 // ^
6485 const pm_required_parameter_node_t *param = (const pm_required_parameter_node_t *) post_node;
6486
6487 if (PM_NODE_FLAG_P(param, PM_PARAMETER_FLAGS_REPEATED_PARAMETER)) {
6488 ID local = pm_constant_id_lookup(scope_node, param->name);
6489 local_table_for_iseq->ids[local_index] = local;
6490 }
6491 else {
6492 pm_insert_local_index(param->name, local_index, index_lookup_table, local_table_for_iseq, scope_node);
6493 }
6494 break;
6495 }
6496 default:
6497 rb_bug("Unsupported node in posts in parameters %s", pm_node_type_to_str(PM_NODE_TYPE(post_node)));
6498 }
6499 }
6500 }
6501
6502 // def foo(a, (b, *c, d), e = 1, *f, g, (h, *i, j), k:, l: 1, **m, &n)
6503 // ^^^^^^^^
6504     // Keyword parameters create one local per keyword, plus a hidden local
6504     // (the keyword bits local, added below).
6505 if (keywords_list && keywords_list->size) {
6506 keyword = ZALLOC_N(struct rb_iseq_param_keyword, 1);
6507 keyword->num = (int) keywords_list->size;
6508
6509 const VALUE default_values = rb_ary_hidden_new(1);
6510 const VALUE complex_mark = rb_str_tmp_new(0);
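        // complex_mark is a sentinel pushed into default_values for any
        // keyword default that is not a static literal; it is replaced with
        // Qundef below, and the actual default expression is compiled in step 5.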
6511
6512 for (size_t i = 0; i < keywords_list->size; i++) {
6513 pm_node_t *keyword_parameter_node = keywords_list->nodes[i];
6514 pm_constant_id_t name;
6515
6516 // def foo(a, (b, *c, d), e = 1, *f, g, (h, *i, j), k:, l: 1, **m, &n)
6517 // ^^
6518 if (PM_NODE_TYPE_P(keyword_parameter_node, PM_REQUIRED_KEYWORD_PARAMETER_NODE)) {
6519 name = ((const pm_required_keyword_parameter_node_t *) keyword_parameter_node)->name;
6520 keyword->required_num++;
6521 ID local = pm_constant_id_lookup(scope_node, name);
6522
6523 if (PM_NODE_FLAG_P(keyword_parameter_node, PM_PARAMETER_FLAGS_REPEATED_PARAMETER)) {
6524 local_table_for_iseq->ids[local_index] = local;
6525 }
6526 else {
6527 pm_insert_local_index(name, local_index, index_lookup_table, local_table_for_iseq, scope_node);
6528 }
6529 local_index++;
6530 }
6531 }
6532
6533 for (size_t i = 0; i < keywords_list->size; i++) {
6534 pm_node_t *keyword_parameter_node = keywords_list->nodes[i];
6535 pm_constant_id_t name;
6536
6537 // def foo(a, (b, *c, d), e = 1, *f, g, (h, *i, j), k:, l: 1, **m, &n)
6538 // ^^^^
6539 if (PM_NODE_TYPE_P(keyword_parameter_node, PM_OPTIONAL_KEYWORD_PARAMETER_NODE)) {
6540 const pm_optional_keyword_parameter_node_t *cast = ((const pm_optional_keyword_parameter_node_t *) keyword_parameter_node);
6541
6542 pm_node_t *value = cast->value;
6543 name = cast->name;
6544
6545 if (PM_NODE_FLAG_P(value, PM_NODE_FLAG_STATIC_LITERAL) && !PM_CONTAINER_P(value)) {
6546 rb_ary_push(default_values, pm_static_literal_value(iseq, value, scope_node));
6547 }
6548 else {
6549 rb_ary_push(default_values, complex_mark);
6550 }
6551
6552 ID local = pm_constant_id_lookup(scope_node, name);
6553 if (PM_NODE_FLAG_P(keyword_parameter_node, PM_PARAMETER_FLAGS_REPEATED_PARAMETER)) {
6554 local_table_for_iseq->ids[local_index] = local;
6555 }
6556 else {
6557 pm_insert_local_index(name, local_index, index_lookup_table, local_table_for_iseq, scope_node);
6558 }
6559 local_index++;
6560 }
6561
6562 }
6563
6564 if (RARRAY_LEN(default_values)) {
6565 VALUE *dvs = ALLOC_N(VALUE, RARRAY_LEN(default_values));
6566
6567 for (int i = 0; i < RARRAY_LEN(default_values); i++) {
6568 VALUE dv = RARRAY_AREF(default_values, i);
6569 if (dv == complex_mark) dv = Qundef;
6570 RB_OBJ_WRITE(iseq, &dvs[i], dv);
6571 }
6572
6573 keyword->default_values = dvs;
6574 }
6575
6576 // Hidden local for keyword arguments
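        // This hidden local is read by the checkkeyword instruction in step 5
        // to find out which optional keywords the caller actually supplied, so
        // that their defaults can be skipped.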
6577 keyword->bits_start = local_index;
6578 ID local = rb_make_temporary_id(local_index);
6579 local_table_for_iseq->ids[local_index] = local;
6580 local_index++;
6581
6582 body->param.keyword = keyword;
6583 body->param.flags.has_kw = true;
6584 }
6585
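    // A block whose only parameter is a single required one is marked with
    // ambiguous_param0. In Ruby this is the case that behaves differently from
    // a trailing-comma parameter list:
    //
    //     proc { |a| a }.call([1, 2])  # => [1, 2]
    //     proc { |a,| a }.call([1, 2]) # => 1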
6586 if (body->type == ISEQ_TYPE_BLOCK && local_index == 1 && requireds_list && requireds_list->size == 1 && !trailing_comma) {
6587 body->param.flags.ambiguous_param0 = true;
6588 }
6589
6590 if (parameters_node) {
6591 // def foo(a, (b, *c, d), e = 1, *f, g, (h, *i, j), k:, l: 1, **m, &n)
6592 // ^^^
6593 if (parameters_node->keyword_rest) {
6594 switch (PM_NODE_TYPE(parameters_node->keyword_rest)) {
6595 case PM_NO_KEYWORDS_PARAMETER_NODE: {
6596 // def foo(a, (b, *c, d), e = 1, *f, g, (h, *i, j), k:, l: 1, **nil, &n)
6597 // ^^^^^
6598 body->param.flags.accepts_no_kwarg = true;
6599 break;
6600 }
6601 case PM_KEYWORD_REST_PARAMETER_NODE: {
6602 // def foo(a, (b, *c, d), e = 1, *f, g, (h, *i, j), k:, l: 1, **m, &n)
6603 // ^^^
6604 const pm_keyword_rest_parameter_node_t *kw_rest_node = (const pm_keyword_rest_parameter_node_t *) parameters_node->keyword_rest;
6605 if (!body->param.flags.has_kw) {
6606 body->param.keyword = keyword = ZALLOC_N(struct rb_iseq_param_keyword, 1);
6607 }
6608
6609 keyword->rest_start = local_index;
6610 body->param.flags.has_kwrest = true;
6611
6612 pm_constant_id_t constant_id = kw_rest_node->name;
6613 if (constant_id) {
6614 if (PM_NODE_FLAG_P(kw_rest_node, PM_PARAMETER_FLAGS_REPEATED_PARAMETER)) {
6615 ID local = pm_constant_id_lookup(scope_node, constant_id);
6616 local_table_for_iseq->ids[local_index] = local;
6617 }
6618 else {
6619 pm_insert_local_index(constant_id, local_index, index_lookup_table, local_table_for_iseq, scope_node);
6620 }
6621 }
6622 else {
6623 body->param.flags.anon_kwrest = true;
6624 pm_insert_local_special(idPow, local_index, index_lookup_table, local_table_for_iseq);
6625 }
6626
6627 local_index++;
6628 break;
6629 }
6630 case PM_FORWARDING_PARAMETER_NODE: {
6631 // def foo(...)
6632 // ^^^
6633 if (!ISEQ_BODY(iseq)->param.flags.forwardable) {
6634 // Add the anonymous *
6635 body->param.rest_start = local_index;
6636 body->param.flags.has_rest = true;
6637 body->param.flags.anon_rest = true;
6638 pm_insert_local_special(idMULT, local_index++, index_lookup_table, local_table_for_iseq);
6639
6640 // Add the anonymous **
6641 RUBY_ASSERT(!body->param.flags.has_kw);
6642 body->param.flags.has_kw = false;
6643 body->param.flags.has_kwrest = true;
6644 body->param.flags.anon_kwrest = true;
6645 body->param.keyword = keyword = ZALLOC_N(struct rb_iseq_param_keyword, 1);
6646 keyword->rest_start = local_index;
6647 pm_insert_local_special(idPow, local_index++, index_lookup_table, local_table_for_iseq);
6648
6649 // Add the anonymous &
6650 body->param.block_start = local_index;
6651 body->param.flags.has_block = true;
6652 pm_insert_local_special(idAnd, local_index++, index_lookup_table, local_table_for_iseq);
6653 }
6654
6655 // Add the ...
6656 pm_insert_local_special(idDot3, local_index++, index_lookup_table, local_table_for_iseq);
6657 break;
6658 }
6659 default:
6660 rb_bug("node type %s not expected as keyword_rest", pm_node_type_to_str(PM_NODE_TYPE(parameters_node->keyword_rest)));
6661 }
6662 }
6663
6664 // def foo(a, (b, *c, d), e = 1, *f, g, (h, *i, j), k:, l: 1, **m, &n)
6665 // ^^
6666 if (parameters_node->block) {
6667 body->param.block_start = local_index;
6668 body->param.flags.has_block = true;
6669 iseq_set_use_block(iseq);
6670
6671 pm_constant_id_t name = ((const pm_block_parameter_node_t *) parameters_node->block)->name;
6672
6673 if (name) {
6674 if (PM_NODE_FLAG_P(parameters_node->block, PM_PARAMETER_FLAGS_REPEATED_PARAMETER)) {
6675 ID local = pm_constant_id_lookup(scope_node, name);
6676 local_table_for_iseq->ids[local_index] = local;
6677 }
6678 else {
6679 pm_insert_local_index(name, local_index, index_lookup_table, local_table_for_iseq, scope_node);
6680 }
6681 }
6682 else {
6683 pm_insert_local_special(idAnd, local_index, index_lookup_table, local_table_for_iseq);
6684 }
6685
6686 local_index++;
6687 }
6688 }
6689
6690 //********END OF STEP 2**********
6691     // The local table is now consistent with the expected
6692     // stack layout.
6693
6694     // Note: a block whose parameters consist of a single required element
6695     // was flagged above (ambiguous_param0) so CRuby recognizes it as an ambiguous parameter.
6696
6697 //********STEP 3**********
6698 // Goal: fill in the names of the parameters in MultiTargetNodes
6699 //
6700 // Go through requireds again to set the multis
6701
6702 if (requireds_list && requireds_list->size) {
6703 for (size_t i = 0; i < requireds_list->size; i++) {
6704             // For each MultiTargetNode, fill in the names of its
6705             // destructured targets, which were given only a hidden
6706             // temporary local in step 2.
6707 const pm_node_t *required = requireds_list->nodes[i];
6708
6709 if (PM_NODE_TYPE_P(required, PM_MULTI_TARGET_NODE)) {
6710 local_index = pm_compile_destructured_param_locals((const pm_multi_target_node_t *) required, index_lookup_table, local_table_for_iseq, scope_node, local_index);
6711 }
6712 }
6713 }
6714
6715 // Go through posts again to set the multis
6716 if (posts_list && posts_list->size) {
6717 for (size_t i = 0; i < posts_list->size; i++) {
6718             // For each MultiTargetNode, fill in the names of its
6719             // destructured targets, which were given only a hidden
6720             // temporary local in step 2.
6721 const pm_node_t *post = posts_list->nodes[i];
6722
6723 if (PM_NODE_TYPE_P(post, PM_MULTI_TARGET_NODE)) {
6724 local_index = pm_compile_destructured_param_locals((const pm_multi_target_node_t *) post, index_lookup_table, local_table_for_iseq, scope_node, local_index);
6725 }
6726 }
6727 }
6728
6729 // Set any anonymous locals for the for node
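    // A `for` loop is compiled as a block, and the value yielded on each
    // iteration lands in a hidden local before being written to the loop's
    // index target(s), e.g.:
    //
    //     for i in 1..3 do end     # single target, counted as a lead param
    //     for a, b in pairs do end # multiple targets, received as a rest param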
6730 if (PM_NODE_TYPE_P(scope_node->ast_node, PM_FOR_NODE)) {
6731 if (PM_NODE_TYPE_P(((const pm_for_node_t *) scope_node->ast_node)->index, PM_LOCAL_VARIABLE_TARGET_NODE)) {
6732 body->param.lead_num++;
6733 }
6734 else {
6735 body->param.rest_start = local_index;
6736 body->param.flags.has_rest = true;
6737 }
6738
6739 ID local = rb_make_temporary_id(local_index);
6740 local_table_for_iseq->ids[local_index] = local;
6741 local_index++;
6742 }
6743
6744 // Fill in any NumberedParameters, if they exist
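    // One local is reserved per numbered parameter up to the highest one that
    // is referenced, so a block that uses _2 also gets a slot for _1, e.g.:
    //
    //     [[1, 2]].map { _1 + _2 }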
6745 if (scope_node->parameters && PM_NODE_TYPE_P(scope_node->parameters, PM_NUMBERED_PARAMETERS_NODE)) {
6746 int maximum = ((const pm_numbered_parameters_node_t *) scope_node->parameters)->maximum;
6747 RUBY_ASSERT(0 < maximum && maximum <= 9);
6748 for (int i = 0; i < maximum; i++, local_index++) {
6749 const uint8_t param_name[] = { '_', '1' + i };
6750 pm_constant_id_t constant_id = pm_constant_pool_find(&scope_node->parser->constant_pool, param_name, 2);
6751 RUBY_ASSERT(constant_id && "parser should fill in any gaps in numbered parameters");
6752 pm_insert_local_index(constant_id, local_index, index_lookup_table, local_table_for_iseq, scope_node);
6753 }
6754 body->param.lead_num = maximum;
6755 body->param.flags.has_lead = true;
6756 }
6757
6758 // Fill in the anonymous `it` parameter, if it exists
6759 if (scope_node->parameters && PM_NODE_TYPE_P(scope_node->parameters, PM_IT_PARAMETERS_NODE)) {
6760 body->param.lead_num = 1;
6761 body->param.flags.has_lead = true;
6762 }
6763
6764 //********END OF STEP 3**********
6765
6766 //********STEP 4**********
6767 // Goal: fill in the method body locals
6768 // To be explicit, these are the non-parameter locals
6769 // We fill in the block_locals, if they exist
6770 // lambda { |x; y| y }
6771 // ^
6772 if (block_locals && block_locals->size) {
6773 for (size_t i = 0; i < block_locals->size; i++, local_index++) {
6774 pm_constant_id_t constant_id = ((const pm_block_local_variable_node_t *) block_locals->nodes[i])->name;
6775 pm_insert_local_index(constant_id, local_index, index_lookup_table, local_table_for_iseq, scope_node);
6776 }
6777 }
6778
6779 // Fill in any locals we missed
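    // These are the locals that appear in the scope's local list but were not
    // inserted above as parameters (for example plain assignments in the
    // body); st_update lets the insert function skip names that are already
    // present in the lookup table, such as the parameters.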
6780 if (scope_node->locals.size) {
6781 for (size_t i = 0; i < scope_node->locals.size; i++) {
6782 pm_constant_id_t constant_id = locals->ids[i];
6783 if (constant_id) {
6784 struct pm_local_table_insert_ctx ctx;
6785 ctx.scope_node = scope_node;
6786 ctx.local_table_for_iseq = local_table_for_iseq;
6787 ctx.local_index = local_index;
6788
6789 st_update(index_lookup_table, (st_data_t)constant_id, pm_local_table_insert_func, (st_data_t)&ctx);
6790
6791 local_index = ctx.local_index;
6792 }
6793 }
6794 }
6795
6796 //********END OF STEP 4**********
6797
6798 // We set the index_lookup_table on the scope node so we can
6799 // refer to the parameters correctly
6800 if (scope_node->index_lookup_table) {
6801 st_free_table(scope_node->index_lookup_table);
6802 }
6803 scope_node->index_lookup_table = index_lookup_table;
6804 iseq_calc_param_size(iseq);
6805
6806 if (ISEQ_BODY(iseq)->param.flags.forwardable) {
6807 // We're treating `...` as a parameter so that frame
6808 // pushing won't clobber it.
6809 ISEQ_BODY(iseq)->param.size += 1;
6810 }
6811
6812 // FIXME: args?
6813 iseq_set_local_table(iseq, local_table_for_iseq, 0);
6814 iseq_set_parameters_lvar_state(iseq);
6815
6816 scope_node->local_table_for_iseq_size = local_table_for_iseq->size;
6817
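    // The keyword table points into the iseq's local table at the slice that
    // holds the keyword parameter names; those occupy the slots immediately
    // before the hidden keyword bits local.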
6818 if (keyword != NULL) {
6819 size_t keyword_start_index = keyword->bits_start - keyword->num;
6820 keyword->table = (ID *)&ISEQ_BODY(iseq)->local_table[keyword_start_index];
6821 }
6822
6823     //********STEP 5**********
6824     // Goal: compile the code that has to run at call time: optional
         // parameter defaults, non-literal keyword defaults, the destructuring
         // writes for MultiTargetNodes, and finally the body of the scope.
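    //
    // For optional positional parameters we build opt_table: opt_num + 1
    // labels, where entry i marks where execution starts when i optional
    // arguments were supplied and the final entry marks the start of the rest
    // of the method.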
6825 if (optionals_list && optionals_list->size) {
6826 LABEL **opt_table = (LABEL **) ALLOC_N(VALUE, optionals_list->size + 1);
6827 LABEL *label;
6828
6829 // TODO: Should we make an api for NEW_LABEL where you can pass
6830 // a pointer to the label it should fill out? We already
6831 // have a list of labels allocated above so it seems wasteful
6832 // to do the copies.
6833 for (size_t i = 0; i < optionals_list->size; i++) {
6834 label = NEW_LABEL(location.line);
6835 opt_table[i] = label;
6836 PUSH_LABEL(ret, label);
6837 pm_node_t *optional_node = optionals_list->nodes[i];
6838 PM_COMPILE_NOT_POPPED(optional_node);
6839 }
6840
6841 // Set the last label
6842 label = NEW_LABEL(location.line);
6843 opt_table[optionals_list->size] = label;
6844 PUSH_LABEL(ret, label);
6845
6846 body->param.opt_table = (const VALUE *) opt_table;
6847 }
6848
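    // Keyword defaults that were static literals are already stored in
    // keyword->default_values; the ones marked with complex_mark have to be
    // evaluated here at method entry, guarded by checkkeyword so they are
    // skipped when the caller supplied a value.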
6849 if (keywords_list && keywords_list->size) {
6850 size_t optional_index = 0;
6851 for (size_t i = 0; i < keywords_list->size; i++) {
6852 pm_node_t *keyword_parameter_node = keywords_list->nodes[i];
6853 pm_constant_id_t name;
6854
6855 switch (PM_NODE_TYPE(keyword_parameter_node)) {
6856 case PM_OPTIONAL_KEYWORD_PARAMETER_NODE: {
6857 // def foo(a, (b, *c, d), e = 1, *f, g, (h, *i, j), k:, l: 1, **m, &n)
6858 // ^^^^
6859 const pm_optional_keyword_parameter_node_t *cast = ((const pm_optional_keyword_parameter_node_t *) keyword_parameter_node);
6860
6861 pm_node_t *value = cast->value;
6862 name = cast->name;
6863
6864 if (!PM_NODE_FLAG_P(value, PM_NODE_FLAG_STATIC_LITERAL) || PM_CONTAINER_P(value)) {
6865 LABEL *end_label = NEW_LABEL(location.line);
6866
6867 pm_local_index_t index = pm_lookup_local_index(iseq, scope_node, name, 0);
6868 int kw_bits_idx = table_size - body->param.keyword->bits_start;
6869 PUSH_INSN2(ret, location, checkkeyword, INT2FIX(kw_bits_idx + VM_ENV_DATA_SIZE - 1), INT2FIX(optional_index));
6870 PUSH_INSNL(ret, location, branchif, end_label);
6871 PM_COMPILE(value);
6872 PUSH_SETLOCAL(ret, location, index.index, index.level);
6873 PUSH_LABEL(ret, end_label);
6874 }
6875 optional_index++;
6876 break;
6877 }
6878 case PM_REQUIRED_KEYWORD_PARAMETER_NODE:
6879 // def foo(a, (b, *c, d), e = 1, *f, g, (h, *i, j), k:, l: 1, **m, &n)
6880 // ^^
6881 break;
6882 default:
6883 rb_bug("Unexpected keyword parameter node type %s", pm_node_type_to_str(PM_NODE_TYPE(keyword_parameter_node)));
6884 }
6885 }
6886 }
6887
6888 if (requireds_list && requireds_list->size) {
6889 for (size_t i = 0; i < requireds_list->size; i++) {
6890             // For each MultiTargetNode, read back the hidden local that
6891             // received the whole argument and write it out to the
6892             // individual destructured targets.
6893 const pm_node_t *required = requireds_list->nodes[i];
6894
6895 if (PM_NODE_TYPE_P(required, PM_MULTI_TARGET_NODE)) {
6896 PUSH_GETLOCAL(ret, location, table_size - (int)i, 0);
6897 pm_compile_destructured_param_writes(iseq, (const pm_multi_target_node_t *) required, ret, scope_node);
6898 }
6899 }
6900 }
6901
6902 if (posts_list && posts_list->size) {
6903 for (size_t i = 0; i < posts_list->size; i++) {
6904             // For each MultiTargetNode, read back the hidden local that
6905             // received the whole argument and write it out to the
6906             // individual destructured targets.
6907 const pm_node_t *post = posts_list->nodes[i];
6908
6909 if (PM_NODE_TYPE_P(post, PM_MULTI_TARGET_NODE)) {
6910 PUSH_GETLOCAL(ret, location, table_size - body->param.post_start - (int) i, 0);
6911 pm_compile_destructured_param_writes(iseq, (const pm_multi_target_node_t *) post, ret, scope_node);
6912 }
6913 }
6914 }
6915
6916 switch (body->type) {
6917 case ISEQ_TYPE_PLAIN: {
6918 RUBY_ASSERT(PM_NODE_TYPE_P(scope_node->ast_node, PM_INTERPOLATED_REGULAR_EXPRESSION_NODE));
6919
6920         const pm_interpolated_regular_expression_node_t *cast = (const pm_interpolated_regular_expression_node_t *) scope_node->ast_node;
6921         pm_compile_regexp_dynamic(iseq, (const pm_node_t *) cast, &cast->parts, &location, ret, popped, scope_node);
6922
6923 break;
6924 }
6925 case ISEQ_TYPE_BLOCK: {
6926 LABEL *start = ISEQ_COMPILE_DATA(iseq)->start_label = NEW_LABEL(0);
6927 LABEL *end = ISEQ_COMPILE_DATA(iseq)->end_label = NEW_LABEL(0);
6928 const pm_node_location_t block_location = { .line = body->location.first_lineno, .node_id = scope_node->ast_node->node_id };
6929
6930 start->rescued = LABEL_RESCUE_BEG;
6931 end->rescued = LABEL_RESCUE_END;
6932
6933         // `for` nodes automatically assign the iteration value to the
6934         // loop's index variable. We need to handle that write here because
6935         // it has to happen in the context of the block. Note that this
6936         // happens before the B_CALL tracepoint event.
6937 if (PM_NODE_TYPE_P(scope_node->ast_node, PM_FOR_NODE)) {
6938 pm_compile_for_node_index(iseq, ((const pm_for_node_t *) scope_node->ast_node)->index, ret, scope_node);
6939 }
6940
6941 PUSH_TRACE(ret, RUBY_EVENT_B_CALL);
6942 PUSH_INSN(ret, block_location, nop);
6943 PUSH_LABEL(ret, start);
6944
6945 if (scope_node->body != NULL) {
6946 switch (PM_NODE_TYPE(scope_node->ast_node)) {
6947 case PM_POST_EXECUTION_NODE: {
6948 const pm_post_execution_node_t *cast = (const pm_post_execution_node_t *) scope_node->ast_node;
6949 PUSH_INSN1(ret, block_location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
6950
6951 // We create another ScopeNode from the statements within the PostExecutionNode
6952 pm_scope_node_t next_scope_node;
6953 pm_scope_node_init((const pm_node_t *) cast->statements, &next_scope_node, scope_node);
6954
6955 const rb_iseq_t *block = NEW_CHILD_ISEQ(&next_scope_node, make_name_for_block(body->parent_iseq), ISEQ_TYPE_BLOCK, location.line);
6956 pm_scope_node_destroy(&next_scope_node);
6957
6958 PUSH_CALL_WITH_BLOCK(ret, block_location, id_core_set_postexe, INT2FIX(0), block);
6959 break;
6960 }
6961 case PM_INTERPOLATED_REGULAR_EXPRESSION_NODE: {
6962                 const pm_interpolated_regular_expression_node_t *cast = (const pm_interpolated_regular_expression_node_t *) scope_node->ast_node;
6963                 pm_compile_regexp_dynamic(iseq, (const pm_node_t *) cast, &cast->parts, &location, ret, popped, scope_node);
6964 break;
6965 }
6966 default:
6967 pm_compile_node(iseq, scope_node->body, ret, popped, scope_node);
6968 break;
6969 }
6970 }
6971 else {
6972 PUSH_INSN(ret, block_location, putnil);
6973 }
6974
6975 PUSH_LABEL(ret, end);
6976 PUSH_TRACE(ret, RUBY_EVENT_B_RETURN);
6977 ISEQ_COMPILE_DATA(iseq)->last_line = body->location.code_location.end_pos.lineno;
6978
6979         /* the wide-range catch handler must be put last */
6980 PUSH_CATCH_ENTRY(CATCH_TYPE_REDO, start, end, NULL, start);
6981 PUSH_CATCH_ENTRY(CATCH_TYPE_NEXT, start, end, NULL, end);
6982 break;
6983 }
6984 case ISEQ_TYPE_ENSURE: {
6985 const pm_node_location_t statements_location = (scope_node->body != NULL ? PM_NODE_START_LOCATION(scope_node->parser, scope_node->body) : location);
6986 iseq_set_exception_local_table(iseq);
6987
6988 if (scope_node->body != NULL) {
6989 PM_COMPILE_POPPED((const pm_node_t *) scope_node->body);
6990 }
6991
6992 PUSH_GETLOCAL(ret, statements_location, 1, 0);
6993 PUSH_INSN1(ret, statements_location, throw, INT2FIX(0));
6994 return;
6995 }
6996 case ISEQ_TYPE_METHOD: {
6997 ISEQ_COMPILE_DATA(iseq)->root_node = (const void *) scope_node->body;
6998 PUSH_TRACE(ret, RUBY_EVENT_CALL);
6999
7000 if (scope_node->body) {
7001 PM_COMPILE((const pm_node_t *) scope_node->body);
7002 }
7003 else {
7004 PUSH_INSN(ret, location, putnil);
7005 }
7006
7007 ISEQ_COMPILE_DATA(iseq)->root_node = (const void *) scope_node->body;
7008 PUSH_TRACE(ret, RUBY_EVENT_RETURN);
7009
7010 ISEQ_COMPILE_DATA(iseq)->last_line = body->location.code_location.end_pos.lineno;
7011 break;
7012 }
7013 case ISEQ_TYPE_RESCUE: {
7014 iseq_set_exception_local_table(iseq);
7015 if (PM_NODE_TYPE_P(scope_node->ast_node, PM_RESCUE_MODIFIER_NODE)) {
7016 LABEL *lab = NEW_LABEL(location.line);
7017 LABEL *rescue_end = NEW_LABEL(location.line);
7018 PUSH_GETLOCAL(ret, location, LVAR_ERRINFO, 0);
7019 PUSH_INSN1(ret, location, putobject, rb_eStandardError);
7020 PUSH_INSN1(ret, location, checkmatch, INT2FIX(VM_CHECKMATCH_TYPE_RESCUE));
7021 PUSH_INSNL(ret, location, branchif, lab);
7022 PUSH_INSNL(ret, location, jump, rescue_end);
7023 PUSH_LABEL(ret, lab);
7024 PUSH_TRACE(ret, RUBY_EVENT_RESCUE);
7025 PM_COMPILE((const pm_node_t *) scope_node->body);
7026 PUSH_INSN(ret, location, leave);
7027 PUSH_LABEL(ret, rescue_end);
7028 PUSH_GETLOCAL(ret, location, LVAR_ERRINFO, 0);
7029 }
7030 else {
7031 PM_COMPILE((const pm_node_t *) scope_node->ast_node);
7032 }
7033 PUSH_INSN1(ret, location, throw, INT2FIX(0));
7034
7035 return;
7036 }
7037 default:
7038 if (scope_node->body) {
7039 PM_COMPILE((const pm_node_t *) scope_node->body);
7040 }
7041 else {
7042 PUSH_INSN(ret, location, putnil);
7043 }
7044 break;
7045 }
7046
7047 if (PM_NODE_TYPE_P(scope_node->ast_node, PM_CLASS_NODE) || PM_NODE_TYPE_P(scope_node->ast_node, PM_MODULE_NODE)) {
7048 const pm_node_location_t end_location = PM_NODE_END_LOCATION(scope_node->parser, scope_node->ast_node);
7049 PUSH_TRACE(ret, RUBY_EVENT_END);
7050 ISEQ_COMPILE_DATA(iseq)->last_line = end_location.line;
7051 }
7052
7053 if (!PM_NODE_TYPE_P(scope_node->ast_node, PM_ENSURE_NODE)) {
7054 const pm_node_location_t location = { .line = ISEQ_COMPILE_DATA(iseq)->last_line, .node_id = scope_node->ast_node->node_id };
7055 PUSH_INSN(ret, location, leave);
7056 }
7057}
7058
7059static inline void
7060pm_compile_alias_global_variable_node(rb_iseq_t *iseq, const pm_alias_global_variable_node_t *node, const pm_node_location_t *location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
7061{
7062 // alias $foo $bar
7063 // ^^^^^^^^^^^^^^^
7064 PUSH_INSN1(ret, *location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
7065
7066 {
7067 const pm_location_t *name_loc = &node->new_name->location;
7068 VALUE operand = ID2SYM(rb_intern3((const char *) name_loc->start, name_loc->end - name_loc->start, scope_node->encoding));
7069 PUSH_INSN1(ret, *location, putobject, operand);
7070 }
7071
7072 {
7073 const pm_location_t *name_loc = &node->old_name->location;
7074 VALUE operand = ID2SYM(rb_intern3((const char *) name_loc->start, name_loc->end - name_loc->start, scope_node->encoding));
7075 PUSH_INSN1(ret, *location, putobject, operand);
7076 }
7077
7078 PUSH_SEND(ret, *location, id_core_set_variable_alias, INT2FIX(2));
7079 if (popped) PUSH_INSN(ret, *location, pop);
7080}
7081
7082static inline void
7083pm_compile_alias_method_node(rb_iseq_t *iseq, const pm_alias_method_node_t *node, const pm_node_location_t *location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
7084{
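    // alias foo bar
    // ^^^^^^^^^^^^^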
7085 PUSH_INSN1(ret, *location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
7086 PUSH_INSN1(ret, *location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_CBASE));
7087 PM_COMPILE_NOT_POPPED(node->new_name);
7088 PM_COMPILE_NOT_POPPED(node->old_name);
7089
7090 PUSH_SEND(ret, *location, id_core_set_method_alias, INT2FIX(3));
7091 if (popped) PUSH_INSN(ret, *location, pop);
7092}
7093
7094static inline void
7095pm_compile_and_node(rb_iseq_t *iseq, const pm_and_node_t *node, const pm_node_location_t *location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
7096{
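    // foo && bar
    // ^^^^^^^^^^
    //
    // The left side is evaluated first and, when the result is needed, dup'd
    // so that a falsy value can be left on the stack as the overall result
    // when we branch to the end; otherwise it is popped and the right side
    // becomes the result.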
7097 LABEL *end_label = NEW_LABEL(location->line);
7098
7099 PM_COMPILE_NOT_POPPED(node->left);
7100 if (!popped) PUSH_INSN(ret, *location, dup);
7101 PUSH_INSNL(ret, *location, branchunless, end_label);
7102
7103 if (!popped) PUSH_INSN(ret, *location, pop);
7104 PM_COMPILE(node->right);
7105 PUSH_LABEL(ret, end_label);
7106}
7107
7108static inline void
7109pm_compile_array_node(rb_iseq_t *iseq, const pm_node_t *node, const pm_node_list_t *elements, const pm_node_location_t *location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
7110{
7111 // If every node in the array is static, then we can compile the entire
7112 // array now instead of later.
7113 if (PM_NODE_FLAG_P(node, PM_NODE_FLAG_STATIC_LITERAL)) {
7114 // We're only going to compile this node if it's not popped. If it
7115 // is popped, then we know we don't need to do anything since it's
7116 // statically known.
7117 if (!popped) {
7118 if (elements->size) {
7119 VALUE value = pm_static_literal_value(iseq, node, scope_node);
7120 RB_OBJ_SET_FROZEN_SHAREABLE(value);
7121 PUSH_INSN1(ret, *location, duparray, value);
7122 }
7123 else {
7124 PUSH_INSN1(ret, *location, newarray, INT2FIX(0));
7125 }
7126 }
7127 return;
7128 }
7129
7130     // Since there may be side effects inside the array contents, we're
7131     // going to build the array entirely at runtime. We'll do this by
7132     // pushing all of the elements onto the stack and then combining them
7133     // with newarray.
7134     //
7135     // If this array is popped, then this serves only to ensure we enact
7136     // all side effects (like method calls) that are contained within the
7137     // array contents.
7138     //
7139     // We treat each run of non-splat elements as its own chunk, emitted
7140     // with a newarray (or pushtoarray), and then concatenate the results
7141     // of the SplatNode expressions in between with concattoarray.
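    //
    // For example (assuming foo/bar/baz/qux are method calls):
    //
    //     [foo, *bar, baz, qux]
    //
    // compiles roughly to: foo, newarray 1, bar, concattoarray, baz, qux,
    // pushtoarray 2.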
7142 const int max_new_array_size = 0x100;
7143 const unsigned int min_tmp_array_size = 0x40;
7144
7145 int new_array_size = 0;
7146 bool first_chunk = true;
7147
7148 // This is an optimization wherein we keep track of whether or not
7149 // the previous element was a static literal. If it was, then we do
7150 // not attempt to check if we have a subarray that can be optimized.
7151 // If it was not, then we do check.
7152 bool static_literal = false;
7153
7154 // Either create a new array, or push to the existing array.
7155#define FLUSH_CHUNK \
7156 if (new_array_size) { \
7157 if (first_chunk) PUSH_INSN1(ret, *location, newarray, INT2FIX(new_array_size)); \
7158 else PUSH_INSN1(ret, *location, pushtoarray, INT2FIX(new_array_size)); \
7159 first_chunk = false; \
7160 new_array_size = 0; \
7161 }
7162
7163 for (size_t index = 0; index < elements->size; index++) {
7164 const pm_node_t *element = elements->nodes[index];
7165
7166 if (PM_NODE_TYPE_P(element, PM_SPLAT_NODE)) {
7167 FLUSH_CHUNK;
7168
7169 const pm_splat_node_t *splat_element = (const pm_splat_node_t *) element;
7170 if (splat_element->expression) {
7171 PM_COMPILE_NOT_POPPED(splat_element->expression);
7172 }
7173 else {
7174 pm_local_index_t index = pm_lookup_local_index(iseq, scope_node, PM_CONSTANT_MULT, 0);
7175 PUSH_GETLOCAL(ret, *location, index.index, index.level);
7176 }
7177
7178 if (first_chunk) {
7179 // If this is the first element of the array then we
7180 // need to splatarray the elements into the list.
7181 PUSH_INSN1(ret, *location, splatarray, Qtrue);
7182 first_chunk = false;
7183 }
7184 else {
7185 PUSH_INSN(ret, *location, concattoarray);
7186 }
7187
7188 static_literal = false;
7189 }
7190 else if (PM_NODE_TYPE_P(element, PM_KEYWORD_HASH_NODE)) {
7191 if (new_array_size == 0 && first_chunk) {
7192 PUSH_INSN1(ret, *location, newarray, INT2FIX(0));
7193 first_chunk = false;
7194 }
7195 else {
7196 FLUSH_CHUNK;
7197 }
7198
7199 // If we get here, then this is the last element of the
7200 // array/arguments, because it cannot be followed by
7201 // anything else without a syntax error. This looks like:
7202 //
7203 // [foo, bar, baz: qux]
7204 // ^^^^^^^^
7205 //
7206 // [foo, bar, **baz]
7207 // ^^^^^
7208 //
7209 const pm_keyword_hash_node_t *keyword_hash = (const pm_keyword_hash_node_t *) element;
7210 pm_compile_hash_elements(iseq, element, &keyword_hash->elements, 0, Qundef, false, ret, scope_node);
7211
7212             // This check controls the manner in which we push the hash
7213             // onto the array. If the hash consists solely of keyword
7214             // splats, then we can use the specialized pushtoarraykwsplat
7215             // instruction, which checks whether the hash is empty before pushing it.
7216 size_t splats = 0;
7217 while (splats < keyword_hash->elements.size && PM_NODE_TYPE_P(keyword_hash->elements.nodes[splats], PM_ASSOC_SPLAT_NODE)) splats++;
7218
7219 if (keyword_hash->elements.size == splats) {
7220 PUSH_INSN(ret, *location, pushtoarraykwsplat);
7221 }
7222 else {
7223 new_array_size++;
7224 }
7225 }
7226 else if (
7227 PM_NODE_FLAG_P(element, PM_NODE_FLAG_STATIC_LITERAL) &&
7228 !PM_CONTAINER_P(element) &&
7229 !static_literal &&
7230 ((index + min_tmp_array_size) < elements->size)
7231 ) {
7232 // If we have a static literal, then there's the potential
7233 // to group a bunch of them together with a literal array
7234 // and then concat them together.
7235 size_t right_index = index + 1;
7236 while (
7237 right_index < elements->size &&
7238 PM_NODE_FLAG_P(elements->nodes[right_index], PM_NODE_FLAG_STATIC_LITERAL) &&
7239 !PM_CONTAINER_P(elements->nodes[right_index])
7240 ) right_index++;
7241
7242 size_t tmp_array_size = right_index - index;
7243 if (tmp_array_size >= min_tmp_array_size) {
7244 VALUE tmp_array = rb_ary_hidden_new(tmp_array_size);
7245
7246 // Create the temporary array.
7247 for (; tmp_array_size; tmp_array_size--)
7248 rb_ary_push(tmp_array, pm_static_literal_value(iseq, elements->nodes[index++], scope_node));
7249
7250 index--; // about to be incremented by for loop
7251 RB_OBJ_SET_FROZEN_SHAREABLE(tmp_array);
7252
7253 // Emit the optimized code.
7254 FLUSH_CHUNK;
7255 if (first_chunk) {
7256 PUSH_INSN1(ret, *location, duparray, tmp_array);
7257 first_chunk = false;
7258 }
7259 else {
7260 PUSH_INSN1(ret, *location, putobject, tmp_array);
7261 PUSH_INSN(ret, *location, concattoarray);
7262 }
7263 }
7264 else {
7265 PM_COMPILE_NOT_POPPED(element);
7266 if (++new_array_size >= max_new_array_size) FLUSH_CHUNK;
7267 static_literal = true;
7268 }
7269 } else {
7270 PM_COMPILE_NOT_POPPED(element);
7271 if (++new_array_size >= max_new_array_size) FLUSH_CHUNK;
7272 static_literal = false;
7273 }
7274 }
7275
7276 FLUSH_CHUNK;
7277 if (popped) PUSH_INSN(ret, *location, pop);
7278
7279#undef FLUSH_CHUNK
7280}
7281
7282static inline void
7283pm_compile_break_node(rb_iseq_t *iseq, const pm_break_node_t *node, const pm_node_location_t *location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
7284{
7285 unsigned long throw_flag = 0;
7286
7287 if (ISEQ_COMPILE_DATA(iseq)->redo_label != 0 && can_add_ensure_iseq(iseq)) {
7288 /* while/until */
7289 LABEL *splabel = NEW_LABEL(0);
7290 PUSH_LABEL(ret, splabel);
7291 PUSH_ADJUST(ret, *location, ISEQ_COMPILE_DATA(iseq)->redo_label);
7292
7293 if (node->arguments != NULL) {
7294 PM_COMPILE_NOT_POPPED((const pm_node_t *) node->arguments);
7295 }
7296 else {
7297 PUSH_INSN(ret, *location, putnil);
7298 }
7299
7300 pm_add_ensure_iseq(ret, iseq, 0, scope_node);
7301 PUSH_INSNL(ret, *location, jump, ISEQ_COMPILE_DATA(iseq)->end_label);
7302 PUSH_ADJUST_RESTORE(ret, splabel);
7303 if (!popped) PUSH_INSN(ret, *location, putnil);
7304 }
7305 else {
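        // Otherwise the break has to escape from an enclosing scope, so we
        // walk up the parent iseqs looking for the target (a while/until loop
        // or a block) and emit a throw instruction tagged with TAG_BREAK.
        // Breaking out of an eval is an error.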
7306 const rb_iseq_t *ip = iseq;
7307
7308 while (ip) {
7309 if (!ISEQ_COMPILE_DATA(ip)) {
7310 ip = 0;
7311 break;
7312 }
7313
7314 if (ISEQ_COMPILE_DATA(ip)->redo_label != 0) {
7315 throw_flag = VM_THROW_NO_ESCAPE_FLAG;
7316 }
7317 else if (ISEQ_BODY(ip)->type == ISEQ_TYPE_BLOCK) {
7318 throw_flag = 0;
7319 }
7320 else if (ISEQ_BODY(ip)->type == ISEQ_TYPE_EVAL) {
7321 COMPILE_ERROR(iseq, location->line, "Invalid break");
7322 return;
7323 }
7324 else {
7325 ip = ISEQ_BODY(ip)->parent_iseq;
7326 continue;
7327 }
7328
7329 /* escape from block */
7330 if (node->arguments != NULL) {
7331 PM_COMPILE_NOT_POPPED((const pm_node_t *) node->arguments);
7332 }
7333 else {
7334 PUSH_INSN(ret, *location, putnil);
7335 }
7336
7337 PUSH_INSN1(ret, *location, throw, INT2FIX(throw_flag | TAG_BREAK));
7338 if (popped) PUSH_INSN(ret, *location, pop);
7339
7340 return;
7341 }
7342
7343 COMPILE_ERROR(iseq, location->line, "Invalid break");
7344 }
7345}
7346
7347static inline void
7348pm_compile_call_node(rb_iseq_t *iseq, const pm_call_node_t *node, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
7349{
7350 ID method_id = pm_constant_id_lookup(scope_node, node->name);
7351
7352 const pm_location_t *message_loc = &node->message_loc;
7353 if (message_loc->start == NULL) message_loc = &node->base.location;
7354
7355 const pm_node_location_t location = PM_LOCATION_START_LOCATION(scope_node->parser, message_loc, node->base.node_id);
7356 const char *builtin_func;
7357
7358 if (UNLIKELY(iseq_has_builtin_function_table(iseq)) && (builtin_func = pm_iseq_builtin_function_name(scope_node, node->receiver, method_id)) != NULL) {
7359 pm_compile_builtin_function_call(iseq, ret, scope_node, node, &location, popped, ISEQ_COMPILE_DATA(iseq)->current_block, builtin_func);
7360 return;
7361 }
7362
7363 LABEL *start = NEW_LABEL(location.line);
7364 if (node->block) PUSH_LABEL(ret, start);
7365
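    // Calls to String#freeze and String#-@ on a string literal with no
    // arguments get specialized instructions (opt_str_freeze / opt_str_uminus)
    // that push the frozen string directly and only fall back to a real method
    // call if the method has been redefined.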
7366 switch (method_id) {
7367 case idUMinus: {
7368 if (pm_opt_str_freeze_p(iseq, node)) {
7369 VALUE value = parse_static_literal_string(iseq, scope_node, node->receiver, &((const pm_string_node_t * ) node->receiver)->unescaped);
7370 const struct rb_callinfo *callinfo = new_callinfo(iseq, idUMinus, 0, 0, NULL, FALSE);
7371 PUSH_INSN2(ret, location, opt_str_uminus, value, callinfo);
7372 if (popped) PUSH_INSN(ret, location, pop);
7373 return;
7374 }
7375 break;
7376 }
7377 case idFreeze: {
7378 if (pm_opt_str_freeze_p(iseq, node)) {
7379 VALUE value = parse_static_literal_string(iseq, scope_node, node->receiver, &((const pm_string_node_t * ) node->receiver)->unescaped);
7380 const struct rb_callinfo *callinfo = new_callinfo(iseq, idFreeze, 0, 0, NULL, FALSE);
7381 PUSH_INSN2(ret, location, opt_str_freeze, value, callinfo);
7382 if (popped) PUSH_INSN(ret, location, pop);
7383 return;
7384 }
7385 break;
7386 }
7387 }
7388
7389 if (PM_NODE_FLAG_P(node, PM_CALL_NODE_FLAGS_ATTRIBUTE_WRITE) && !popped) {
7390 PUSH_INSN(ret, location, putnil);
7391 }
7392
7393 if (node->receiver == NULL) {
7394 PUSH_INSN(ret, location, putself);
7395 }
7396 else {
7397 if (method_id == idCall && PM_NODE_TYPE_P(node->receiver, PM_LOCAL_VARIABLE_READ_NODE)) {
7398 const pm_local_variable_read_node_t *read_node_cast = (const pm_local_variable_read_node_t *) node->receiver;
7399 uint32_t node_id = node->receiver->node_id;
7400 int idx, level;
7401
7402 if (iseq_block_param_id_p(iseq, pm_constant_id_lookup(scope_node, read_node_cast->name), &idx, &level)) {
7403 ADD_ELEM(ret, (LINK_ELEMENT *) new_insn_body(iseq, location.line, node_id, BIN(getblockparamproxy), 2, INT2FIX((idx) + VM_ENV_DATA_SIZE - 1), INT2FIX(level)));
7404 }
7405 else {
7406 PM_COMPILE_NOT_POPPED(node->receiver);
7407 }
7408 }
7409 else {
7410 PM_COMPILE_NOT_POPPED(node->receiver);
7411 }
7412 }
7413
7414 pm_compile_call(iseq, node, ret, popped, scope_node, method_id, start);
7415 return;
7416}
7417
7418static inline void
7419pm_compile_call_operator_write_node(rb_iseq_t *iseq, const pm_call_operator_write_node_t *node, const pm_node_location_t *location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
7420{
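    // foo.bar += baz
    // ^^^^^^^^^^^^^^
    //
    // The receiver is evaluated once and dup'd so it can serve both the read
    // (foo.bar) and the write (foo.bar=). With safe navigation (&.) a
    // branchnil short-circuits the whole assignment when the receiver is nil.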
7421 int flag = 0;
7422
7423 if (PM_NODE_FLAG_P(node, PM_CALL_NODE_FLAGS_IGNORE_VISIBILITY)) {
7424 flag = VM_CALL_FCALL;
7425 }
7426
7427 PM_COMPILE_NOT_POPPED(node->receiver);
7428
7429 LABEL *safe_label = NULL;
7430 if (PM_NODE_FLAG_P(node, PM_CALL_NODE_FLAGS_SAFE_NAVIGATION)) {
7431 safe_label = NEW_LABEL(location->line);
7432 PUSH_INSN(ret, *location, dup);
7433 PUSH_INSNL(ret, *location, branchnil, safe_label);
7434 }
7435
7436 PUSH_INSN(ret, *location, dup);
7437
7438 ID id_read_name = pm_constant_id_lookup(scope_node, node->read_name);
7439 PUSH_SEND_WITH_FLAG(ret, *location, id_read_name, INT2FIX(0), INT2FIX(flag));
7440
7441 PM_COMPILE_NOT_POPPED(node->value);
7442 ID id_operator = pm_constant_id_lookup(scope_node, node->binary_operator);
7443 PUSH_SEND(ret, *location, id_operator, INT2FIX(1));
7444
7445 if (!popped) {
7446 PUSH_INSN(ret, *location, swap);
7447 PUSH_INSN1(ret, *location, topn, INT2FIX(1));
7448 }
7449
7450 ID id_write_name = pm_constant_id_lookup(scope_node, node->write_name);
7451 PUSH_SEND_WITH_FLAG(ret, *location, id_write_name, INT2FIX(1), INT2FIX(flag));
7452
7453 if (safe_label != NULL && popped) PUSH_LABEL(ret, safe_label);
7454 PUSH_INSN(ret, *location, pop);
7455 if (safe_label != NULL && !popped) PUSH_LABEL(ret, safe_label);
7456}
7457
7474static VALUE
7475pm_compile_case_node_dispatch(rb_iseq_t *iseq, VALUE dispatch, const pm_node_t *node, LABEL *label, const pm_scope_node_t *scope_node)
7476{
7477 VALUE key = Qundef;
7478 switch (PM_NODE_TYPE(node)) {
7479 case PM_FLOAT_NODE: {
7480 key = pm_static_literal_value(iseq, node, scope_node);
7481 double intptr;
7482
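        // A Float key with no fractional part is stored as an Integer so that
        // the optimized hash lookup agrees with the `===` semantics of the
        // non-optimized path (e.g. `when 1.0` should be hit by an Integer 1 predicate).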
7483 if (modf(RFLOAT_VALUE(key), &intptr) == 0.0) {
7484 key = (FIXABLE(intptr) ? LONG2FIX((long) intptr) : rb_dbl2big(intptr));
7485 }
7486
7487 break;
7488 }
7489 case PM_FALSE_NODE:
7490 case PM_INTEGER_NODE:
7491 case PM_NIL_NODE:
7492 case PM_SOURCE_FILE_NODE:
7493 case PM_SOURCE_LINE_NODE:
7494 case PM_SYMBOL_NODE:
7495 case PM_TRUE_NODE:
7496 key = pm_static_literal_value(iseq, node, scope_node);
7497 break;
7498 case PM_STRING_NODE: {
7499 const pm_string_node_t *cast = (const pm_string_node_t *) node;
7500 key = parse_static_literal_string(iseq, scope_node, node, &cast->unescaped);
7501 break;
7502 }
7503 default:
7504 return Qundef;
7505 }
7506
7507 if (NIL_P(rb_hash_lookup(dispatch, key))) {
7508 rb_hash_aset(dispatch, key, ((VALUE) label) | 1);
7509 }
7510 return dispatch;
7511}
7512
7516static inline void
7517pm_compile_case_node(rb_iseq_t *iseq, const pm_case_node_t *cast, const pm_node_location_t *node_location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
7518{
7519 const pm_parser_t *parser = scope_node->parser;
7520 const pm_node_location_t location = *node_location;
7521 const pm_node_list_t *conditions = &cast->conditions;
7522
7523 // This is the anchor that we will compile the conditions of the various
7524 // `when` nodes into. If a match is found, they will need to jump into
7525 // the body_seq anchor to the correct spot.
7526 DECL_ANCHOR(cond_seq);
7527
7528 // This is the anchor that we will compile the bodies of the various
7529 // `when` nodes into. We'll make sure that the clauses that are compiled
7530 // jump into the correct spots within this anchor.
7531 DECL_ANCHOR(body_seq);
7532
7533 // This is the label where all of the when clauses will jump to if they
7534 // have matched and are done executing their bodies.
7535 LABEL *end_label = NEW_LABEL(location.line);
7536
7537 // If we have a predicate on this case statement, then it's going to
7538 // compare all of the various when clauses to the predicate. If we
7539 // don't, then it's basically an if-elsif-else chain.
7540 if (cast->predicate == NULL) {
7541 // Establish branch coverage for the case node.
7542 VALUE branches = Qfalse;
7543 rb_code_location_t case_location = { 0 };
7544 int branch_id = 0;
7545
7546 if (PM_BRANCH_COVERAGE_P(iseq)) {
7547 case_location = pm_code_location(scope_node, (const pm_node_t *) cast);
7548 branches = decl_branch_base(iseq, PTR2NUM(cast), &case_location, "case");
7549 }
7550
7551 // Loop through each clauses in the case node and compile each of
7552 // the conditions within them into cond_seq. If they match, they
7553 // should jump into their respective bodies in body_seq.
7554 for (size_t clause_index = 0; clause_index < conditions->size; clause_index++) {
7555 const pm_when_node_t *clause = (const pm_when_node_t *) conditions->nodes[clause_index];
7556 const pm_node_list_t *conditions = &clause->conditions;
7557
7558 int clause_lineno = pm_node_line_number(parser, (const pm_node_t *) clause);
7559 LABEL *label = NEW_LABEL(clause_lineno);
7560 PUSH_LABEL(body_seq, label);
7561
7562 // Establish branch coverage for the when clause.
7563 if (PM_BRANCH_COVERAGE_P(iseq)) {
7564 rb_code_location_t branch_location = pm_code_location(scope_node, clause->statements != NULL ? ((const pm_node_t *) clause->statements) : ((const pm_node_t *) clause));
7565 add_trace_branch_coverage(iseq, body_seq, &branch_location, branch_location.beg_pos.column, branch_id++, "when", branches);
7566 }
7567
7568 if (clause->statements != NULL) {
7569 pm_compile_node(iseq, (const pm_node_t *) clause->statements, body_seq, popped, scope_node);
7570 }
7571 else if (!popped) {
7572 PUSH_SYNTHETIC_PUTNIL(body_seq, iseq);
7573 }
7574
7575 PUSH_INSNL(body_seq, location, jump, end_label);
7576
7577 // Compile each of the conditions for the when clause into the
7578 // cond_seq. Each one should have a unique condition and should
7579 // jump to the subsequent one if it doesn't match.
7580 for (size_t condition_index = 0; condition_index < conditions->size; condition_index++) {
7581 const pm_node_t *condition = conditions->nodes[condition_index];
7582
7583 if (PM_NODE_TYPE_P(condition, PM_SPLAT_NODE)) {
7584 pm_node_location_t cond_location = PM_NODE_START_LOCATION(parser, condition);
7585 PUSH_INSN(cond_seq, cond_location, putnil);
7586 pm_compile_node(iseq, condition, cond_seq, false, scope_node);
7587 PUSH_INSN1(cond_seq, cond_location, checkmatch, INT2FIX(VM_CHECKMATCH_TYPE_WHEN | VM_CHECKMATCH_ARRAY));
7588 PUSH_INSNL(cond_seq, cond_location, branchif, label);
7589 }
7590 else {
7591 LABEL *next_label = NEW_LABEL(pm_node_line_number(parser, condition));
7592 pm_compile_branch_condition(iseq, cond_seq, condition, label, next_label, false, scope_node);
7593 PUSH_LABEL(cond_seq, next_label);
7594 }
7595 }
7596 }
7597
7598 // Establish branch coverage for the else clause (implicit or
7599 // explicit).
7600 if (PM_BRANCH_COVERAGE_P(iseq)) {
7601 rb_code_location_t branch_location;
7602
7603 if (cast->else_clause == NULL) {
7604 branch_location = case_location;
7605 } else if (cast->else_clause->statements == NULL) {
7606 branch_location = pm_code_location(scope_node, (const pm_node_t *) cast->else_clause);
7607 } else {
7608 branch_location = pm_code_location(scope_node, (const pm_node_t *) cast->else_clause->statements);
7609 }
7610
7611 add_trace_branch_coverage(iseq, cond_seq, &branch_location, branch_location.beg_pos.column, branch_id, "else", branches);
7612 }
7613
7614 // Compile the else clause if there is one.
7615 if (cast->else_clause != NULL) {
7616 pm_compile_node(iseq, (const pm_node_t *) cast->else_clause, cond_seq, popped, scope_node);
7617 }
7618 else if (!popped) {
7619 PUSH_SYNTHETIC_PUTNIL(cond_seq, iseq);
7620 }
7621
7622 // Finally, jump to the end label if none of the other conditions
7623 // have matched.
7624 PUSH_INSNL(cond_seq, location, jump, end_label);
7625 PUSH_SEQ(ret, cond_seq);
7626 }
7627 else {
7628 // Establish branch coverage for the case node.
7629 VALUE branches = Qfalse;
7630 rb_code_location_t case_location = { 0 };
7631 int branch_id = 0;
7632
7633 if (PM_BRANCH_COVERAGE_P(iseq)) {
7634 case_location = pm_code_location(scope_node, (const pm_node_t *) cast);
7635 branches = decl_branch_base(iseq, PTR2NUM(cast), &case_location, "case");
7636 }
7637
7638 // This is the label where everything will fall into if none of the
7639 // conditions matched.
7640 LABEL *else_label = NEW_LABEL(location.line);
7641
7642 // It's possible for us to speed up the case node by using a
7643 // dispatch hash. This is a hash that maps the conditions of the
7644 // various when clauses to the labels of their bodies. If we can
7645 // compile the conditions into a hash key, then we can use a hash
7646 // lookup to jump directly to the correct when clause body.
7647 VALUE dispatch = Qundef;
7648 if (ISEQ_COMPILE_DATA(iseq)->option->specialized_instruction) {
7649 dispatch = rb_hash_new();
7650 RHASH_TBL_RAW(dispatch)->type = &cdhash_type;
7651 }
7652
7653 // We're going to loop through each of the conditions in the case
7654 // node and compile each of their contents into both the cond_seq
7655 // and the body_seq. Each condition will use its own label to jump
7656 // from its conditions into its body.
7657 //
7658 // Note that none of the code in the loop below should be adding
7659 // anything to ret, as we're going to be laying out the entire case
7660 // node instructions later.
7661 for (size_t clause_index = 0; clause_index < conditions->size; clause_index++) {
7662 const pm_when_node_t *clause = (const pm_when_node_t *) conditions->nodes[clause_index];
7663 pm_node_location_t clause_location = PM_NODE_START_LOCATION(parser, (const pm_node_t *) clause);
7664
7665 const pm_node_list_t *conditions = &clause->conditions;
7666 LABEL *label = NEW_LABEL(clause_location.line);
7667
7668 // Compile each of the conditions for the when clause into the
7669 // cond_seq. Each one should have a unique comparison that then
7670 // jumps into the body if it matches.
7671 for (size_t condition_index = 0; condition_index < conditions->size; condition_index++) {
7672 const pm_node_t *condition = conditions->nodes[condition_index];
7673 const pm_node_location_t condition_location = PM_NODE_START_LOCATION(parser, condition);
7674
7675 // If we haven't already abandoned the optimization, then
7676 // we're going to try to compile the condition into the
7677 // dispatch hash.
7678 if (dispatch != Qundef) {
7679 dispatch = pm_compile_case_node_dispatch(iseq, dispatch, condition, label, scope_node);
7680 }
7681
7682 if (PM_NODE_TYPE_P(condition, PM_SPLAT_NODE)) {
7683 PUSH_INSN(cond_seq, condition_location, dup);
7684 pm_compile_node(iseq, condition, cond_seq, false, scope_node);
7685 PUSH_INSN1(cond_seq, condition_location, checkmatch, INT2FIX(VM_CHECKMATCH_TYPE_CASE | VM_CHECKMATCH_ARRAY));
7686 }
7687 else {
7688 if (PM_NODE_TYPE_P(condition, PM_STRING_NODE)) {
7689 const pm_string_node_t *string = (const pm_string_node_t *) condition;
7690 VALUE value = parse_static_literal_string(iseq, scope_node, condition, &string->unescaped);
7691 PUSH_INSN1(cond_seq, condition_location, putobject, value);
7692 }
7693 else {
7694 pm_compile_node(iseq, condition, cond_seq, false, scope_node);
7695 }
7696
7697 PUSH_INSN1(cond_seq, condition_location, topn, INT2FIX(1));
7698 PUSH_SEND_WITH_FLAG(cond_seq, condition_location, idEqq, INT2NUM(1), INT2FIX(VM_CALL_FCALL | VM_CALL_ARGS_SIMPLE));
7699 }
7700
7701 PUSH_INSNL(cond_seq, condition_location, branchif, label);
7702 }
7703
7704 // Now, add the label to the body and compile the body of the
7705 // when clause. This involves popping the predicate, compiling
7706 // the statements to be executed, and then compiling a jump to
7707 // the end of the case node.
7708 PUSH_LABEL(body_seq, label);
7709 PUSH_INSN(body_seq, clause_location, pop);
7710
7711 // Establish branch coverage for the when clause.
7712 if (PM_BRANCH_COVERAGE_P(iseq)) {
7713 rb_code_location_t branch_location = pm_code_location(scope_node, clause->statements != NULL ? ((const pm_node_t *) clause->statements) : ((const pm_node_t *) clause));
7714 add_trace_branch_coverage(iseq, body_seq, &branch_location, branch_location.beg_pos.column, branch_id++, "when", branches);
7715 }
7716
7717 if (clause->statements != NULL) {
7718 pm_compile_node(iseq, (const pm_node_t *) clause->statements, body_seq, popped, scope_node);
7719 }
7720 else if (!popped) {
7721 PUSH_SYNTHETIC_PUTNIL(body_seq, iseq);
7722 }
7723
7724 PUSH_INSNL(body_seq, clause_location, jump, end_label);
7725 }
7726
7727         // Now that we have compiled the conditions and the bodies of the
7728         // various when clauses, we can compile the predicate, lay out the
7729         // conditions, compile the fallback else clause if there is one, and
7730         // finally put in the bodies of the when clauses.
7731 PM_COMPILE_NOT_POPPED(cast->predicate);
7732
7733 // If we have a dispatch hash, then we'll use it here to create the
7734 // optimization.
7735 if (dispatch != Qundef) {
7736 PUSH_INSN(ret, location, dup);
7737             RB_OBJ_SET_SHAREABLE(dispatch); // It is special that the hash is shareable but not frozen, because compile.c modifies it. This Hash instance is not otherwise accessible, so it is safe to leave it unfrozen.
7738 PUSH_INSN2(ret, location, opt_case_dispatch, dispatch, else_label);
7739 LABEL_REF(else_label);
7740 }
7741
7742 PUSH_SEQ(ret, cond_seq);
7743
7744 // Compile either the explicit else clause or an implicit else
7745 // clause.
7746 PUSH_LABEL(ret, else_label);
7747
7748 if (cast->else_clause != NULL) {
7749 pm_node_location_t else_location = PM_NODE_START_LOCATION(parser, cast->else_clause->statements != NULL ? ((const pm_node_t *) cast->else_clause->statements) : ((const pm_node_t *) cast->else_clause));
7750 PUSH_INSN(ret, else_location, pop);
7751
7752 // Establish branch coverage for the else clause.
7753 if (PM_BRANCH_COVERAGE_P(iseq)) {
7754 rb_code_location_t branch_location = pm_code_location(scope_node, cast->else_clause->statements != NULL ? ((const pm_node_t *) cast->else_clause->statements) : ((const pm_node_t *) cast->else_clause));
7755 add_trace_branch_coverage(iseq, ret, &branch_location, branch_location.beg_pos.column, branch_id, "else", branches);
7756 }
7757
7758 PM_COMPILE((const pm_node_t *) cast->else_clause);
7759 PUSH_INSNL(ret, else_location, jump, end_label);
7760 }
7761 else {
7762 PUSH_INSN(ret, location, pop);
7763
7764 // Establish branch coverage for the implicit else clause.
7765 if (PM_BRANCH_COVERAGE_P(iseq)) {
7766 add_trace_branch_coverage(iseq, ret, &case_location, case_location.beg_pos.column, branch_id, "else", branches);
7767 }
7768
7769 if (!popped) PUSH_INSN(ret, location, putnil);
7770 PUSH_INSNL(ret, location, jump, end_label);
7771 }
7772 }
7773
7774 PUSH_SEQ(ret, body_seq);
7775 PUSH_LABEL(ret, end_label);
7776}
7777
7778static inline void
7779pm_compile_case_match_node(rb_iseq_t *iseq, const pm_case_match_node_t *node, const pm_node_location_t *location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
7780{
7781 // This is the anchor that we will compile the bodies of the various
7782 // `in` nodes into. We'll make sure that the patterns that are compiled
7783 // jump into the correct spots within this anchor.
7784 DECL_ANCHOR(body_seq);
7785
7786 // This is the anchor that we will compile the patterns of the various
7787 // `in` nodes into. If a match is found, they will need to jump into the
7788 // body_seq anchor to the correct spot.
7789 DECL_ANCHOR(cond_seq);
7790
7791 // This label is used to indicate the end of the entire node. It is
7792 // jumped to after the entire stack is cleaned up.
7793 LABEL *end_label = NEW_LABEL(location->line);
7794
7795 // This label is used as the fallback for the case match. If no match is
7796 // found, then we jump to this label. This is either an `else` clause or
7797 // an error handler.
7798 LABEL *else_label = NEW_LABEL(location->line);
7799
7800 // We're going to use this to uniquely identify each branch so that we
7801 // can track coverage information.
7802 rb_code_location_t case_location = { 0 };
7803 VALUE branches = Qfalse;
7804 int branch_id = 0;
7805
7806 if (PM_BRANCH_COVERAGE_P(iseq)) {
7807 case_location = pm_code_location(scope_node, (const pm_node_t *) node);
7808 branches = decl_branch_base(iseq, PTR2NUM(node), &case_location, "case");
7809 }
7810
7811 // If there is only one pattern, then the behavior changes a bit. It
7812 // effectively gets treated as a match required node (this is how it is
7813 // represented in the other parser).
7814 bool in_single_pattern = node->else_clause == NULL && node->conditions.size == 1;
7815
7816 // First, we're going to push a bunch of stuff onto the stack that is
7817 // going to serve as our scratch space.
7818 if (in_single_pattern) {
7819 PUSH_INSN(ret, *location, putnil); // key error key
7820 PUSH_INSN(ret, *location, putnil); // key error matchee
7821 PUSH_INSN1(ret, *location, putobject, Qfalse); // key error?
7822 PUSH_INSN(ret, *location, putnil); // error string
7823 }
7824
7825 // Now we're going to compile the value to match against.
7826 PUSH_INSN(ret, *location, putnil); // deconstruct cache
7827 PM_COMPILE_NOT_POPPED(node->predicate);
7828
7829 // Next, we'll loop through every in clause and compile its body into
7830 // the body_seq anchor and its pattern into the cond_seq anchor. We'll
7831 // make sure the pattern knows how to jump correctly into the body if it
7832 // finds a match.
7833 for (size_t index = 0; index < node->conditions.size; index++) {
7834 const pm_node_t *condition = node->conditions.nodes[index];
7835 RUBY_ASSERT(PM_NODE_TYPE_P(condition, PM_IN_NODE));
7836
7837 const pm_in_node_t *in_node = (const pm_in_node_t *) condition;
7838 const pm_node_location_t in_location = PM_NODE_START_LOCATION(scope_node->parser, in_node);
7839 const pm_node_location_t pattern_location = PM_NODE_START_LOCATION(scope_node->parser, in_node->pattern);
7840
7841 if (branch_id) {
7842 PUSH_INSN(body_seq, in_location, putnil);
7843 }
7844
7845 LABEL *body_label = NEW_LABEL(in_location.line);
7846 PUSH_LABEL(body_seq, body_label);
7847 PUSH_INSN1(body_seq, in_location, adjuststack, INT2FIX(in_single_pattern ? 6 : 2));
7848
7849 // Establish branch coverage for the in clause.
7850 if (PM_BRANCH_COVERAGE_P(iseq)) {
7851 rb_code_location_t branch_location = pm_code_location(scope_node, in_node->statements != NULL ? ((const pm_node_t *) in_node->statements) : ((const pm_node_t *) in_node));
7852 add_trace_branch_coverage(iseq, body_seq, &branch_location, branch_location.beg_pos.column, branch_id++, "in", branches);
7853 }
7854
7855 if (in_node->statements != NULL) {
7856 PM_COMPILE_INTO_ANCHOR(body_seq, (const pm_node_t *) in_node->statements);
7857 }
7858 else if (!popped) {
7859 PUSH_SYNTHETIC_PUTNIL(body_seq, iseq);
7860 }
7861
7862 PUSH_INSNL(body_seq, in_location, jump, end_label);
7863 LABEL *next_pattern_label = NEW_LABEL(pattern_location.line);
7864
7865 PUSH_INSN(cond_seq, pattern_location, dup);
7866 pm_compile_pattern(iseq, scope_node, in_node->pattern, cond_seq, body_label, next_pattern_label, in_single_pattern, true, 2);
7867 PUSH_LABEL(cond_seq, next_pattern_label);
7868 LABEL_UNREMOVABLE(next_pattern_label);
7869 }
7870
7871 if (node->else_clause != NULL) {
7872 // If we have an `else` clause, then this becomes our fallback (and
7873 // there is no need to compile in code to potentially raise an
7874 // error).
7875 const pm_else_node_t *else_node = node->else_clause;
7876
7877 PUSH_LABEL(cond_seq, else_label);
7878 PUSH_INSN(cond_seq, *location, pop);
7879 PUSH_INSN(cond_seq, *location, pop);
7880
7881 // Establish branch coverage for the else clause.
7882 if (PM_BRANCH_COVERAGE_P(iseq)) {
7883 rb_code_location_t branch_location = pm_code_location(scope_node, else_node->statements != NULL ? ((const pm_node_t *) else_node->statements) : ((const pm_node_t *) else_node));
7884 add_trace_branch_coverage(iseq, cond_seq, &branch_location, branch_location.beg_pos.column, branch_id, "else", branches);
7885 }
7886
7887 PM_COMPILE_INTO_ANCHOR(cond_seq, (const pm_node_t *) else_node);
7888 PUSH_INSNL(cond_seq, *location, jump, end_label);
7889 PUSH_INSN(cond_seq, *location, putnil);
7890 if (popped) PUSH_INSN(cond_seq, *location, putnil);
7891 }
7892 else {
7893 // Otherwise, if we do not have an `else` clause, we will compile in
7894 // the code to handle raising an appropriate error.
7895 PUSH_LABEL(cond_seq, else_label);
7896
7897 // Establish branch coverage for the implicit else clause.
7898 add_trace_branch_coverage(iseq, cond_seq, &case_location, case_location.beg_pos.column, branch_id, "else", branches);
7899
7900 if (in_single_pattern) {
7901 pm_compile_pattern_error_handler(iseq, scope_node, (const pm_node_t *) node, cond_seq, end_label, popped);
7902 }
7903 else {
7904 PUSH_INSN1(cond_seq, *location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
7905 PUSH_INSN1(cond_seq, *location, putobject, rb_eNoMatchingPatternError);
7906 PUSH_INSN1(cond_seq, *location, topn, INT2FIX(2));
7907 PUSH_SEND(cond_seq, *location, id_core_raise, INT2FIX(2));
7908
7909 PUSH_INSN1(cond_seq, *location, adjuststack, INT2FIX(3));
7910 if (!popped) PUSH_INSN(cond_seq, *location, putnil);
7911 PUSH_INSNL(cond_seq, *location, jump, end_label);
7912 PUSH_INSN1(cond_seq, *location, dupn, INT2FIX(1));
7913 if (popped) PUSH_INSN(cond_seq, *location, putnil);
7914 }
7915 }
7916
7917 // At the end of all of this compilation, we will add the code for the
7918 // conditions first, then the various bodies, then mark the end of the
7919 // entire sequence with the end label.
7920 PUSH_SEQ(ret, cond_seq);
7921 PUSH_SEQ(ret, body_seq);
7922 PUSH_LABEL(ret, end_label);
7923}
7924
7925static inline void
7926pm_compile_forwarding_super_node(rb_iseq_t *iseq, const pm_forwarding_super_node_t *node, const pm_node_location_t *location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
7927{
7928 const rb_iseq_t *block = NULL;
7929 const rb_iseq_t *previous_block = NULL;
7930 LABEL *retry_label = NULL;
7931 LABEL *retry_end_l = NULL;
7932
7933 if (node->block != NULL) {
7934 previous_block = ISEQ_COMPILE_DATA(iseq)->current_block;
7935 ISEQ_COMPILE_DATA(iseq)->current_block = NULL;
7936
7937 retry_label = NEW_LABEL(location->line);
7938 retry_end_l = NEW_LABEL(location->line);
7939
7940 PUSH_LABEL(ret, retry_label);
7941 }
7942 else {
7943 iseq_set_use_block(ISEQ_BODY(iseq)->local_iseq);
7944 }
7945
7946 PUSH_INSN(ret, *location, putself);
7947 int flag = VM_CALL_ZSUPER | VM_CALL_SUPER | VM_CALL_FCALL;
7948
7949 if (node->block != NULL) {
7950 pm_scope_node_t next_scope_node;
7951 pm_scope_node_init((const pm_node_t *) node->block, &next_scope_node, scope_node);
7952
7953 ISEQ_COMPILE_DATA(iseq)->current_block = block = NEW_CHILD_ISEQ(&next_scope_node, make_name_for_block(iseq), ISEQ_TYPE_BLOCK, location->line);
7954 pm_scope_node_destroy(&next_scope_node);
7955 RB_OBJ_WRITTEN(iseq, Qundef, (VALUE) block);
7956 }
7957
7958 DECL_ANCHOR(args);
7959
7960 struct rb_iseq_constant_body *const body = ISEQ_BODY(iseq);
7961 const rb_iseq_t *local_iseq = body->local_iseq;
7962 const struct rb_iseq_constant_body *const local_body = ISEQ_BODY(local_iseq);
7963
7964 int argc = 0;
7965 int depth = get_lvar_level(iseq);
7966
7967 if (ISEQ_BODY(ISEQ_BODY(iseq)->local_iseq)->param.flags.forwardable) {
7968 flag |= VM_CALL_FORWARDING;
7969 pm_local_index_t mult_local = pm_lookup_local_index(iseq, scope_node, PM_CONSTANT_DOT3, 0);
7970 PUSH_GETLOCAL(ret, *location, mult_local.index, mult_local.level);
7971
7972 const struct rb_callinfo *callinfo = new_callinfo(iseq, 0, 0, flag, NULL, block != NULL);
7973 PUSH_INSN2(ret, *location, invokesuperforward, callinfo, block);
7974
7975 if (popped) PUSH_INSN(ret, *location, pop);
7976 if (node->block) {
7977 ISEQ_COMPILE_DATA(iseq)->current_block = previous_block;
7978 }
7979 return;
7980 }
7981
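    // As an illustrative example, for a zsuper call such as:
    //
    //     def foo(a, b = 1, *rest, key:)
    //       super
    //     end
    //
    // the bare `super` implicitly forwards a, b, rest, and key, so the
    // bookkeeping below re-reads each parameter from the local table and
    // rebuilds the matching argc and call flags.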
7982 if (local_body->param.flags.has_lead) {
7983 /* required arguments */
7984 for (int i = 0; i < local_body->param.lead_num; i++) {
7985 int idx = local_body->local_table_size - i;
7986 PUSH_GETLOCAL(args, *location, idx, depth);
7987 }
7988 argc += local_body->param.lead_num;
7989 }
7990
7991 if (local_body->param.flags.has_opt) {
7992 /* optional arguments */
7993 for (int j = 0; j < local_body->param.opt_num; j++) {
7994 int idx = local_body->local_table_size - (argc + j);
7995 PUSH_GETLOCAL(args, *location, idx, depth);
7996 }
7997 argc += local_body->param.opt_num;
7998 }
7999
8000 if (local_body->param.flags.has_rest) {
8001 /* rest argument */
8002 int idx = local_body->local_table_size - local_body->param.rest_start;
8003 PUSH_GETLOCAL(args, *location, idx, depth);
8004 PUSH_INSN1(args, *location, splatarray, Qfalse);
8005
8006 argc = local_body->param.rest_start + 1;
8007 flag |= VM_CALL_ARGS_SPLAT;
8008 }
8009
8010 if (local_body->param.flags.has_post) {
8011 /* post arguments */
8012 int post_len = local_body->param.post_num;
8013 int post_start = local_body->param.post_start;
8014
8015 int j = 0;
8016 for (; j < post_len; j++) {
8017 int idx = local_body->local_table_size - (post_start + j);
8018 PUSH_GETLOCAL(args, *location, idx, depth);
8019 }
8020
8021 if (local_body->param.flags.has_rest) {
8022 // argc remains unchanged from rest branch
8023 PUSH_INSN1(args, *location, newarray, INT2FIX(j));
8024 PUSH_INSN(args, *location, concatarray);
8025 }
8026 else {
8027 argc = post_len + post_start;
8028 }
8029 }
8030
8031 const struct rb_iseq_param_keyword *const local_keyword = local_body->param.keyword;
8032 if (local_body->param.flags.has_kw) {
8033 int local_size = local_body->local_table_size;
8034 argc++;
8035
8036 PUSH_INSN1(args, *location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
8037
8038 if (local_body->param.flags.has_kwrest) {
8039 int idx = local_body->local_table_size - local_keyword->rest_start;
8040 PUSH_GETLOCAL(args, *location, idx, depth);
8041 RUBY_ASSERT(local_keyword->num > 0);
8042 PUSH_SEND(args, *location, rb_intern("dup"), INT2FIX(0));
8043 }
8044 else {
8045 PUSH_INSN1(args, *location, newhash, INT2FIX(0));
8046 }
8047 int i = 0;
8048 for (; i < local_keyword->num; ++i) {
8049 ID id = local_keyword->table[i];
8050 int idx = local_size - get_local_var_idx(local_iseq, id);
8051
8052 {
8053 VALUE operand = ID2SYM(id);
8054 PUSH_INSN1(args, *location, putobject, operand);
8055 }
8056
8057 PUSH_GETLOCAL(args, *location, idx, depth);
8058 }
8059
8060 PUSH_SEND(args, *location, id_core_hash_merge_ptr, INT2FIX(i * 2 + 1));
8061 flag |= VM_CALL_KW_SPLAT| VM_CALL_KW_SPLAT_MUT;
8062 }
8063 else if (local_body->param.flags.has_kwrest) {
8064 int idx = local_body->local_table_size - local_keyword->rest_start;
8065 PUSH_GETLOCAL(args, *location, idx, depth);
8066 argc++;
8067 flag |= VM_CALL_KW_SPLAT;
8068 }
8069
8070 PUSH_SEQ(ret, args);
8071
8072 {
8073 const struct rb_callinfo *callinfo = new_callinfo(iseq, 0, argc, flag, NULL, block != NULL);
8074 PUSH_INSN2(ret, *location, invokesuper, callinfo, block);
8075 }
8076
8077 if (node->block != NULL) {
8078 pm_compile_retry_end_label(iseq, ret, retry_end_l);
8079 PUSH_CATCH_ENTRY(CATCH_TYPE_BREAK, retry_label, retry_end_l, block, retry_end_l);
8080 ISEQ_COMPILE_DATA(iseq)->current_block = previous_block;
8081 }
8082
8083 if (popped) PUSH_INSN(ret, *location, pop);
8084}
8085
8086static inline void
8087pm_compile_match_required_node(rb_iseq_t *iseq, const pm_match_required_node_t *node, const pm_node_location_t *location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
8088{
8089 LABEL *matched_label = NEW_LABEL(location->line);
8090 LABEL *unmatched_label = NEW_LABEL(location->line);
8091 LABEL *done_label = NEW_LABEL(location->line);
8092
8093 // First, we're going to push a bunch of stuff onto the stack that is
8094 // going to serve as our scratch space.
8095 PUSH_INSN(ret, *location, putnil); // key error key
8096 PUSH_INSN(ret, *location, putnil); // key error matchee
8097 PUSH_INSN1(ret, *location, putobject, Qfalse); // key error?
8098 PUSH_INSN(ret, *location, putnil); // error string
8099 PUSH_INSN(ret, *location, putnil); // deconstruct cache
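    // Roughly speaking, these five slots plus the matchee value compiled
    // below account for the adjuststack(6) cleanup in the matched branch.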
8100
8101 // Next we're going to compile the value expression such that it's on
8102 // the stack.
8103 PM_COMPILE_NOT_POPPED(node->value);
8104
8105 // Here we'll dup it so that it can be used for comparison, but also be
8106 // used for error handling.
8107 PUSH_INSN(ret, *location, dup);
8108
8109 // Next we'll compile the pattern. We indicate to the pm_compile_pattern
8110 // function that this is the only pattern that will be matched against
8111 // through the in_single_pattern parameter. We also indicate that the
8112 // value to compare against is 2 slots from the top of the stack (the
8113 // base_index parameter).
8114 pm_compile_pattern(iseq, scope_node, node->pattern, ret, matched_label, unmatched_label, true, true, 2);
8115
8116 // If the pattern did not match the value, then we're going to compile
8117 // in our error handler code. This will determine which error to raise
8118 // and raise it.
8119 PUSH_LABEL(ret, unmatched_label);
8120 pm_compile_pattern_error_handler(iseq, scope_node, (const pm_node_t *) node, ret, done_label, popped);
8121
8122 // If the pattern did match, we'll clean up the values we've pushed onto
8123 // the stack and then push nil onto the stack if it's not popped.
8124 PUSH_LABEL(ret, matched_label);
8125 PUSH_INSN1(ret, *location, adjuststack, INT2FIX(6));
8126 if (!popped) PUSH_INSN(ret, *location, putnil);
8127 PUSH_INSNL(ret, *location, jump, done_label);
8128
8129 PUSH_LABEL(ret, done_label);
8130}
8131
8132static inline void
8133pm_compile_match_write_node(rb_iseq_t *iseq, const pm_match_write_node_t *node, const pm_node_location_t *location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
8134{
8135 LABEL *fail_label = NEW_LABEL(location->line);
8136 LABEL *end_label = NEW_LABEL(location->line);
8137
8138 // First, we'll compile the call so that all of its instructions are
8139 // present. Then we'll compile all of the local variable targets.
8140 PM_COMPILE_NOT_POPPED((const pm_node_t *) node->call);
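    // As an illustrative example:
    //
    //     /(?<foo>\d+)/ =~ bar
    //
    // compiles the =~ call above and then, via the $~ read below, assigns
    // the named capture "foo" into the local variable foo (or nil when the
    // match fails).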
8141
8142 // Now, check if the match was successful. If it was, then we'll
8143 // continue on and assign local variables. Otherwise we'll skip over the
8144 // assignment code.
8145 {
8146 VALUE operand = rb_id2sym(idBACKREF);
8147 PUSH_INSN1(ret, *location, getglobal, operand);
8148 }
8149
8150 PUSH_INSN(ret, *location, dup);
8151 PUSH_INSNL(ret, *location, branchunless, fail_label);
8152
8153 // If there's only a single local variable target, we can skip some of
8154 // the bookkeeping, so we'll put a special branch here.
8155 size_t targets_count = node->targets.size;
8156
8157 if (targets_count == 1) {
8158 const pm_node_t *target = node->targets.nodes[0];
8159 RUBY_ASSERT(PM_NODE_TYPE_P(target, PM_LOCAL_VARIABLE_TARGET_NODE));
8160
8161 const pm_local_variable_target_node_t *local_target = (const pm_local_variable_target_node_t *) target;
8162 pm_local_index_t index = pm_lookup_local_index(iseq, scope_node, local_target->name, local_target->depth);
8163
8164 {
8165 VALUE operand = rb_id2sym(pm_constant_id_lookup(scope_node, local_target->name));
8166 PUSH_INSN1(ret, *location, putobject, operand);
8167 }
8168
8169 PUSH_SEND(ret, *location, idAREF, INT2FIX(1));
8170 PUSH_LABEL(ret, fail_label);
8171 PUSH_SETLOCAL(ret, *location, index.index, index.level);
8172 if (popped) PUSH_INSN(ret, *location, pop);
8173 return;
8174 }
8175
8176 DECL_ANCHOR(fail_anchor);
8177
8178 // Otherwise there is more than one local variable target, so we'll need
8179 // to do some bookkeeping.
8180 for (size_t targets_index = 0; targets_index < targets_count; targets_index++) {
8181 const pm_node_t *target = node->targets.nodes[targets_index];
8182 RUBY_ASSERT(PM_NODE_TYPE_P(target, PM_LOCAL_VARIABLE_TARGET_NODE));
8183
8184 const pm_local_variable_target_node_t *local_target = (const pm_local_variable_target_node_t *) target;
8185 pm_local_index_t index = pm_lookup_local_index(iseq, scope_node, local_target->name, local_target->depth);
8186
8187 if (((size_t) targets_index) < (targets_count - 1)) {
8188 PUSH_INSN(ret, *location, dup);
8189 }
8190
8191 {
8192 VALUE operand = rb_id2sym(pm_constant_id_lookup(scope_node, local_target->name));
8193 PUSH_INSN1(ret, *location, putobject, operand);
8194 }
8195
8196 PUSH_SEND(ret, *location, idAREF, INT2FIX(1));
8197 PUSH_SETLOCAL(ret, *location, index.index, index.level);
8198
8199 PUSH_INSN(fail_anchor, *location, putnil);
8200 PUSH_SETLOCAL(fail_anchor, *location, index.index, index.level);
8201 }
8202
8203 // Since we matched successfully, now we'll jump to the end.
8204 PUSH_INSNL(ret, *location, jump, end_label);
8205
8206 // In the case that the match failed, we'll loop through each local
8207 // variable target and set all of them to `nil`.
8208 PUSH_LABEL(ret, fail_label);
8209 PUSH_INSN(ret, *location, pop);
8210 PUSH_SEQ(ret, fail_anchor);
8211
8212 // Finally, we can push the end label for either case.
8213 PUSH_LABEL(ret, end_label);
8214 if (popped) PUSH_INSN(ret, *location, pop);
8215}
8216
8217static inline void
8218pm_compile_next_node(rb_iseq_t *iseq, const pm_next_node_t *node, const pm_node_location_t *location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
8219{
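    // Roughly: inside a `while`/`until` body, `next` adjusts the stack and
    // jumps back to the loop's start label directly; when the current iseq
    // has an end label it jumps there instead; otherwise it compiles to a
    // `throw TAG_NEXT` so the VM unwinds to the enclosing frame.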
8220 if (ISEQ_COMPILE_DATA(iseq)->redo_label != 0 && can_add_ensure_iseq(iseq)) {
8221 LABEL *splabel = NEW_LABEL(0);
8222 PUSH_LABEL(ret, splabel);
8223
8224 if (node->arguments) {
8225 PM_COMPILE_NOT_POPPED((const pm_node_t *) node->arguments);
8226 }
8227 else {
8228 PUSH_INSN(ret, *location, putnil);
8229 }
8230 pm_add_ensure_iseq(ret, iseq, 0, scope_node);
8231
8232 PUSH_ADJUST(ret, *location, ISEQ_COMPILE_DATA(iseq)->redo_label);
8233 PUSH_INSNL(ret, *location, jump, ISEQ_COMPILE_DATA(iseq)->start_label);
8234
8235 PUSH_ADJUST_RESTORE(ret, splabel);
8236 if (!popped) PUSH_INSN(ret, *location, putnil);
8237 }
8238 else if (ISEQ_COMPILE_DATA(iseq)->end_label && can_add_ensure_iseq(iseq)) {
8239 LABEL *splabel = NEW_LABEL(0);
8240
8241 PUSH_LABEL(ret, splabel);
8242 PUSH_ADJUST(ret, *location, ISEQ_COMPILE_DATA(iseq)->start_label);
8243
8244 if (node->arguments != NULL) {
8245 PM_COMPILE_NOT_POPPED((const pm_node_t *) node->arguments);
8246 }
8247 else {
8248 PUSH_INSN(ret, *location, putnil);
8249 }
8250
8251 pm_add_ensure_iseq(ret, iseq, 0, scope_node);
8252 PUSH_INSNL(ret, *location, jump, ISEQ_COMPILE_DATA(iseq)->end_label);
8253 PUSH_ADJUST_RESTORE(ret, splabel);
8254 splabel->unremovable = FALSE;
8255
8256 if (!popped) PUSH_INSN(ret, *location, putnil);
8257 }
8258 else {
8259 const rb_iseq_t *ip = iseq;
8260 unsigned long throw_flag = 0;
8261
8262 while (ip) {
8263 if (!ISEQ_COMPILE_DATA(ip)) {
8264 ip = 0;
8265 break;
8266 }
8267
8268 throw_flag = VM_THROW_NO_ESCAPE_FLAG;
8269 if (ISEQ_COMPILE_DATA(ip)->redo_label != 0) {
8270 /* while loop */
8271 break;
8272 }
8273 else if (ISEQ_BODY(ip)->type == ISEQ_TYPE_BLOCK) {
8274 break;
8275 }
8276 else if (ISEQ_BODY(ip)->type == ISEQ_TYPE_EVAL) {
8277 COMPILE_ERROR(iseq, location->line, "Invalid next");
8278 return;
8279 }
8280
8281 ip = ISEQ_BODY(ip)->parent_iseq;
8282 }
8283
8284 if (ip != 0) {
8285 if (node->arguments) {
8286 PM_COMPILE_NOT_POPPED((const pm_node_t *) node->arguments);
8287 }
8288 else {
8289 PUSH_INSN(ret, *location, putnil);
8290 }
8291
8292 PUSH_INSN1(ret, *location, throw, INT2FIX(throw_flag | TAG_NEXT));
8293 if (popped) PUSH_INSN(ret, *location, pop);
8294 }
8295 else {
8296 COMPILE_ERROR(iseq, location->line, "Invalid next");
8297 }
8298 }
8299}
8300
8301static inline void
8302pm_compile_redo_node(rb_iseq_t *iseq, const pm_node_location_t *location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
8303{
8304 if (ISEQ_COMPILE_DATA(iseq)->redo_label && can_add_ensure_iseq(iseq)) {
8305 LABEL *splabel = NEW_LABEL(0);
8306
8307 PUSH_LABEL(ret, splabel);
8308 PUSH_ADJUST(ret, *location, ISEQ_COMPILE_DATA(iseq)->redo_label);
8309 pm_add_ensure_iseq(ret, iseq, 0, scope_node);
8310
8311 PUSH_INSNL(ret, *location, jump, ISEQ_COMPILE_DATA(iseq)->redo_label);
8312 PUSH_ADJUST_RESTORE(ret, splabel);
8313 if (!popped) PUSH_INSN(ret, *location, putnil);
8314 }
8315 else if (ISEQ_BODY(iseq)->type != ISEQ_TYPE_EVAL && ISEQ_COMPILE_DATA(iseq)->start_label && can_add_ensure_iseq(iseq)) {
8316 LABEL *splabel = NEW_LABEL(0);
8317
8318 PUSH_LABEL(ret, splabel);
8319 pm_add_ensure_iseq(ret, iseq, 0, scope_node);
8320 PUSH_ADJUST(ret, *location, ISEQ_COMPILE_DATA(iseq)->start_label);
8321
8322 PUSH_INSNL(ret, *location, jump, ISEQ_COMPILE_DATA(iseq)->start_label);
8323 PUSH_ADJUST_RESTORE(ret, splabel);
8324 if (!popped) PUSH_INSN(ret, *location, putnil);
8325 }
8326 else {
8327 const rb_iseq_t *ip = iseq;
8328
8329 while (ip) {
8330 if (!ISEQ_COMPILE_DATA(ip)) {
8331 ip = 0;
8332 break;
8333 }
8334
8335 if (ISEQ_COMPILE_DATA(ip)->redo_label != 0) {
8336 break;
8337 }
8338 else if (ISEQ_BODY(ip)->type == ISEQ_TYPE_BLOCK) {
8339 break;
8340 }
8341 else if (ISEQ_BODY(ip)->type == ISEQ_TYPE_EVAL) {
8342 COMPILE_ERROR(iseq, location->line, "Invalid redo");
8343 return;
8344 }
8345
8346 ip = ISEQ_BODY(ip)->parent_iseq;
8347 }
8348
8349 if (ip != 0) {
8350 PUSH_INSN(ret, *location, putnil);
8351 PUSH_INSN1(ret, *location, throw, INT2FIX(VM_THROW_NO_ESCAPE_FLAG | TAG_REDO));
8352 if (popped) PUSH_INSN(ret, *location, pop);
8353 }
8354 else {
8355 COMPILE_ERROR(iseq, location->line, "Invalid redo");
8356 }
8357 }
8358}
8359
8360static inline void
8361pm_compile_rescue_node(rb_iseq_t *iseq, const pm_rescue_node_t *node, const pm_node_location_t *location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
8362{
8363 iseq_set_exception_local_table(iseq);
8364
8365 // First, establish the labels that we need to be able to jump to within
8366 // this compilation block.
8367 LABEL *exception_match_label = NEW_LABEL(location->line);
8368 LABEL *rescue_end_label = NEW_LABEL(location->line);
8369
8370 // Next, compile each of the exceptions that we're going to be
8371 // handling. For each one, we'll add instructions to check if the
8372 // exception matches the raised one, and if it does then jump to the
8373 // exception_match_label label. Otherwise it will fall through to the
8374 // subsequent check. If there are no exceptions, we'll only check
8375 // StandardError.
8376 const pm_node_list_t *exceptions = &node->exceptions;
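    // As an illustrative example:
    //
    //     begin
    //     rescue TypeError, ArgumentError => error
    //     end
    //
    // emits one checkmatch per listed class, while a bare `rescue` checks
    // against StandardError only.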
8377
8378 if (exceptions->size > 0) {
8379 for (size_t index = 0; index < exceptions->size; index++) {
8380 PUSH_GETLOCAL(ret, *location, LVAR_ERRINFO, 0);
8381 PM_COMPILE(exceptions->nodes[index]);
8382 int checkmatch_flags = VM_CHECKMATCH_TYPE_RESCUE;
8383 if (PM_NODE_TYPE_P(exceptions->nodes[index], PM_SPLAT_NODE)) {
8384 checkmatch_flags |= VM_CHECKMATCH_ARRAY;
8385 }
8386 PUSH_INSN1(ret, *location, checkmatch, INT2FIX(checkmatch_flags));
8387 PUSH_INSNL(ret, *location, branchif, exception_match_label);
8388 }
8389 }
8390 else {
8391 PUSH_GETLOCAL(ret, *location, LVAR_ERRINFO, 0);
8392 PUSH_INSN1(ret, *location, putobject, rb_eStandardError);
8393 PUSH_INSN1(ret, *location, checkmatch, INT2FIX(VM_CHECKMATCH_TYPE_RESCUE));
8394 PUSH_INSNL(ret, *location, branchif, exception_match_label);
8395 }
8396
8397 // If none of the exceptions that we are matching against matched, then
8398 // we'll jump straight to the rescue_end_label label.
8399 PUSH_INSNL(ret, *location, jump, rescue_end_label);
8400
8401 // Here we have the exception_match_label, which is where the
8402 // control-flow goes in the case that one of the exceptions matched.
8403 // Here we will compile the instructions to handle the exception.
8404 PUSH_LABEL(ret, exception_match_label);
8405 PUSH_TRACE(ret, RUBY_EVENT_RESCUE);
8406
8407 // If we have a reference to the exception, then we'll compile the write
8408 // into the instruction sequence. This can look quite different
8409 // depending on the kind of write being performed.
8410 if (node->reference) {
8411 DECL_ANCHOR(writes);
8412 DECL_ANCHOR(cleanup);
8413
8414 pm_compile_target_node(iseq, node->reference, ret, writes, cleanup, scope_node, NULL);
8415 PUSH_GETLOCAL(ret, *location, LVAR_ERRINFO, 0);
8416
8417 PUSH_SEQ(ret, writes);
8418 PUSH_SEQ(ret, cleanup);
8419 }
8420
8421 // If we have statements to execute, we'll compile them here. Otherwise
8422 // we'll push nil onto the stack.
8423 if (node->statements != NULL) {
8424 // We'll temporarily remove the end_label location from the iseq
8425 // when compiling the statements so that next/redo statements
8426 // inside the body will throw to the correct place instead of
8427 // jumping straight to the end of this iseq
8428 LABEL *prev_end = ISEQ_COMPILE_DATA(iseq)->end_label;
8429 ISEQ_COMPILE_DATA(iseq)->end_label = NULL;
8430
8431 PM_COMPILE((const pm_node_t *) node->statements);
8432
8433 // Now restore the end_label
8434 ISEQ_COMPILE_DATA(iseq)->end_label = prev_end;
8435 }
8436 else {
8437 PUSH_INSN(ret, *location, putnil);
8438 }
8439
8440 PUSH_INSN(ret, *location, leave);
8441
8442 // Here we'll insert the rescue_end_label label, which is jumped to if
8443 // none of the exceptions matched. It will cause the control-flow to
8444 // either jump to the next rescue clause or fall through to the
8445 // subsequent instruction, which returns the raised error.
8446 PUSH_LABEL(ret, rescue_end_label);
8447 if (node->subsequent != NULL) {
8448 PM_COMPILE((const pm_node_t *) node->subsequent);
8449 }
8450 else {
8451 PUSH_GETLOCAL(ret, *location, 1, 0);
8452 }
8453}
8454
8455static inline void
8456pm_compile_return_node(rb_iseq_t *iseq, const pm_return_node_t *node, const pm_node_location_t *location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
8457{
8458 const pm_arguments_node_t *arguments = node->arguments;
8459 enum rb_iseq_type type = ISEQ_BODY(iseq)->type;
8460 LABEL *splabel = 0;
8461
8462 const rb_iseq_t *parent_iseq = iseq;
8463 enum rb_iseq_type parent_type = ISEQ_BODY(parent_iseq)->type;
8464 while (parent_type == ISEQ_TYPE_RESCUE || parent_type == ISEQ_TYPE_ENSURE) {
8465 if (!(parent_iseq = ISEQ_BODY(parent_iseq)->parent_iseq)) break;
8466 parent_type = ISEQ_BODY(parent_iseq)->type;
8467 }
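    // This walk means, for example, that the "argument of top-level return
    // is ignored" warning below still fires when the `return` is written
    // inside a top-level `begin/rescue/ensure` body.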
8468
8469 switch (parent_type) {
8470 case ISEQ_TYPE_TOP:
8471 case ISEQ_TYPE_MAIN:
8472 if (arguments) {
8473 rb_warn("argument of top-level return is ignored");
8474 }
8475 if (parent_iseq == iseq) {
8476 type = ISEQ_TYPE_METHOD;
8477 }
8478 break;
8479 default:
8480 break;
8481 }
8482
8483 if (type == ISEQ_TYPE_METHOD) {
8484 splabel = NEW_LABEL(0);
8485 PUSH_LABEL(ret, splabel);
8486 PUSH_ADJUST(ret, *location, 0);
8487 }
8488
8489 if (arguments != NULL) {
8490 PM_COMPILE_NOT_POPPED((const pm_node_t *) arguments);
8491 }
8492 else {
8493 PUSH_INSN(ret, *location, putnil);
8494 }
8495
8496 if (type == ISEQ_TYPE_METHOD && can_add_ensure_iseq(iseq)) {
8497 pm_add_ensure_iseq(ret, iseq, 1, scope_node);
8498 PUSH_TRACE(ret, RUBY_EVENT_RETURN);
8499 PUSH_INSN(ret, *location, leave);
8500 PUSH_ADJUST_RESTORE(ret, splabel);
8501 if (!popped) PUSH_INSN(ret, *location, putnil);
8502 }
8503 else {
8504 PUSH_INSN1(ret, *location, throw, INT2FIX(TAG_RETURN));
8505 if (popped) PUSH_INSN(ret, *location, pop);
8506 }
8507}
8508
8509static inline void
8510pm_compile_super_node(rb_iseq_t *iseq, const pm_super_node_t *node, const pm_node_location_t *location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
8511{
8512 DECL_ANCHOR(args);
8513
8514 LABEL *retry_label = NEW_LABEL(location->line);
8515 LABEL *retry_end_l = NEW_LABEL(location->line);
8516
8517 const rb_iseq_t *previous_block = ISEQ_COMPILE_DATA(iseq)->current_block;
8518 const rb_iseq_t *current_block;
8519 ISEQ_COMPILE_DATA(iseq)->current_block = current_block = NULL;
8520
8521 PUSH_LABEL(ret, retry_label);
8522 PUSH_INSN(ret, *location, putself);
8523
8524 int flags = 0;
8525 struct rb_callinfo_kwarg *keywords = NULL;
8526 int argc = pm_setup_args(node->arguments, node->block, &flags, &keywords, iseq, ret, scope_node, location);
8527 bool is_forwardable = (node->arguments != NULL) && PM_NODE_FLAG_P(node->arguments, PM_ARGUMENTS_NODE_FLAGS_CONTAINS_FORWARDING);
8528 flags |= VM_CALL_SUPER | VM_CALL_FCALL;
8529
8530 if (node->block && PM_NODE_TYPE_P(node->block, PM_BLOCK_NODE)) {
8531 pm_scope_node_t next_scope_node;
8532 pm_scope_node_init(node->block, &next_scope_node, scope_node);
8533
8534 ISEQ_COMPILE_DATA(iseq)->current_block = current_block = NEW_CHILD_ISEQ(&next_scope_node, make_name_for_block(iseq), ISEQ_TYPE_BLOCK, location->line);
8535 pm_scope_node_destroy(&next_scope_node);
8536 }
8537
8538 if (!node->block) {
8539 iseq_set_use_block(ISEQ_BODY(iseq)->local_iseq);
8540 }
8541
8542 if ((flags & VM_CALL_ARGS_BLOCKARG) && (flags & VM_CALL_KW_SPLAT) && !(flags & VM_CALL_KW_SPLAT_MUT)) {
8543 PUSH_INSN(args, *location, splatkw);
8544 }
8545
8546 PUSH_SEQ(ret, args);
8547 if (is_forwardable && ISEQ_BODY(ISEQ_BODY(iseq)->local_iseq)->param.flags.forwardable) {
8548 flags |= VM_CALL_FORWARDING;
8549
8550 {
8551 const struct rb_callinfo *callinfo = new_callinfo(iseq, 0, argc, flags, keywords, current_block != NULL);
8552 PUSH_INSN2(ret, *location, invokesuperforward, callinfo, current_block);
8553 }
8554 }
8555 else {
8556 {
8557 const struct rb_callinfo *callinfo = new_callinfo(iseq, 0, argc, flags, keywords, current_block != NULL);
8558 PUSH_INSN2(ret, *location, invokesuper, callinfo, current_block);
8559 }
8560
8561 }
8562
8563 pm_compile_retry_end_label(iseq, ret, retry_end_l);
8564
8565 if (popped) PUSH_INSN(ret, *location, pop);
8566 ISEQ_COMPILE_DATA(iseq)->current_block = previous_block;
8567 PUSH_CATCH_ENTRY(CATCH_TYPE_BREAK, retry_label, retry_end_l, current_block, retry_end_l);
8568}
8569
8570static inline void
8571pm_compile_yield_node(rb_iseq_t *iseq, const pm_yield_node_t *node, const pm_node_location_t *location, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
8572{
8573 switch (ISEQ_BODY(ISEQ_BODY(iseq)->local_iseq)->type) {
8574 case ISEQ_TYPE_TOP:
8575 case ISEQ_TYPE_MAIN:
8576 case ISEQ_TYPE_CLASS:
8577 COMPILE_ERROR(iseq, location->line, "Invalid yield");
8578 return;
8579 default: /* valid */;
8580 }
8581
8582 int argc = 0;
8583 int flags = 0;
8584 struct rb_callinfo_kwarg *keywords = NULL;
8585
8586 if (node->arguments) {
8587 argc = pm_setup_args(node->arguments, NULL, &flags, &keywords, iseq, ret, scope_node, location);
8588 }
8589
8590 const struct rb_callinfo *callinfo = new_callinfo(iseq, 0, argc, flags, keywords, FALSE);
8591 PUSH_INSN1(ret, *location, invokeblock, callinfo);
8592
8593 iseq_set_use_block(ISEQ_BODY(iseq)->local_iseq);
8594 if (popped) PUSH_INSN(ret, *location, pop);
8595
8596 int level = 0;
8597 for (const rb_iseq_t *tmp_iseq = iseq; tmp_iseq != ISEQ_BODY(iseq)->local_iseq; level++) {
8598 tmp_iseq = ISEQ_BODY(tmp_iseq)->parent_iseq;
8599 }
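    // As an illustrative example, in `def m; [1].each { yield }; end` the
    // yield sits one block level below the method, so level is 1 and the
    // access is recorded against the enclosing frames.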
8600
8601 if (level > 0) access_outer_variables(iseq, level, rb_intern("yield"), true);
8602}
8603
8614static void
8615pm_compile_node(rb_iseq_t *iseq, const pm_node_t *node, LINK_ANCHOR *const ret, bool popped, pm_scope_node_t *scope_node)
8616{
8617 const pm_parser_t *parser = scope_node->parser;
8618 const pm_node_location_t location = PM_NODE_START_LOCATION(parser, node);
8619 int lineno = (int) location.line;
8620
8621 if (PM_NODE_TYPE_P(node, PM_BEGIN_NODE) && (((const pm_begin_node_t *) node)->statements == NULL) && (((const pm_begin_node_t *) node)->rescue_clause != NULL)) {
8622 // If this node is a begin node and it has empty statements and also
8623 // has a rescue clause, then the other parser considers it as
8624 // starting on the same line as the rescue, as opposed to the
8625 // location of the begin keyword. We replicate that behavior here.
8626 lineno = (int) PM_NODE_START_LINE_COLUMN(parser, ((const pm_begin_node_t *) node)->rescue_clause).line;
8627 }
8628
8629 if (PM_NODE_FLAG_P(node, PM_NODE_FLAG_NEWLINE) && ISEQ_COMPILE_DATA(iseq)->last_line != lineno) {
8630 // If this node has the newline flag set and it is on a new line
8631 // from the previous nodes that have been compiled for this ISEQ,
8632 // then we need to emit a newline event.
8633 int event = RUBY_EVENT_LINE;
8634
8635 ISEQ_COMPILE_DATA(iseq)->last_line = lineno;
8636 if (lineno > 0 && ISEQ_COVERAGE(iseq) && ISEQ_LINE_COVERAGE(iseq)) {
8637 event |= RUBY_EVENT_COVERAGE_LINE;
8638 }
8639 PUSH_TRACE(ret, event);
8640 }
8641
8642 switch (PM_NODE_TYPE(node)) {
8643 case PM_ALIAS_GLOBAL_VARIABLE_NODE:
8644 // alias $foo $bar
8645 // ^^^^^^^^^^^^^^^
8646 pm_compile_alias_global_variable_node(iseq, (const pm_alias_global_variable_node_t *) node, &location, ret, popped, scope_node);
8647 return;
8648 case PM_ALIAS_METHOD_NODE:
8649 // alias foo bar
8650 // ^^^^^^^^^^^^^
8651 pm_compile_alias_method_node(iseq, (const pm_alias_method_node_t *) node, &location, ret, popped, scope_node);
8652 return;
8653 case PM_AND_NODE:
8654 // a and b
8655 // ^^^^^^^
8656 pm_compile_and_node(iseq, (const pm_and_node_t *) node, &location, ret, popped, scope_node);
8657 return;
8658 case PM_ARGUMENTS_NODE: {
8659 // break foo
8660 // ^^^
8661 //
8662 // These are ArgumentsNodes that are not compiled directly by their
8663 // parent call nodes, used in the cases of NextNodes, ReturnNodes, and
8664 // BreakNodes. When given multiple arguments, they create an array, like an ArrayNode.
8665 const pm_arguments_node_t *cast = (const pm_arguments_node_t *) node;
8666 const pm_node_list_t *elements = &cast->arguments;
8667
8668 if (elements->size == 1) {
8669 // If we are only returning a single element through one of the jump
8670 // nodes, then we compile that node directly.
8671 PM_COMPILE(elements->nodes[0]);
8672 }
8673 else {
8674 pm_compile_array_node(iseq, (const pm_node_t *) cast, elements, &location, ret, popped, scope_node);
8675 }
8676 return;
8677 }
8678 case PM_ARRAY_NODE: {
8679 // [foo, bar, baz]
8680 // ^^^^^^^^^^^^^^^
8681 const pm_array_node_t *cast = (const pm_array_node_t *) node;
8682 pm_compile_array_node(iseq, (const pm_node_t *) cast, &cast->elements, &location, ret, popped, scope_node);
8683 return;
8684 }
8685 case PM_ASSOC_NODE: {
8686 // { foo: 1 }
8687 // ^^^^^^
8688 //
8689 // foo(bar: 1)
8690 // ^^^^^^
8691 const pm_assoc_node_t *cast = (const pm_assoc_node_t *) node;
8692
8693 PM_COMPILE(cast->key);
8694 PM_COMPILE(cast->value);
8695
8696 return;
8697 }
8698 case PM_ASSOC_SPLAT_NODE: {
8699 // { **foo }
8700 // ^^^^^
8701 //
8702 // def foo(**); bar(**); end
8703 // ^^
8704 const pm_assoc_splat_node_t *cast = (const pm_assoc_splat_node_t *) node;
8705
8706 if (cast->value != NULL) {
8707 PM_COMPILE(cast->value);
8708 }
8709 else if (!popped) {
8710 pm_local_index_t index = pm_lookup_local_index(iseq, scope_node, PM_CONSTANT_POW, 0);
8711 PUSH_GETLOCAL(ret, location, index.index, index.level);
8712 }
8713
8714 return;
8715 }
8716 case PM_BACK_REFERENCE_READ_NODE: {
8717 // $+
8718 // ^^
8719 if (!popped) {
8720 const pm_back_reference_read_node_t *cast = (const pm_back_reference_read_node_t *) node;
8721 VALUE backref = pm_compile_back_reference_ref(cast);
8722
8723 PUSH_INSN2(ret, location, getspecial, INT2FIX(1), backref);
8724 }
8725 return;
8726 }
8727 case PM_BEGIN_NODE: {
8728 // begin end
8729 // ^^^^^^^^^
8730 const pm_begin_node_t *cast = (const pm_begin_node_t *) node;
8731
8732 if (cast->ensure_clause) {
8733 // Compiling the ensure clause will compile the rescue clause (if
8734 // there is one), which will compile the begin statements.
8735 pm_compile_ensure(iseq, cast, &location, ret, popped, scope_node);
8736 }
8737 else if (cast->rescue_clause) {
8738 // Compiling rescue will compile begin statements (if applicable).
8739 pm_compile_rescue(iseq, cast, &location, ret, popped, scope_node);
8740 }
8741 else {
8742 // If there is neither an ensure nor a rescue clause, then just
8743 // compile the statements.
8744 if (cast->statements != NULL) {
8745 PM_COMPILE((const pm_node_t *) cast->statements);
8746 }
8747 else if (!popped) {
8748 PUSH_SYNTHETIC_PUTNIL(ret, iseq);
8749 }
8750 }
8751 return;
8752 }
8753 case PM_BLOCK_ARGUMENT_NODE: {
8754 // foo(&bar)
8755 // ^^^^
8756 const pm_block_argument_node_t *cast = (const pm_block_argument_node_t *) node;
8757
8758 if (cast->expression != NULL) {
8759 PM_COMPILE(cast->expression);
8760 }
8761 else {
8762 // If there's no expression, this must be block forwarding.
8763 pm_local_index_t local_index = pm_lookup_local_index(iseq, scope_node, PM_CONSTANT_AND, 0);
8764 PUSH_INSN2(ret, location, getblockparamproxy, INT2FIX(local_index.index + VM_ENV_DATA_SIZE - 1), INT2FIX(local_index.level));
8765 }
8766 return;
8767 }
8768 case PM_BREAK_NODE:
8769 // break
8770 // ^^^^^
8771 //
8772 // break foo
8773 // ^^^^^^^^^
8774 pm_compile_break_node(iseq, (const pm_break_node_t *) node, &location, ret, popped, scope_node);
8775 return;
8776 case PM_CALL_NODE:
8777 // foo
8778 // ^^^
8779 //
8780 // foo.bar
8781 // ^^^^^^^
8782 //
8783 // foo.bar() {}
8784 // ^^^^^^^^^^^^
8785 pm_compile_call_node(iseq, (const pm_call_node_t *) node, ret, popped, scope_node);
8786 return;
8787 case PM_CALL_AND_WRITE_NODE: {
8788 // foo.bar &&= baz
8789 // ^^^^^^^^^^^^^^^
8790 const pm_call_and_write_node_t *cast = (const pm_call_and_write_node_t *) node;
8791 pm_compile_call_and_or_write_node(iseq, true, cast->receiver, cast->value, cast->write_name, cast->read_name, PM_NODE_FLAG_P(cast, PM_CALL_NODE_FLAGS_SAFE_NAVIGATION), &location, ret, popped, scope_node);
8792 return;
8793 }
8794 case PM_CALL_OR_WRITE_NODE: {
8795 // foo.bar ||= baz
8796 // ^^^^^^^^^^^^^^^
8797 const pm_call_or_write_node_t *cast = (const pm_call_or_write_node_t *) node;
8798 pm_compile_call_and_or_write_node(iseq, false, cast->receiver, cast->value, cast->write_name, cast->read_name, PM_NODE_FLAG_P(cast, PM_CALL_NODE_FLAGS_SAFE_NAVIGATION), &location, ret, popped, scope_node);
8799 return;
8800 }
8801 case PM_CALL_OPERATOR_WRITE_NODE:
8802 // foo.bar += baz
8803 // ^^^^^^^^^^^^^^^
8804 //
8805 // Call operator writes occur when you have a call node on the left-hand
8806 // side of a write operator that is not `=`. As an example,
8807 // `foo.bar *= 1`. This breaks down to caching the receiver on the
8808 // stack and then performing three method calls, one to read the value,
8809 // one to compute the result, and one to write the result back to the
8810 // receiver.
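        // Roughly, `foo.bar += baz` behaves like:
        //
        //     receiver = foo
        //     receiver.bar = receiver.bar + baz
        //
        // with `receiver` evaluated only once.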
8811 pm_compile_call_operator_write_node(iseq, (const pm_call_operator_write_node_t *) node, &location, ret, popped, scope_node);
8812 return;
8813 case PM_CASE_NODE:
8814 // case foo; when bar; end
8815 // ^^^^^^^^^^^^^^^^^^^^^^^
8816 pm_compile_case_node(iseq, (const pm_case_node_t *) node, &location, ret, popped, scope_node);
8817 return;
8818 case PM_CASE_MATCH_NODE:
8819 // case foo; in bar; end
8820 // ^^^^^^^^^^^^^^^^^^^^^
8821 //
8822 // If you use the `case` keyword to create a case match node, it will
8823 // match against all of the `in` clauses until it finds one that
8824 // matches. If it doesn't find one, it can optionally fall back to an
8825 // `else` clause. If none is present and a match wasn't found, it will
8826 // raise an appropriate error.
8827 pm_compile_case_match_node(iseq, (const pm_case_match_node_t *) node, &location, ret, popped, scope_node);
8828 return;
8829 case PM_CLASS_NODE: {
8830 // class Foo; end
8831 // ^^^^^^^^^^^^^^
8832 const pm_class_node_t *cast = (const pm_class_node_t *) node;
8833
8834 ID class_id = pm_constant_id_lookup(scope_node, cast->name);
8835 VALUE class_name = rb_str_freeze(rb_sprintf("<class:%"PRIsVALUE">", rb_id2str(class_id)));
8836
8837 pm_scope_node_t next_scope_node;
8838 pm_scope_node_init((const pm_node_t *) cast, &next_scope_node, scope_node);
8839
8840 const rb_iseq_t *class_iseq = NEW_CHILD_ISEQ(&next_scope_node, class_name, ISEQ_TYPE_CLASS, location.line);
8841 pm_scope_node_destroy(&next_scope_node);
8842
8843 // TODO: Once we merge constant path nodes correctly, fix this flag
8844 const int flags = VM_DEFINECLASS_TYPE_CLASS |
8845 (cast->superclass ? VM_DEFINECLASS_FLAG_HAS_SUPERCLASS : 0) |
8846 pm_compile_class_path(iseq, cast->constant_path, &location, ret, false, scope_node);
8847
8848 if (cast->superclass) {
8849 PM_COMPILE_NOT_POPPED(cast->superclass);
8850 }
8851 else {
8852 PUSH_INSN(ret, location, putnil);
8853 }
8854
8855 {
8856 VALUE operand = ID2SYM(class_id);
8857 PUSH_INSN3(ret, location, defineclass, operand, class_iseq, INT2FIX(flags));
8858 }
8859 RB_OBJ_WRITTEN(iseq, Qundef, (VALUE)class_iseq);
8860
8861 if (popped) PUSH_INSN(ret, location, pop);
8862 return;
8863 }
8864 case PM_CLASS_VARIABLE_AND_WRITE_NODE: {
8865 // @@foo &&= bar
8866 // ^^^^^^^^^^^^^
8867 const pm_class_variable_and_write_node_t *cast = (const pm_class_variable_and_write_node_t *) node;
8868 LABEL *end_label = NEW_LABEL(location.line);
8869
8870 ID name_id = pm_constant_id_lookup(scope_node, cast->name);
8871 VALUE name = ID2SYM(name_id);
8872
8873 PUSH_INSN2(ret, location, getclassvariable, name, get_cvar_ic_value(iseq, name_id));
8874 if (!popped) PUSH_INSN(ret, location, dup);
8875
8876 PUSH_INSNL(ret, location, branchunless, end_label);
8877 if (!popped) PUSH_INSN(ret, location, pop);
8878
8879 PM_COMPILE_NOT_POPPED(cast->value);
8880 if (!popped) PUSH_INSN(ret, location, dup);
8881
8882 PUSH_INSN2(ret, location, setclassvariable, name, get_cvar_ic_value(iseq, name_id));
8883 PUSH_LABEL(ret, end_label);
8884
8885 return;
8886 }
8887 case PM_CLASS_VARIABLE_OPERATOR_WRITE_NODE: {
8888 // @@foo += bar
8889 // ^^^^^^^^^^^^
8890 const pm_class_variable_operator_write_node_t *cast = (const pm_class_variable_operator_write_node_t *) node;
8891
8892 ID name_id = pm_constant_id_lookup(scope_node, cast->name);
8893 VALUE name = ID2SYM(name_id);
8894
8895 PUSH_INSN2(ret, location, getclassvariable, name, get_cvar_ic_value(iseq, name_id));
8896 PM_COMPILE_NOT_POPPED(cast->value);
8897
8898 ID method_id = pm_constant_id_lookup(scope_node, cast->binary_operator);
8899 int flags = VM_CALL_ARGS_SIMPLE;
8900 PUSH_SEND_WITH_FLAG(ret, location, method_id, INT2NUM(1), INT2FIX(flags));
8901
8902 if (!popped) PUSH_INSN(ret, location, dup);
8903 PUSH_INSN2(ret, location, setclassvariable, name, get_cvar_ic_value(iseq, name_id));
8904
8905 return;
8906 }
8907 case PM_CLASS_VARIABLE_OR_WRITE_NODE: {
8908 // @@foo ||= bar
8909 // ^^^^^^^^^^^^^
8910 const pm_class_variable_or_write_node_t *cast = (const pm_class_variable_or_write_node_t *) node;
8911 LABEL *end_label = NEW_LABEL(location.line);
8912 LABEL *start_label = NEW_LABEL(location.line);
8913
8914 ID name_id = pm_constant_id_lookup(scope_node, cast->name);
8915 VALUE name = ID2SYM(name_id);
8916
8917 PUSH_INSN(ret, location, putnil);
8918 PUSH_INSN3(ret, location, defined, INT2FIX(DEFINED_CVAR), name, Qtrue);
8919 PUSH_INSNL(ret, location, branchunless, start_label);
8920
8921 PUSH_INSN2(ret, location, getclassvariable, name, get_cvar_ic_value(iseq, name_id));
8922 if (!popped) PUSH_INSN(ret, location, dup);
8923
8924 PUSH_INSNL(ret, location, branchif, end_label);
8925 if (!popped) PUSH_INSN(ret, location, pop);
8926
8927 PUSH_LABEL(ret, start_label);
8928 PM_COMPILE_NOT_POPPED(cast->value);
8929 if (!popped) PUSH_INSN(ret, location, dup);
8930
8931 PUSH_INSN2(ret, location, setclassvariable, name, get_cvar_ic_value(iseq, name_id));
8932 PUSH_LABEL(ret, end_label);
8933
8934 return;
8935 }
8936 case PM_CLASS_VARIABLE_READ_NODE: {
8937 // @@foo
8938 // ^^^^^
8939 if (!popped) {
8940 const pm_class_variable_read_node_t *cast = (const pm_class_variable_read_node_t *) node;
8941 ID name = pm_constant_id_lookup(scope_node, cast->name);
8942 PUSH_INSN2(ret, location, getclassvariable, ID2SYM(name), get_cvar_ic_value(iseq, name));
8943 }
8944 return;
8945 }
8946 case PM_CLASS_VARIABLE_WRITE_NODE: {
8947 // @@foo = 1
8948 // ^^^^^^^^^
8949 const pm_class_variable_write_node_t *cast = (const pm_class_variable_write_node_t *) node;
8950 PM_COMPILE_NOT_POPPED(cast->value);
8951 if (!popped) PUSH_INSN(ret, location, dup);
8952
8953 ID name = pm_constant_id_lookup(scope_node, cast->name);
8954 PUSH_INSN2(ret, location, setclassvariable, ID2SYM(name), get_cvar_ic_value(iseq, name));
8955
8956 return;
8957 }
8958 case PM_CONSTANT_PATH_NODE: {
8959 // Foo::Bar
8960 // ^^^^^^^^
8961 VALUE parts;
8962
8963 if (ISEQ_COMPILE_DATA(iseq)->option->inline_const_cache && ((parts = pm_constant_path_parts(node, scope_node)) != Qnil)) {
8964 ISEQ_BODY(iseq)->ic_size++;
8965 RB_OBJ_SET_SHAREABLE(parts);
8966 PUSH_INSN1(ret, location, opt_getconstant_path, parts);
8967 }
8968 else {
8969 DECL_ANCHOR(prefix);
8970 DECL_ANCHOR(body);
8971
8972 pm_compile_constant_path(iseq, node, prefix, body, popped, scope_node);
8973 if (LIST_INSN_SIZE_ZERO(prefix)) {
8974 PUSH_INSN(ret, location, putnil);
8975 }
8976 else {
8977 PUSH_SEQ(ret, prefix);
8978 }
8979
8980 PUSH_SEQ(ret, body);
8981 }
8982
8983 if (popped) PUSH_INSN(ret, location, pop);
8984 return;
8985 }
8986 case PM_CONSTANT_PATH_AND_WRITE_NODE: {
8987 // Foo::Bar &&= baz
8988 // ^^^^^^^^^^^^^^^^
8989 const pm_constant_path_and_write_node_t *cast = (const pm_constant_path_and_write_node_t *) node;
8990 pm_compile_constant_path_and_write_node(iseq, cast, 0, &location, ret, popped, scope_node);
8991 return;
8992 }
8993 case PM_CONSTANT_PATH_OR_WRITE_NODE: {
8994 // Foo::Bar ||= baz
8995 // ^^^^^^^^^^^^^^^^
8996 const pm_constant_path_or_write_node_t *cast = (const pm_constant_path_or_write_node_t *) node;
8997 pm_compile_constant_path_or_write_node(iseq, cast, 0, &location, ret, popped, scope_node);
8998 return;
8999 }
9000 case PM_CONSTANT_PATH_OPERATOR_WRITE_NODE: {
9001 // Foo::Bar += baz
9002 // ^^^^^^^^^^^^^^^
9003 const pm_constant_path_operator_write_node_t *cast = (const pm_constant_path_operator_write_node_t *) node;
9004 pm_compile_constant_path_operator_write_node(iseq, cast, 0, &location, ret, popped, scope_node);
9005 return;
9006 }
9007 case PM_CONSTANT_PATH_WRITE_NODE: {
9008 // Foo::Bar = 1
9009 // ^^^^^^^^^^^^
9010 const pm_constant_path_write_node_t *cast = (const pm_constant_path_write_node_t *) node;
9011 pm_compile_constant_path_write_node(iseq, cast, 0, &location, ret, popped, scope_node);
9012 return;
9013 }
9014 case PM_CONSTANT_READ_NODE: {
9015 // Foo
9016 // ^^^
9017 const pm_constant_read_node_t *cast = (const pm_constant_read_node_t *) node;
9018 VALUE name = ID2SYM(pm_constant_id_lookup(scope_node, cast->name));
9019
9020 pm_compile_constant_read(iseq, name, &cast->base.location, location.node_id, ret, scope_node);
9021 if (popped) PUSH_INSN(ret, location, pop);
9022
9023 return;
9024 }
9025 case PM_CONSTANT_AND_WRITE_NODE: {
9026 // Foo &&= bar
9027 // ^^^^^^^^^^^
9028 const pm_constant_and_write_node_t *cast = (const pm_constant_and_write_node_t *) node;
9029 pm_compile_constant_and_write_node(iseq, cast, 0, &location, ret, popped, scope_node);
9030 return;
9031 }
9032 case PM_CONSTANT_OR_WRITE_NODE: {
9033 // Foo ||= bar
9034 // ^^^^^^^^^^^
9035 const pm_constant_or_write_node_t *cast = (const pm_constant_or_write_node_t *) node;
9036 pm_compile_constant_or_write_node(iseq, cast, 0, &location, ret, popped, scope_node);
9037 return;
9038 }
9039 case PM_CONSTANT_OPERATOR_WRITE_NODE: {
9040 // Foo += bar
9041 // ^^^^^^^^^^
9042 const pm_constant_operator_write_node_t *cast = (const pm_constant_operator_write_node_t *) node;
9043 pm_compile_constant_operator_write_node(iseq, cast, 0, &location, ret, popped, scope_node);
9044 return;
9045 }
9046 case PM_CONSTANT_WRITE_NODE: {
9047 // Foo = 1
9048 // ^^^^^^^
9049 const pm_constant_write_node_t *cast = (const pm_constant_write_node_t *) node;
9050 pm_compile_constant_write_node(iseq, cast, 0, &location, ret, popped, scope_node);
9051 return;
9052 }
9053 case PM_DEF_NODE: {
9054 // def foo; end
9055 // ^^^^^^^^^^^^
9056 //
9057 // def self.foo; end
9058 // ^^^^^^^^^^^^^^^^^
9059 const pm_def_node_t *cast = (const pm_def_node_t *) node;
9060 ID method_name = pm_constant_id_lookup(scope_node, cast->name);
9061
9062 pm_scope_node_t next_scope_node;
9063 pm_scope_node_init((const pm_node_t *) cast, &next_scope_node, scope_node);
9064
9065 rb_iseq_t *method_iseq = NEW_ISEQ(&next_scope_node, rb_id2str(method_name), ISEQ_TYPE_METHOD, location.line);
9066 pm_scope_node_destroy(&next_scope_node);
9067
9068 if (cast->receiver) {
9069 PM_COMPILE_NOT_POPPED(cast->receiver);
9070 PUSH_INSN2(ret, location, definesmethod, ID2SYM(method_name), method_iseq);
9071 }
9072 else {
9073 PUSH_INSN2(ret, location, definemethod, ID2SYM(method_name), method_iseq);
9074 }
9075 RB_OBJ_WRITTEN(iseq, Qundef, (VALUE) method_iseq);
9076
9077 if (!popped) {
9078 PUSH_INSN1(ret, location, putobject, ID2SYM(method_name));
9079 }
9080
9081 return;
9082 }
9083 case PM_DEFINED_NODE: {
9084 // defined?(a)
9085 // ^^^^^^^^^^^
9086 const pm_defined_node_t *cast = (const pm_defined_node_t *) node;
9087 pm_compile_defined_expr(iseq, cast->value, &location, ret, popped, scope_node, false);
9088 return;
9089 }
9090 case PM_EMBEDDED_STATEMENTS_NODE: {
9091 // "foo #{bar}"
9092 // ^^^^^^
9093 const pm_embedded_statements_node_t *cast = (const pm_embedded_statements_node_t *) node;
9094
9095 if (cast->statements != NULL) {
9096 PM_COMPILE((const pm_node_t *) (cast->statements));
9097 }
9098 else {
9099 PUSH_SYNTHETIC_PUTNIL(ret, iseq);
9100 }
9101
9102 if (popped) PUSH_INSN(ret, location, pop);
9103 return;
9104 }
9105 case PM_EMBEDDED_VARIABLE_NODE: {
9106 // "foo #@bar"
9107 // ^^^^^
9108 const pm_embedded_variable_node_t *cast = (const pm_embedded_variable_node_t *) node;
9109 PM_COMPILE(cast->variable);
9110 return;
9111 }
9112 case PM_FALSE_NODE: {
9113 // false
9114 // ^^^^^
9115 if (!popped) {
9116 PUSH_INSN1(ret, location, putobject, Qfalse);
9117 }
9118 return;
9119 }
9120 case PM_ENSURE_NODE: {
9121 const pm_ensure_node_t *cast = (const pm_ensure_node_t *) node;
9122
9123 if (cast->statements != NULL) {
9124 PM_COMPILE((const pm_node_t *) cast->statements);
9125 }
9126
9127 return;
9128 }
9129 case PM_ELSE_NODE: {
9130 // if foo then bar else baz end
9131 // ^^^^^^^^^^^^
9132 const pm_else_node_t *cast = (const pm_else_node_t *) node;
9133
9134 if (cast->statements != NULL) {
9135 PM_COMPILE((const pm_node_t *) cast->statements);
9136 }
9137 else if (!popped) {
9138 PUSH_SYNTHETIC_PUTNIL(ret, iseq);
9139 }
9140
9141 return;
9142 }
9143 case PM_FLIP_FLOP_NODE: {
9144 // if foo .. bar; end
9145 // ^^^^^^^^^^
9146 const pm_flip_flop_node_t *cast = (const pm_flip_flop_node_t *) node;
9147
9148 LABEL *final_label = NEW_LABEL(location.line);
9149 LABEL *then_label = NEW_LABEL(location.line);
9150 LABEL *else_label = NEW_LABEL(location.line);
9151
9152 pm_compile_flip_flop(cast, else_label, then_label, iseq, location.line, ret, popped, scope_node);
9153
9154 PUSH_LABEL(ret, then_label);
9155 PUSH_INSN1(ret, location, putobject, Qtrue);
9156 PUSH_INSNL(ret, location, jump, final_label);
9157 PUSH_LABEL(ret, else_label);
9158 PUSH_INSN1(ret, location, putobject, Qfalse);
9159 PUSH_LABEL(ret, final_label);
9160
9161 return;
9162 }
9163 case PM_FLOAT_NODE: {
9164 // 1.0
9165 // ^^^
9166 if (!popped) {
9167 VALUE operand = parse_float((const pm_float_node_t *) node);
9168 PUSH_INSN1(ret, location, putobject, operand);
9169 }
9170 return;
9171 }
9172 case PM_FOR_NODE: {
9173 // for foo in bar do end
9174 // ^^^^^^^^^^^^^^^^^^^^^
9175 const pm_for_node_t *cast = (const pm_for_node_t *) node;
9176
9177 LABEL *retry_label = NEW_LABEL(location.line);
9178 LABEL *retry_end_l = NEW_LABEL(location.line);
9179
9180 // First, compile the collection that we're going to be iterating over.
9181 PUSH_LABEL(ret, retry_label);
9182 PM_COMPILE_NOT_POPPED(cast->collection);
9183
9184 // Next, create the new scope that is going to contain the block that
9185 // will be passed to the each method.
9186 pm_scope_node_t next_scope_node;
9187 pm_scope_node_init((const pm_node_t *) cast, &next_scope_node, scope_node);
9188
9189 const rb_iseq_t *child_iseq = NEW_CHILD_ISEQ(&next_scope_node, make_name_for_block(iseq), ISEQ_TYPE_BLOCK, location.line);
9190 pm_scope_node_destroy(&next_scope_node);
9191
9192 const rb_iseq_t *prev_block = ISEQ_COMPILE_DATA(iseq)->current_block;
9193 ISEQ_COMPILE_DATA(iseq)->current_block = child_iseq;
9194
9195 // Now, create the method call to each that will be used to iterate over
9196 // the collection, and pass the newly created iseq as the block.
9197 PUSH_SEND_WITH_BLOCK(ret, location, idEach, INT2FIX(0), child_iseq);
9198 pm_compile_retry_end_label(iseq, ret, retry_end_l);
9199
9200 if (popped) PUSH_INSN(ret, location, pop);
9201 ISEQ_COMPILE_DATA(iseq)->current_block = prev_block;
9202 PUSH_CATCH_ENTRY(CATCH_TYPE_BREAK, retry_label, retry_end_l, child_iseq, retry_end_l);
9203 return;
9204 }
9205 case PM_FORWARDING_ARGUMENTS_NODE:
9206 rb_bug("Cannot compile a ForwardingArgumentsNode directly\n");
9207 return;
9208 case PM_FORWARDING_SUPER_NODE:
9209 // super
9210 // ^^^^^
9211 //
9212 // super {}
9213 // ^^^^^^^^
9214 pm_compile_forwarding_super_node(iseq, (const pm_forwarding_super_node_t *) node, &location, ret, popped, scope_node);
9215 return;
9216 case PM_GLOBAL_VARIABLE_AND_WRITE_NODE: {
9217 // $foo &&= bar
9218 // ^^^^^^^^^^^^
9219 const pm_global_variable_and_write_node_t *cast = (const pm_global_variable_and_write_node_t *) node;
9220 LABEL *end_label = NEW_LABEL(location.line);
9221
9222 VALUE name = ID2SYM(pm_constant_id_lookup(scope_node, cast->name));
9223 PUSH_INSN1(ret, location, getglobal, name);
9224 if (!popped) PUSH_INSN(ret, location, dup);
9225
9226 PUSH_INSNL(ret, location, branchunless, end_label);
9227 if (!popped) PUSH_INSN(ret, location, pop);
9228
9229 PM_COMPILE_NOT_POPPED(cast->value);
9230 if (!popped) PUSH_INSN(ret, location, dup);
9231
9232 PUSH_INSN1(ret, location, setglobal, name);
9233 PUSH_LABEL(ret, end_label);
9234
9235 return;
9236 }
9237 case PM_GLOBAL_VARIABLE_OPERATOR_WRITE_NODE: {
9238 // $foo += bar
9239 // ^^^^^^^^^^^
9240 const pm_global_variable_operator_write_node_t *cast = (const pm_global_variable_operator_write_node_t *) node;
9241
9242 VALUE name = ID2SYM(pm_constant_id_lookup(scope_node, cast->name));
9243 PUSH_INSN1(ret, location, getglobal, name);
9244 PM_COMPILE_NOT_POPPED(cast->value);
9245
9246 ID method_id = pm_constant_id_lookup(scope_node, cast->binary_operator);
9247 int flags = VM_CALL_ARGS_SIMPLE;
9248 PUSH_SEND_WITH_FLAG(ret, location, method_id, INT2NUM(1), INT2FIX(flags));
9249
9250 if (!popped) PUSH_INSN(ret, location, dup);
9251 PUSH_INSN1(ret, location, setglobal, name);
9252
9253 return;
9254 }
9255 case PM_GLOBAL_VARIABLE_OR_WRITE_NODE: {
9256 // $foo ||= bar
9257 // ^^^^^^^^^^^^
9258 const pm_global_variable_or_write_node_t *cast = (const pm_global_variable_or_write_node_t *) node;
9259 LABEL *set_label = NEW_LABEL(location.line);
9260 LABEL *end_label = NEW_LABEL(location.line);
9261
9262 PUSH_INSN(ret, location, putnil);
9263 VALUE name = ID2SYM(pm_constant_id_lookup(scope_node, cast->name));
9264
9265 PUSH_INSN3(ret, location, defined, INT2FIX(DEFINED_GVAR), name, Qtrue);
9266 PUSH_INSNL(ret, location, branchunless, set_label);
9267
9268 PUSH_INSN1(ret, location, getglobal, name);
9269 if (!popped) PUSH_INSN(ret, location, dup);
9270
9271 PUSH_INSNL(ret, location, branchif, end_label);
9272 if (!popped) PUSH_INSN(ret, location, pop);
9273
9274 PUSH_LABEL(ret, set_label);
9275 PM_COMPILE_NOT_POPPED(cast->value);
9276 if (!popped) PUSH_INSN(ret, location, dup);
9277
9278 PUSH_INSN1(ret, location, setglobal, name);
9279 PUSH_LABEL(ret, end_label);
9280
9281 return;
9282 }
9283 case PM_GLOBAL_VARIABLE_READ_NODE: {
9284 // $foo
9285 // ^^^^
9286 const pm_global_variable_read_node_t *cast = (const pm_global_variable_read_node_t *) node;
9287 VALUE name = ID2SYM(pm_constant_id_lookup(scope_node, cast->name));
9288
9289 PUSH_INSN1(ret, location, getglobal, name);
9290 if (popped) PUSH_INSN(ret, location, pop);
9291
9292 return;
9293 }
9294 case PM_GLOBAL_VARIABLE_WRITE_NODE: {
9295 // $foo = 1
9296 // ^^^^^^^^
9297 const pm_global_variable_write_node_t *cast = (const pm_global_variable_write_node_t *) node;
9298 PM_COMPILE_NOT_POPPED(cast->value);
9299 if (!popped) PUSH_INSN(ret, location, dup);
9300
9301 ID name = pm_constant_id_lookup(scope_node, cast->name);
9302 PUSH_INSN1(ret, location, setglobal, ID2SYM(name));
9303
9304 return;
9305 }
9306 case PM_HASH_NODE: {
9307 // {}
9308 // ^^
9309 //
9310 // If every node in the hash is static, then we can compile the entire
9311 // hash now instead of later.
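            // For example, `{ foo: 1, bar: 2 }` can be emitted as a single
            // duphash of a prebuilt hash, while `{ foo: bar }` is built at
            // runtime from its compiled elements.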
9312 if (PM_NODE_FLAG_P(node, PM_NODE_FLAG_STATIC_LITERAL)) {
9313 // We're only going to compile this node if it's not popped. If it
9314 // is popped, then we know we don't need to do anything since it's
9315 // statically known.
9316 if (!popped) {
9317 const pm_hash_node_t *cast = (const pm_hash_node_t *) node;
9318
9319 if (cast->elements.size == 0) {
9320 PUSH_INSN1(ret, location, newhash, INT2FIX(0));
9321 }
9322 else {
9323 VALUE value = pm_static_literal_value(iseq, node, scope_node);
9324 PUSH_INSN1(ret, location, duphash, value);
9325 RB_OBJ_WRITTEN(iseq, Qundef, value);
9326 }
9327 }
9328 }
9329 else {
9330 // Here since we know there are possible side-effects inside the
9331 // hash contents, we're going to build it entirely at runtime. We'll
9332 // do this by pushing all of the key-value pairs onto the stack and
9333 // then combining them with newhash.
9334 //
9335 // If this hash is popped, then this serves only to ensure we enact
9336 // all side-effects (like method calls) that are contained within
9337 // the hash contents.
9338 const pm_hash_node_t *cast = (const pm_hash_node_t *) node;
9339 const pm_node_list_t *elements = &cast->elements;
9340
9341 if (popped) {
9342 // If this hash is popped, then we can iterate through each
9343 // element and compile it. The result of each compilation will
9344 // only include the side effects of the element itself.
9345 for (size_t index = 0; index < elements->size; index++) {
9346 PM_COMPILE_POPPED(elements->nodes[index]);
9347 }
9348 }
9349 else {
9350 pm_compile_hash_elements(iseq, node, elements, 0, Qundef, false, ret, scope_node);
9351 }
9352 }
9353
9354 return;
9355 }
9356 case PM_IF_NODE: {
9357 // if foo then bar end
9358 // ^^^^^^^^^^^^^^^^^^^
9359 //
9360 // bar if foo
9361 // ^^^^^^^^^^
9362 //
9363 // foo ? bar : baz
9364 // ^^^^^^^^^^^^^^^
9365 const pm_if_node_t *cast = (const pm_if_node_t *) node;
9366 pm_compile_conditional(iseq, &location, PM_IF_NODE, (const pm_node_t *) cast, cast->statements, cast->subsequent, cast->predicate, ret, popped, scope_node);
9367 return;
9368 }
9369 case PM_IMAGINARY_NODE: {
9370 // 1i
9371 // ^^
9372 if (!popped) {
9373 VALUE operand = parse_imaginary((const pm_imaginary_node_t *) node);
9374 PUSH_INSN1(ret, location, putobject, operand);
9375 }
9376 return;
9377 }
9378 case PM_IMPLICIT_NODE: {
9379 // Implicit nodes mark places in the syntax tree where explicit syntax
9380 // was omitted, but implied. For example,
9381 //
9382 // { foo: }
9383 //
9384 // In this case a method call/local variable read is implied by virtue
9385 // of the missing value. To compile these nodes, we simply compile the
9386 // value that is implied, which is helpfully supplied by the parser.
9387 const pm_implicit_node_t *cast = (const pm_implicit_node_t *) node;
9388 PM_COMPILE(cast->value);
9389 return;
9390 }
9391 case PM_IN_NODE: {
9392 // In nodes are handled by the case match node directly, so we should
9393 // never end up hitting them through this path.
9394 rb_bug("Should not ever enter an in node directly");
9395 return;
9396 }
9397 case PM_INDEX_OPERATOR_WRITE_NODE: {
9398 // foo[bar] += baz
9399 // ^^^^^^^^^^^^^^^
9400 const pm_index_operator_write_node_t *cast = (const pm_index_operator_write_node_t *) node;
9401 pm_compile_index_operator_write_node(iseq, cast, &location, ret, popped, scope_node);
9402 return;
9403 }
9404 case PM_INDEX_AND_WRITE_NODE: {
9405 // foo[bar] &&= baz
9406 // ^^^^^^^^^^^^^^^^
9407 const pm_index_and_write_node_t *cast = (const pm_index_and_write_node_t *) node;
9408 pm_compile_index_control_flow_write_node(iseq, node, cast->receiver, cast->arguments, cast->block, cast->value, &location, ret, popped, scope_node);
9409 return;
9410 }
9411 case PM_INDEX_OR_WRITE_NODE: {
9412 // foo[bar] ||= baz
9413 // ^^^^^^^^^^^^^^^^
9414 const pm_index_or_write_node_t *cast = (const pm_index_or_write_node_t *) node;
9415 pm_compile_index_control_flow_write_node(iseq, node, cast->receiver, cast->arguments, cast->block, cast->value, &location, ret, popped, scope_node);
9416 return;
9417 }
9418 case PM_INSTANCE_VARIABLE_AND_WRITE_NODE: {
9419 // @foo &&= bar
9420 // ^^^^^^^^^^^^
9421 const pm_instance_variable_and_write_node_t *cast = (const pm_instance_variable_and_write_node_t *) node;
9422 LABEL *end_label = NEW_LABEL(location.line);
9423
9424 ID name_id = pm_constant_id_lookup(scope_node, cast->name);
9425 VALUE name = ID2SYM(name_id);
9426
9427 PUSH_INSN2(ret, location, getinstancevariable, name, get_ivar_ic_value(iseq, name_id));
9428 if (!popped) PUSH_INSN(ret, location, dup);
9429
9430 PUSH_INSNL(ret, location, branchunless, end_label);
9431 if (!popped) PUSH_INSN(ret, location, pop);
9432
9433 PM_COMPILE_NOT_POPPED(cast->value);
9434 if (!popped) PUSH_INSN(ret, location, dup);
9435
9436 PUSH_INSN2(ret, location, setinstancevariable, name, get_ivar_ic_value(iseq, name_id));
9437 PUSH_LABEL(ret, end_label);
9438
9439 return;
9440 }
9441 case PM_INSTANCE_VARIABLE_OPERATOR_WRITE_NODE: {
9442 // @foo += bar
9443 // ^^^^^^^^^^^
9444 const pm_instance_variable_operator_write_node_t *cast = (const pm_instance_variable_operator_write_node_t *) node;
9445
9446 ID name_id = pm_constant_id_lookup(scope_node, cast->name);
9447 VALUE name = ID2SYM(name_id);
9448
9449 PUSH_INSN2(ret, location, getinstancevariable, name, get_ivar_ic_value(iseq, name_id));
9450 PM_COMPILE_NOT_POPPED(cast->value);
9451
9452 ID method_id = pm_constant_id_lookup(scope_node, cast->binary_operator);
9453 int flags = VM_CALL_ARGS_SIMPLE;
9454 PUSH_SEND_WITH_FLAG(ret, location, method_id, INT2NUM(1), INT2FIX(flags));
9455
9456 if (!popped) PUSH_INSN(ret, location, dup);
9457 PUSH_INSN2(ret, location, setinstancevariable, name, get_ivar_ic_value(iseq, name_id));
9458
9459 return;
9460 }
9461 case PM_INSTANCE_VARIABLE_OR_WRITE_NODE: {
9462 // @foo ||= bar
9463 // ^^^^^^^^^^^^
9464 const pm_instance_variable_or_write_node_t *cast = (const pm_instance_variable_or_write_node_t *) node;
9465 LABEL *end_label = NEW_LABEL(location.line);
9466
9467 ID name_id = pm_constant_id_lookup(scope_node, cast->name);
9468 VALUE name = ID2SYM(name_id);
9469
9470 PUSH_INSN2(ret, location, getinstancevariable, name, get_ivar_ic_value(iseq, name_id));
9471 if (!popped) PUSH_INSN(ret, location, dup);
9472
9473 PUSH_INSNL(ret, location, branchif, end_label);
9474 if (!popped) PUSH_INSN(ret, location, pop);
9475
9476 PM_COMPILE_NOT_POPPED(cast->value);
9477 if (!popped) PUSH_INSN(ret, location, dup);
9478
9479 PUSH_INSN2(ret, location, setinstancevariable, name, get_ivar_ic_value(iseq, name_id));
9480 PUSH_LABEL(ret, end_label);
9481
9482 return;
9483 }
9484 case PM_INSTANCE_VARIABLE_READ_NODE: {
9485 // @foo
9486 // ^^^^
9487 if (!popped) {
9488 const pm_instance_variable_read_node_t *cast = (const pm_instance_variable_read_node_t *) node;
9489 ID name = pm_constant_id_lookup(scope_node, cast->name);
9490 PUSH_INSN2(ret, location, getinstancevariable, ID2SYM(name), get_ivar_ic_value(iseq, name));
9491 }
9492 return;
9493 }
9494 case PM_INSTANCE_VARIABLE_WRITE_NODE: {
9495 // @foo = 1
9496 // ^^^^^^^^
9497 const pm_instance_variable_write_node_t *cast = (const pm_instance_variable_write_node_t *) node;
9498 PM_COMPILE_NOT_POPPED(cast->value);
9499 if (!popped) PUSH_INSN(ret, location, dup);
9500
9501 ID name = pm_constant_id_lookup(scope_node, cast->name);
9502 PUSH_INSN2(ret, location, setinstancevariable, ID2SYM(name), get_ivar_ic_value(iseq, name));
9503
9504 return;
9505 }
9506 case PM_INTEGER_NODE: {
9507 // 1
9508 // ^
9509 if (!popped) {
9510 VALUE operand = parse_integer((const pm_integer_node_t *) node);
9511 PUSH_INSN1(ret, location, putobject, operand);
9512 }
9513 return;
9514 }
9515 case PM_INTERPOLATED_MATCH_LAST_LINE_NODE: {
9516 // if /foo #{bar}/ then end
9517 // ^^^^^^^^^^^^
9518 if (PM_NODE_FLAG_P(node, PM_NODE_FLAG_STATIC_LITERAL)) {
9519 if (!popped) {
9520 VALUE regexp = pm_static_literal_value(iseq, node, scope_node);
9521 PUSH_INSN1(ret, location, putobject, regexp);
9522 }
9523 }
9524 else {
9525 pm_compile_regexp_dynamic(iseq, node, &((const pm_interpolated_match_last_line_node_t *) node)->parts, &location, ret, popped, scope_node);
9526 }
9527
9528 PUSH_INSN1(ret, location, getglobal, rb_id2sym(idLASTLINE));
9529 PUSH_SEND(ret, location, idEqTilde, INT2NUM(1));
9530 if (popped) PUSH_INSN(ret, location, pop);
9531
9532 return;
9533 }
9534 case PM_INTERPOLATED_REGULAR_EXPRESSION_NODE: {
9535 // /foo #{bar}/
9536 // ^^^^^^^^^^^^
9537 if (PM_NODE_FLAG_P(node, PM_REGULAR_EXPRESSION_FLAGS_ONCE)) {
9538 const rb_iseq_t *prevblock = ISEQ_COMPILE_DATA(iseq)->current_block;
9539 const rb_iseq_t *block_iseq = NULL;
9540 int ise_index = ISEQ_BODY(iseq)->ise_size++;
9541
9542 pm_scope_node_t next_scope_node;
9543 pm_scope_node_init(node, &next_scope_node, scope_node);
9544
9545 block_iseq = NEW_CHILD_ISEQ(&next_scope_node, make_name_for_block(iseq), ISEQ_TYPE_PLAIN, location.line);
9546 pm_scope_node_destroy(&next_scope_node);
9547
9548 ISEQ_COMPILE_DATA(iseq)->current_block = block_iseq;
9549 PUSH_INSN2(ret, location, once, block_iseq, INT2FIX(ise_index));
9550 ISEQ_COMPILE_DATA(iseq)->current_block = prevblock;
9551
9552 if (popped) PUSH_INSN(ret, location, pop);
9553 return;
9554 }
9555
9556 if (PM_NODE_FLAG_P(node, PM_NODE_FLAG_STATIC_LITERAL)) {
9557 if (!popped) {
9558 VALUE regexp = pm_static_literal_value(iseq, node, scope_node);
9559 PUSH_INSN1(ret, location, putobject, regexp);
9560 }
9561 }
9562 else {
9563 pm_compile_regexp_dynamic(iseq, node, &((const pm_interpolated_regular_expression_node_t *) node)->parts, &location, ret, popped, scope_node);
9564 if (popped) PUSH_INSN(ret, location, pop);
9565 }
9566
9567 return;
9568 }
9569 case PM_INTERPOLATED_STRING_NODE: {
9570 // "foo #{bar}"
9571 // ^^^^^^^^^^^^
9572 if (PM_NODE_FLAG_P(node, PM_NODE_FLAG_STATIC_LITERAL)) {
9573 if (!popped) {
9574 VALUE string = pm_static_literal_value(iseq, node, scope_node);
9575
9576 if (PM_NODE_FLAG_P(node, PM_INTERPOLATED_STRING_NODE_FLAGS_FROZEN)) {
9577 PUSH_INSN1(ret, location, putobject, string);
9578 }
9579 else if (PM_NODE_FLAG_P(node, PM_INTERPOLATED_STRING_NODE_FLAGS_MUTABLE)) {
9580 PUSH_INSN1(ret, location, putstring, string);
9581 }
9582 else {
9583 PUSH_INSN1(ret, location, putchilledstring, string);
9584 }
9585 }
9586 }
9587 else {
9588 const pm_interpolated_string_node_t *cast = (const pm_interpolated_string_node_t *) node;
9589 int length = pm_interpolated_node_compile(iseq, &cast->parts, &location, ret, popped, scope_node, NULL, NULL, PM_NODE_FLAG_P(cast, PM_INTERPOLATED_STRING_NODE_FLAGS_MUTABLE), PM_NODE_FLAG_P(cast, PM_INTERPOLATED_STRING_NODE_FLAGS_FROZEN));
9590 if (length > 1) PUSH_INSN1(ret, location, concatstrings, INT2FIX(length));
9591 if (popped) PUSH_INSN(ret, location, pop);
9592 }
9593
9594 return;
9595 }
9596 case PM_INTERPOLATED_SYMBOL_NODE: {
9597 // :"foo #{bar}"
9598 // ^^^^^^^^^^^^^
9599 const pm_interpolated_symbol_node_t *cast = (const pm_interpolated_symbol_node_t *) node;
9600 int length = pm_interpolated_node_compile(iseq, &cast->parts, &location, ret, popped, scope_node, NULL, NULL, false, false);
9601
9602 if (length > 1) {
9603 PUSH_INSN1(ret, location, concatstrings, INT2FIX(length));
9604 }
9605
9606 if (!popped) {
9607 PUSH_INSN(ret, location, intern);
9608 }
9609 else {
9610 PUSH_INSN(ret, location, pop);
9611 }
9612
9613 return;
9614 }
9615 case PM_INTERPOLATED_X_STRING_NODE: {
9616 // `foo #{bar}`
9617 // ^^^^^^^^^^^^
9618 const pm_interpolated_x_string_node_t *cast = (const pm_interpolated_x_string_node_t *) node;
9619
9620 PUSH_INSN(ret, location, putself);
9621
9622 int length = pm_interpolated_node_compile(iseq, &cast->parts, &location, ret, false, scope_node, NULL, NULL, false, false);
9623 if (length > 1) PUSH_INSN1(ret, location, concatstrings, INT2FIX(length));
9624
9625 PUSH_SEND_WITH_FLAG(ret, location, idBackquote, INT2NUM(1), INT2FIX(VM_CALL_FCALL | VM_CALL_ARGS_SIMPLE));
9626 if (popped) PUSH_INSN(ret, location, pop);
9627
9628 return;
9629 }
9630 case PM_IT_LOCAL_VARIABLE_READ_NODE: {
9631 // -> { it }
9632 // ^^
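 // The implicit `it` parameter is stored as a local in the block that
 // declares it. Walk outward through the enclosing scopes, counting how
 // many levels up we travel, until we find the scope whose parameters are
 // an implicit `it` parameter, then read that local at the computed depth.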
9633 if (!popped) {
9634 pm_scope_node_t *current_scope_node = scope_node;
9635 int level = 0;
9636
9637 while (current_scope_node) {
9638 if (current_scope_node->parameters && PM_NODE_TYPE_P(current_scope_node->parameters, PM_IT_PARAMETERS_NODE)) {
9639 PUSH_GETLOCAL(ret, location, current_scope_node->local_table_for_iseq_size, level);
9640 return;
9641 }
9642
9643 current_scope_node = current_scope_node->previous;
9644 level++;
9645 }
9646 rb_bug("Local `it` does not exist");
9647 }
9648
9649 return;
9650 }
9651 case PM_KEYWORD_HASH_NODE: {
9652 // foo(bar: baz)
9653 // ^^^^^^^^
9654 const pm_keyword_hash_node_t *cast = (const pm_keyword_hash_node_t *) node;
9655 const pm_node_list_t *elements = &cast->elements;
9656
9657 const pm_node_t *element;
9658 PM_NODE_LIST_FOREACH(elements, index, element) {
9659 PM_COMPILE(element);
9660 }
9661
9662 if (!popped) PUSH_INSN1(ret, location, newhash, INT2FIX(elements->size * 2));
9663 return;
9664 }
9665 case PM_LAMBDA_NODE: {
9666 // -> {}
9667 // ^^^^^
9668 const pm_lambda_node_t *cast = (const pm_lambda_node_t *) node;
9669
9670 pm_scope_node_t next_scope_node;
9671 pm_scope_node_init(node, &next_scope_node, scope_node);
9672
9673 int opening_lineno = pm_location_line_number(parser, &cast->opening_loc);
9674 const rb_iseq_t *block = NEW_CHILD_ISEQ(&next_scope_node, make_name_for_block(iseq), ISEQ_TYPE_BLOCK, opening_lineno);
9675 pm_scope_node_destroy(&next_scope_node);
9676
9677 VALUE argc = INT2FIX(0);
9678 PUSH_INSN1(ret, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
9679 PUSH_CALL_WITH_BLOCK(ret, location, idLambda, argc, block);
9680 RB_OBJ_WRITTEN(iseq, Qundef, (VALUE) block);
9681
9682 if (popped) PUSH_INSN(ret, location, pop);
9683 return;
9684 }
9685 case PM_LOCAL_VARIABLE_AND_WRITE_NODE: {
9686 // foo &&= bar
9687 // ^^^^^^^^^^^
9688 const pm_local_variable_and_write_node_t *cast = (const pm_local_variable_and_write_node_t *) node;
9689 LABEL *end_label = NEW_LABEL(location.line);
9690
9691 pm_local_index_t local_index = pm_lookup_local_index(iseq, scope_node, cast->name, cast->depth);
9692 PUSH_GETLOCAL(ret, location, local_index.index, local_index.level);
9693 if (!popped) PUSH_INSN(ret, location, dup);
9694
9695 PUSH_INSNL(ret, location, branchunless, end_label);
9696 if (!popped) PUSH_INSN(ret, location, pop);
9697
9698 PM_COMPILE_NOT_POPPED(cast->value);
9699 if (!popped) PUSH_INSN(ret, location, dup);
9700
9701 PUSH_SETLOCAL(ret, location, local_index.index, local_index.level);
9702 PUSH_LABEL(ret, end_label);
9703
9704 return;
9705 }
9706 case PM_LOCAL_VARIABLE_OPERATOR_WRITE_NODE: {
9707 // foo += bar
9708 // ^^^^^^^^^^
9709 const pm_local_variable_operator_write_node_t *cast = (const pm_local_variable_operator_write_node_t *) node;
9710
9711 pm_local_index_t local_index = pm_lookup_local_index(iseq, scope_node, cast->name, cast->depth);
9712 PUSH_GETLOCAL(ret, location, local_index.index, local_index.level);
9713
9714 PM_COMPILE_NOT_POPPED(cast->value);
9715
9716 ID method_id = pm_constant_id_lookup(scope_node, cast->binary_operator);
9717 PUSH_SEND_WITH_FLAG(ret, location, method_id, INT2NUM(1), INT2FIX(VM_CALL_ARGS_SIMPLE));
9718
9719 if (!popped) PUSH_INSN(ret, location, dup);
9720 PUSH_SETLOCAL(ret, location, local_index.index, local_index.level);
9721
9722 return;
9723 }
9724 case PM_LOCAL_VARIABLE_OR_WRITE_NODE: {
9725 // foo ||= bar
9726 // ^^^^^^^^^^^
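 // A local variable is always defined, so the `defined?`-style guard that
 // other ||= targets need collapses here to pushing `true`: the
 // branchunless below never takes its jump, and execution always falls
 // through to read the current value. set_label is only reached when that
 // value is falsy and the right-hand side needs to be evaluated.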
9727 const pm_local_variable_or_write_node_t *cast = (const pm_local_variable_or_write_node_t *) node;
9728
9729 LABEL *set_label = NEW_LABEL(location.line);
9730 LABEL *end_label = NEW_LABEL(location.line);
9731
9732 PUSH_INSN1(ret, location, putobject, Qtrue);
9733 PUSH_INSNL(ret, location, branchunless, set_label);
9734
9735 pm_local_index_t local_index = pm_lookup_local_index(iseq, scope_node, cast->name, cast->depth);
9736 PUSH_GETLOCAL(ret, location, local_index.index, local_index.level);
9737 if (!popped) PUSH_INSN(ret, location, dup);
9738
9739 PUSH_INSNL(ret, location, branchif, end_label);
9740 if (!popped) PUSH_INSN(ret, location, pop);
9741
9742 PUSH_LABEL(ret, set_label);
9743 PM_COMPILE_NOT_POPPED(cast->value);
9744 if (!popped) PUSH_INSN(ret, location, dup);
9745
9746 PUSH_SETLOCAL(ret, location, local_index.index, local_index.level);
9747 PUSH_LABEL(ret, end_label);
9748
9749 return;
9750 }
9751 case PM_LOCAL_VARIABLE_READ_NODE: {
9752 // foo
9753 // ^^^
9754 if (!popped) {
9755 const pm_local_variable_read_node_t *cast = (const pm_local_variable_read_node_t *) node;
9756 pm_local_index_t index = pm_lookup_local_index(iseq, scope_node, cast->name, cast->depth);
9757 PUSH_GETLOCAL(ret, location, index.index, index.level);
9758 }
9759
9760 return;
9761 }
9762 case PM_LOCAL_VARIABLE_WRITE_NODE: {
9763 // foo = 1
9764 // ^^^^^^^
9765 const pm_local_variable_write_node_t *cast = (const pm_local_variable_write_node_t *) node;
9766 PM_COMPILE_NOT_POPPED(cast->value);
9767 if (!popped) PUSH_INSN(ret, location, dup);
9768
9769 pm_local_index_t index = pm_lookup_local_index(iseq, scope_node, cast->name, cast->depth);
9770 PUSH_SETLOCAL(ret, location, index.index, index.level);
9771 return;
9772 }
9773 case PM_MATCH_LAST_LINE_NODE: {
9774 // if /foo/ then end
9775 // ^^^^^
9776 VALUE regexp = pm_static_literal_value(iseq, node, scope_node);
9777
9778 PUSH_INSN1(ret, location, putobject, regexp);
9779 PUSH_INSN2(ret, location, getspecial, INT2FIX(0), INT2FIX(0));
9780 PUSH_SEND(ret, location, idEqTilde, INT2NUM(1));
9781 if (popped) PUSH_INSN(ret, location, pop);
9782
9783 return;
9784 }
9785 case PM_MATCH_PREDICATE_NODE: {
9786 // foo in bar
9787 // ^^^^^^^^^^
9788 const pm_match_predicate_node_t *cast = (const pm_match_predicate_node_t *) node;
9789
9790 // First, allocate some stack space for the cached return value of any
9791 // calls to #deconstruct.
9792 PUSH_INSN(ret, location, putnil);
9793
9794 // Next, compile the expression that we're going to match against.
9795 PM_COMPILE_NOT_POPPED(cast->value);
9796 PUSH_INSN(ret, location, dup);
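 // At this point the stack holds the #deconstruct cache slot (nil), the
 // value being matched, and a copy of that value for the pattern code to
 // consume.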
9797
9798 // Now compile the pattern that is going to be used to match against the
9799 // expression.
9800 LABEL *matched_label = NEW_LABEL(location.line);
9801 LABEL *unmatched_label = NEW_LABEL(location.line);
9802 LABEL *done_label = NEW_LABEL(location.line);
9803 pm_compile_pattern(iseq, scope_node, cast->pattern, ret, matched_label, unmatched_label, false, true, 2);
9804
9805 // If the pattern did not match, then compile the necessary instructions
9806 // to handle pushing false onto the stack, then jump to the end.
9807 PUSH_LABEL(ret, unmatched_label);
9808 PUSH_INSN(ret, location, pop);
9809 PUSH_INSN(ret, location, pop);
9810
9811 if (!popped) PUSH_INSN1(ret, location, putobject, Qfalse);
9812 PUSH_INSNL(ret, location, jump, done_label);
9813 PUSH_INSN(ret, location, putnil);
9814
9815 // If the pattern did match, then compile the necessary instructions to
9816 // handle pushing true onto the stack, then jump to the end.
9817 PUSH_LABEL(ret, matched_label);
9818 PUSH_INSN1(ret, location, adjuststack, INT2FIX(2));
9819 if (!popped) PUSH_INSN1(ret, location, putobject, Qtrue);
9820 PUSH_INSNL(ret, location, jump, done_label);
9821
9822 PUSH_LABEL(ret, done_label);
9823 return;
9824 }
9825 case PM_MATCH_REQUIRED_NODE:
9826 // foo => bar
9827 // ^^^^^^^^^^
9828 //
9829 // A match required node represents pattern matching against a single
9830 // pattern using the => operator. For example,
9831 //
9832 // foo => bar
9833 //
9834 // This is somewhat analogous to compiling a case match statement with a
9835 // single pattern. In both cases, if the pattern fails it should
9836 // immediately raise an error.
9837 pm_compile_match_required_node(iseq, (const pm_match_required_node_t *) node, &location, ret, popped, scope_node);
9838 return;
9839 case PM_MATCH_WRITE_NODE:
9840 // /(?<foo>foo)/ =~ bar
9841 // ^^^^^^^^^^^^^^^^^^^^
9842 //
9843 // Match write nodes are specialized call nodes that have a regular
9844 // expression with valid named capture groups on the left, the =~
9845 // operator, and some value on the right. The nodes themselves simply
9846 // wrap the call with the local variable targets that will be written
9847 // when the call is executed.
9848 pm_compile_match_write_node(iseq, (const pm_match_write_node_t *) node, &location, ret, popped, scope_node);
9849 return;
9850 case PM_MISSING_NODE:
9851 rb_bug("A pm_missing_node_t should not exist in prism's AST.");
9852 return;
9853 case PM_MODULE_NODE: {
9854 // module Foo; end
9855 // ^^^^^^^^^^^^^^^
9856 const pm_module_node_t *cast = (const pm_module_node_t *) node;
9857
9858 ID module_id = pm_constant_id_lookup(scope_node, cast->name);
9859 VALUE module_name = rb_str_freeze(rb_sprintf("<module:%"PRIsVALUE">", rb_id2str(module_id)));
9860
9861 pm_scope_node_t next_scope_node;
9862 pm_scope_node_init((const pm_node_t *) cast, &next_scope_node, scope_node);
9863
9864 const rb_iseq_t *module_iseq = NEW_CHILD_ISEQ(&next_scope_node, module_name, ISEQ_TYPE_CLASS, location.line);
9865 pm_scope_node_destroy(&next_scope_node);
9866
9867 const int flags = VM_DEFINECLASS_TYPE_MODULE | pm_compile_class_path(iseq, cast->constant_path, &location, ret, false, scope_node);
9868 PUSH_INSN(ret, location, putnil);
9869 PUSH_INSN3(ret, location, defineclass, ID2SYM(module_id), module_iseq, INT2FIX(flags));
9870 RB_OBJ_WRITTEN(iseq, Qundef, (VALUE) module_iseq);
9871
9872 if (popped) PUSH_INSN(ret, location, pop);
9873 return;
9874 }
9875 case PM_REQUIRED_PARAMETER_NODE: {
9876 // def foo(bar); end
9877 // ^^^
9878 const pm_required_parameter_node_t *cast = (const pm_required_parameter_node_t *) node;
9879 pm_local_index_t index = pm_lookup_local_index(iseq, scope_node, cast->name, 0);
9880
9881 PUSH_SETLOCAL(ret, location, index.index, index.level);
9882 return;
9883 }
9884 case PM_MULTI_WRITE_NODE: {
9885 // foo, bar = baz
9886 // ^^^^^^^^^^^^^^
9887 //
9888 // A multi write node represents writing to multiple targets using an =
9889 // operator. Importantly, these nodes are only parsed when the left-hand
9890 // side of the operator has multiple targets. Multiple values on the
9891 // right-hand side alone represent an implicit array instead, not a
9892 // multi write.
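 //
 // foo, bar = baz # multi write node: two targets on the left
 // foo = bar, baz # single write whose right-hand side is an implicit array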
9893 const pm_multi_write_node_t *cast = (const pm_multi_write_node_t *) node;
9894
9895 DECL_ANCHOR(writes);
9896 DECL_ANCHOR(cleanup);
9897
9898 pm_multi_target_state_t state = { 0 };
9899 state.position = popped ? 0 : 1;
9900 pm_compile_multi_target_node(iseq, node, ret, writes, cleanup, scope_node, &state);
9901
9902 PM_COMPILE_NOT_POPPED(cast->value);
9903 if (!popped) PUSH_INSN(ret, location, dup);
9904
9905 PUSH_SEQ(ret, writes);
9906 if (!popped && state.stack_size >= 1) {
9907 // Make sure the value on the right-hand side of the = operator is
9908 // being returned before we pop the parent expressions.
9909 PUSH_INSN1(ret, location, setn, INT2FIX(state.stack_size));
9910 }
9911
9912 // Now, we need to go back and modify the topn instructions in order to
9913 // ensure they can correctly retrieve the parent expressions.
9914 pm_multi_target_state_update(&state);
9915
9916 PUSH_SEQ(ret, cleanup);
9917 return;
9918 }
9919 case PM_NEXT_NODE:
9920 // next
9921 // ^^^^
9922 //
9923 // next foo
9924 // ^^^^^^^^
9925 pm_compile_next_node(iseq, (const pm_next_node_t *) node, &location, ret, popped, scope_node);
9926 return;
9927 case PM_NIL_NODE: {
9928 // nil
9929 // ^^^
9930 if (!popped) {
9931 PUSH_INSN(ret, location, putnil);
9932 }
9933
9934 return;
9935 }
9936 case PM_NO_KEYWORDS_PARAMETER_NODE: {
9937 // def foo(**nil); end
9938 // ^^^^^
9939 ISEQ_BODY(iseq)->param.flags.accepts_no_kwarg = TRUE;
9940 return;
9941 }
9942 case PM_NUMBERED_REFERENCE_READ_NODE: {
9943 // $1
9944 // ^^
9945 if (!popped) {
9946 const pm_numbered_reference_read_node_t *cast = (const pm_numbered_reference_read_node_t *) node;
9947
9948 if (cast->number != 0) {
9949 VALUE ref = pm_compile_numbered_reference_ref(cast);
9950 PUSH_INSN2(ret, location, getspecial, INT2FIX(1), ref);
9951 }
9952 else {
9953 PUSH_INSN(ret, location, putnil);
9954 }
9955 }
9956
9957 return;
9958 }
9959 case PM_OR_NODE: {
9960 // a or b
9961 // ^^^^^^
9962 const pm_or_node_t *cast = (const pm_or_node_t *) node;
9963
9964 LABEL *end_label = NEW_LABEL(location.line);
9965 PM_COMPILE_NOT_POPPED(cast->left);
9966
9967 if (!popped) PUSH_INSN(ret, location, dup);
9968 PUSH_INSNL(ret, location, branchif, end_label);
9969
9970 if (!popped) PUSH_INSN(ret, location, pop);
9971 PM_COMPILE(cast->right);
9972 PUSH_LABEL(ret, end_label);
9973
9974 return;
9975 }
9976 case PM_OPTIONAL_PARAMETER_NODE: {
9977 // def foo(bar = 1); end
9978 // ^^^^^^^
9979 const pm_optional_parameter_node_t *cast = (const pm_optional_parameter_node_t *) node;
9980 PM_COMPILE_NOT_POPPED(cast->value);
9981
9982 pm_local_index_t index = pm_lookup_local_index(iseq, scope_node, cast->name, 0);
9983 PUSH_SETLOCAL(ret, location, index.index, index.level);
9984
9985 return;
9986 }
9987 case PM_PARENTHESES_NODE: {
9988 // ()
9989 // ^^
9990 //
9991 // (1)
9992 // ^^^
9993 const pm_parentheses_node_t *cast = (const pm_parentheses_node_t *) node;
9994
9995 if (cast->body != NULL) {
9996 PM_COMPILE(cast->body);
9997 }
9998 else if (!popped) {
9999 PUSH_INSN(ret, location, putnil);
10000 }
10001
10002 return;
10003 }
10004 case PM_PRE_EXECUTION_NODE: {
10005 // BEGIN {}
10006 // ^^^^^^^^
10007 const pm_pre_execution_node_t *cast = (const pm_pre_execution_node_t *) node;
10008
10009 LINK_ANCHOR *outer_pre = scope_node->pre_execution_anchor;
10010 RUBY_ASSERT(outer_pre != NULL);
10011
10012 // BEGIN{} nodes can be nested, so here we're going to do the same thing
10013 // that we did for the top-level compilation where we create two
10014 // anchors and then join them in the correct order into the resulting
10015 // anchor.
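 //
 // For example, in:
 //
 // BEGIN { foo; BEGIN { bar } }
 //
 // the nested block is compiled into the inner pre anchor while foo is
 // compiled into the inner body anchor, and both are then appended to the
 // enclosing pre-execution anchor below.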
10016 DECL_ANCHOR(inner_pre);
10017 scope_node->pre_execution_anchor = inner_pre;
10018
10019 DECL_ANCHOR(inner_body);
10020
10021 if (cast->statements != NULL) {
10022 const pm_node_list_t *body = &cast->statements->body;
10023
10024 for (size_t index = 0; index < body->size; index++) {
10025 pm_compile_node(iseq, body->nodes[index], inner_body, true, scope_node);
10026 }
10027 }
10028
10029 if (!popped) {
10030 PUSH_INSN(inner_body, location, putnil);
10031 }
10032
10033 // Now that everything has been compiled, join both anchors together
10034 // into the correct outer pre execution anchor, and reset the value so
10035 // that subsequent BEGIN{} nodes can be compiled correctly.
10036 PUSH_SEQ(outer_pre, inner_pre);
10037 PUSH_SEQ(outer_pre, inner_body);
10038 scope_node->pre_execution_anchor = outer_pre;
10039
10040 return;
10041 }
10042 case PM_POST_EXECUTION_NODE: {
10043 // END {}
10044 // ^^^^^^
10045 const rb_iseq_t *child_iseq;
10046 const rb_iseq_t *prevblock = ISEQ_COMPILE_DATA(iseq)->current_block;
10047
10048 pm_scope_node_t next_scope_node;
10049 pm_scope_node_init(node, &next_scope_node, scope_node);
10050 child_iseq = NEW_CHILD_ISEQ(&next_scope_node, make_name_for_block(iseq), ISEQ_TYPE_BLOCK, lineno);
10051 pm_scope_node_destroy(&next_scope_node);
10052
10053 ISEQ_COMPILE_DATA(iseq)->current_block = child_iseq;
10054
10055 int is_index = ISEQ_BODY(iseq)->ise_size++;
10056 PUSH_INSN2(ret, location, once, child_iseq, INT2FIX(is_index));
10057 RB_OBJ_WRITTEN(iseq, Qundef, (VALUE) child_iseq);
10058 if (popped) PUSH_INSN(ret, location, pop);
10059
10060 ISEQ_COMPILE_DATA(iseq)->current_block = prevblock;
10061
10062 return;
10063 }
10064 case PM_RANGE_NODE: {
10065 // 0..5
10066 // ^^^^
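 // When both bounds are optimizable literals, the Range is built at
 // compile time and pushed with a single putobject; otherwise each bound
 // is compiled (nil standing in for an omitted bound) and a newrange
 // instruction assembles the Range at runtime.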
10067 const pm_range_node_t *cast = (const pm_range_node_t *) node;
10068 bool exclude_end = PM_NODE_FLAG_P(cast, PM_RANGE_FLAGS_EXCLUDE_END);
10069
10070 if (pm_optimizable_range_item_p(cast->left) && pm_optimizable_range_item_p(cast->right)) {
10071 if (!popped) {
10072 const pm_node_t *left = cast->left;
10073 const pm_node_t *right = cast->right;
10074
10075 VALUE val = rb_range_new(
10076 (left && PM_NODE_TYPE_P(left, PM_INTEGER_NODE)) ? parse_integer((const pm_integer_node_t *) left) : Qnil,
10077 (right && PM_NODE_TYPE_P(right, PM_INTEGER_NODE)) ? parse_integer((const pm_integer_node_t *) right) : Qnil,
10078 exclude_end
10079 );
10080
10081 RB_OBJ_SET_SHAREABLE(val);
10082 PUSH_INSN1(ret, location, putobject, val);
10083 }
10084 }
10085 else {
10086 if (cast->left != NULL) {
10087 PM_COMPILE(cast->left);
10088 }
10089 else if (!popped) {
10090 PUSH_INSN(ret, location, putnil);
10091 }
10092
10093 if (cast->right != NULL) {
10094 PM_COMPILE(cast->right);
10095 }
10096 else if (!popped) {
10097 PUSH_INSN(ret, location, putnil);
10098 }
10099
10100 if (!popped) {
10101 PUSH_INSN1(ret, location, newrange, INT2FIX(exclude_end ? 1 : 0));
10102 }
10103 }
10104 return;
10105 }
10106 case PM_RATIONAL_NODE: {
10107 // 1r
10108 // ^^
10109 if (!popped) {
10110 PUSH_INSN1(ret, location, putobject, parse_rational((const pm_rational_node_t *) node));
10111 }
10112 return;
10113 }
10114 case PM_REDO_NODE:
10115 // redo
10116 // ^^^^
10117 pm_compile_redo_node(iseq, &location, ret, popped, scope_node);
10118 return;
10119 case PM_REGULAR_EXPRESSION_NODE: {
10120 // /foo/
10121 // ^^^^^
10122 if (!popped) {
10123 VALUE regexp = pm_static_literal_value(iseq, node, scope_node);
10124 PUSH_INSN1(ret, location, putobject, regexp);
10125 }
10126 return;
10127 }
10128 case PM_RESCUE_NODE:
10129 // begin; rescue; end
10130 // ^^^^^^^
10131 pm_compile_rescue_node(iseq, (const pm_rescue_node_t *) node, &location, ret, popped, scope_node);
10132 return;
10133 case PM_RESCUE_MODIFIER_NODE: {
10134 // foo rescue bar
10135 // ^^^^^^^^^^^^^^
10136 const pm_rescue_modifier_node_t *cast = (const pm_rescue_modifier_node_t *) node;
10137
10138 pm_scope_node_t rescue_scope_node;
10139 pm_scope_node_init((const pm_node_t *) cast, &rescue_scope_node, scope_node);
10140
10141 rb_iseq_t *rescue_iseq = NEW_CHILD_ISEQ(
10142 &rescue_scope_node,
10143 rb_str_concat(rb_str_new2("rescue in "), ISEQ_BODY(iseq)->location.label),
10144 ISEQ_TYPE_RESCUE,
10145 pm_node_line_number(parser, cast->rescue_expression)
10146 );
10147
10148 pm_scope_node_destroy(&rescue_scope_node);
10149
10150 LABEL *lstart = NEW_LABEL(location.line);
10151 LABEL *lend = NEW_LABEL(location.line);
10152 LABEL *lcont = NEW_LABEL(location.line);
10153
10154 lstart->rescued = LABEL_RESCUE_BEG;
10155 lend->rescued = LABEL_RESCUE_END;
10156
10157 PUSH_LABEL(ret, lstart);
10158 PM_COMPILE_NOT_POPPED(cast->expression);
10159 PUSH_LABEL(ret, lend);
10160
10161 PUSH_INSN(ret, location, nop);
10162 PUSH_LABEL(ret, lcont);
10163 if (popped) PUSH_INSN(ret, location, pop);
10164
10165 PUSH_CATCH_ENTRY(CATCH_TYPE_RESCUE, lstart, lend, rescue_iseq, lcont);
10166 PUSH_CATCH_ENTRY(CATCH_TYPE_RETRY, lend, lcont, NULL, lstart);
10167 return;
10168 }
10169 case PM_RETURN_NODE:
10170 // return
10171 // ^^^^^^
10172 //
10173 // return 1
10174 // ^^^^^^^^
10175 pm_compile_return_node(iseq, (const pm_return_node_t *) node, &location, ret, popped, scope_node);
10176 return;
10177 case PM_RETRY_NODE: {
10178 // retry
10179 // ^^^^^
10180 if (ISEQ_BODY(iseq)->type == ISEQ_TYPE_RESCUE) {
10181 PUSH_INSN(ret, location, putnil);
10182 PUSH_INSN1(ret, location, throw, INT2FIX(TAG_RETRY));
10183 if (popped) PUSH_INSN(ret, location, pop);
10184 }
10185 else {
10186 COMPILE_ERROR(iseq, location.line, "Invalid retry");
10187 return;
10188 }
10189 return;
10190 }
10191 case PM_SCOPE_NODE:
10192 pm_compile_scope_node(iseq, (pm_scope_node_t *) node, &location, ret, popped);
10193 return;
10194 case PM_SELF_NODE: {
10195 // self
10196 // ^^^^
10197 if (!popped) {
10198 PUSH_INSN(ret, location, putself);
10199 }
10200 return;
10201 }
10202 case PM_SHAREABLE_CONSTANT_NODE: {
10203 // A write to a constant whose value may be marked as shareable,
10204 // depending on the current lexical context.
10205 const pm_shareable_constant_node_t *cast = (const pm_shareable_constant_node_t *) node;
10206 pm_node_flags_t shareability = (cast->base.flags & (PM_SHAREABLE_CONSTANT_NODE_FLAGS_LITERAL | PM_SHAREABLE_CONSTANT_NODE_FLAGS_EXPERIMENTAL_EVERYTHING | PM_SHAREABLE_CONSTANT_NODE_FLAGS_EXPERIMENTAL_COPY));
10207
10208 switch (PM_NODE_TYPE(cast->write)) {
10209 case PM_CONSTANT_WRITE_NODE:
10210 pm_compile_constant_write_node(iseq, (const pm_constant_write_node_t *) cast->write, shareability, &location, ret, popped, scope_node);
10211 break;
10212 case PM_CONSTANT_AND_WRITE_NODE:
10213 pm_compile_constant_and_write_node(iseq, (const pm_constant_and_write_node_t *) cast->write, shareability, &location, ret, popped, scope_node);
10214 break;
10215 case PM_CONSTANT_OR_WRITE_NODE:
10216 pm_compile_constant_or_write_node(iseq, (const pm_constant_or_write_node_t *) cast->write, shareability, &location, ret, popped, scope_node);
10217 break;
10218 case PM_CONSTANT_OPERATOR_WRITE_NODE:
10219 pm_compile_constant_operator_write_node(iseq, (const pm_constant_operator_write_node_t *) cast->write, shareability, &location, ret, popped, scope_node);
10220 break;
10221 case PM_CONSTANT_PATH_WRITE_NODE:
10222 pm_compile_constant_path_write_node(iseq, (const pm_constant_path_write_node_t *) cast->write, shareability, &location, ret, popped, scope_node);
10223 break;
10224 case PM_CONSTANT_PATH_AND_WRITE_NODE:
10225 pm_compile_constant_path_and_write_node(iseq, (const pm_constant_path_and_write_node_t *) cast->write, shareability, &location, ret, popped, scope_node);
10226 break;
10227 case PM_CONSTANT_PATH_OR_WRITE_NODE:
10228 pm_compile_constant_path_or_write_node(iseq, (const pm_constant_path_or_write_node_t *) cast->write, shareability, &location, ret, popped, scope_node);
10229 break;
10230 case PM_CONSTANT_PATH_OPERATOR_WRITE_NODE:
10231 pm_compile_constant_path_operator_write_node(iseq, (const pm_constant_path_operator_write_node_t *) cast->write, shareability, &location, ret, popped, scope_node);
10232 break;
10233 default:
10234 rb_bug("Unexpected node type for shareable constant write: %s", pm_node_type_to_str(PM_NODE_TYPE(cast->write)));
10235 break;
10236 }
10237
10238 return;
10239 }
10240 case PM_SINGLETON_CLASS_NODE: {
10241 // class << self; end
10242 // ^^^^^^^^^^^^^^^^^^
10243 const pm_singleton_class_node_t *cast = (const pm_singleton_class_node_t *) node;
10244
10245 pm_scope_node_t next_scope_node;
10246 pm_scope_node_init((const pm_node_t *) cast, &next_scope_node, scope_node);
10247 const rb_iseq_t *child_iseq = NEW_ISEQ(&next_scope_node, rb_fstring_lit("singleton class"), ISEQ_TYPE_CLASS, location.line);
10248 pm_scope_node_destroy(&next_scope_node);
10249
10250 PM_COMPILE_NOT_POPPED(cast->expression);
10251 PUSH_INSN(ret, location, putnil);
10252
10253 ID singletonclass;
10254 CONST_ID(singletonclass, "singletonclass");
10255 PUSH_INSN3(ret, location, defineclass, ID2SYM(singletonclass), child_iseq, INT2FIX(VM_DEFINECLASS_TYPE_SINGLETON_CLASS));
10256
10257 if (popped) PUSH_INSN(ret, location, pop);
10258 RB_OBJ_WRITTEN(iseq, Qundef, (VALUE) child_iseq);
10259
10260 return;
10261 }
10262 case PM_SOURCE_ENCODING_NODE: {
10263 // __ENCODING__
10264 // ^^^^^^^^^^^^
10265 if (!popped) {
10266 VALUE value = pm_static_literal_value(iseq, node, scope_node);
10267 PUSH_INSN1(ret, location, putobject, value);
10268 }
10269 return;
10270 }
10271 case PM_SOURCE_FILE_NODE: {
10272 // __FILE__
10273 // ^^^^^^^^
10274 if (!popped) {
10275 const pm_source_file_node_t *cast = (const pm_source_file_node_t *) node;
10276 VALUE string = pm_source_file_value(cast, scope_node);
10277
10278 if (PM_NODE_FLAG_P(cast, PM_STRING_FLAGS_FROZEN)) {
10279 PUSH_INSN1(ret, location, putobject, string);
10280 }
10281 else if (PM_NODE_FLAG_P(cast, PM_STRING_FLAGS_MUTABLE)) {
10282 PUSH_INSN1(ret, location, putstring, string);
10283 }
10284 else {
10285 PUSH_INSN1(ret, location, putchilledstring, string);
10286 }
10287 }
10288 return;
10289 }
10290 case PM_SOURCE_LINE_NODE: {
10291 // __LINE__
10292 // ^^^^^^^^
10293 if (!popped) {
10294 VALUE value = pm_static_literal_value(iseq, node, scope_node);
10295 PUSH_INSN1(ret, location, putobject, value);
10296 }
10297 return;
10298 }
10299 case PM_SPLAT_NODE: {
10300 // foo(*bar)
10301 // ^^^^
10302 const pm_splat_node_t *cast = (const pm_splat_node_t *) node;
10303 if (cast->expression) {
10304 PM_COMPILE(cast->expression);
10305 }
10306
10307 if (!popped) {
10308 PUSH_INSN1(ret, location, splatarray, Qtrue);
10309 }
10310 return;
10311 }
10312 case PM_STATEMENTS_NODE: {
10313 // A list of statements.
10314 const pm_statements_node_t *cast = (const pm_statements_node_t *) node;
10315 const pm_node_list_t *body = &cast->body;
10316
10317 if (body->size > 0) {
10318 for (size_t index = 0; index < body->size - 1; index++) {
10319 PM_COMPILE_POPPED(body->nodes[index]);
10320 }
10321 PM_COMPILE(body->nodes[body->size - 1]);
10322 }
10323 else {
10324 PUSH_INSN(ret, location, putnil);
10325 }
10326 return;
10327 }
10328 case PM_STRING_NODE: {
10329 // "foo"
10330 // ^^^^^
10331 if (!popped) {
10332 const pm_string_node_t *cast = (const pm_string_node_t *) node;
10333 VALUE value = parse_static_literal_string(iseq, scope_node, node, &cast->unescaped);
10334
10335 if (PM_NODE_FLAG_P(node, PM_STRING_FLAGS_FROZEN)) {
10336 PUSH_INSN1(ret, location, putobject, value);
10337 }
10338 else if (PM_NODE_FLAG_P(node, PM_STRING_FLAGS_MUTABLE)) {
10339 PUSH_INSN1(ret, location, putstring, value);
10340 }
10341 else {
10342 PUSH_INSN1(ret, location, putchilledstring, value);
10343 }
10344 }
10345 return;
10346 }
10347 case PM_SUPER_NODE:
10348 // super()
10349 // super(foo)
10350 // super(...)
10351 pm_compile_super_node(iseq, (const pm_super_node_t *) node, &location, ret, popped, scope_node);
10352 return;
10353 case PM_SYMBOL_NODE: {
10354 // :foo
10355 // ^^^^
10356 if (!popped) {
10357 VALUE value = pm_static_literal_value(iseq, node, scope_node);
10358 PUSH_INSN1(ret, location, putobject, value);
10359 }
10360 return;
10361 }
10362 case PM_TRUE_NODE: {
10363 // true
10364 // ^^^^
10365 if (!popped) {
10366 PUSH_INSN1(ret, location, putobject, Qtrue);
10367 }
10368 return;
10369 }
10370 case PM_UNDEF_NODE: {
10371 // undef foo
10372 // ^^^^^^^^^
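 // Each name is removed with a call to the VM's core#undef_method helper:
 // for every name we push the VM core object and the cbase, compile the
 // name, and emit the send, popping the intermediate results so that only
 // the final value (or nothing, when popped) remains on the stack.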
10373 const pm_undef_node_t *cast = (const pm_undef_node_t *) node;
10374 const pm_node_list_t *names = &cast->names;
10375
10376 for (size_t index = 0; index < names->size; index++) {
10377 PUSH_INSN1(ret, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_VMCORE));
10378 PUSH_INSN1(ret, location, putspecialobject, INT2FIX(VM_SPECIAL_OBJECT_CBASE));
10379
10380 PM_COMPILE_NOT_POPPED(names->nodes[index]);
10381 PUSH_SEND(ret, location, id_core_undef_method, INT2NUM(2));
10382
10383 if (index < names->size - 1) {
10384 PUSH_INSN(ret, location, pop);
10385 }
10386 }
10387
10388 if (popped) PUSH_INSN(ret, location, pop);
10389 return;
10390 }
10391 case PM_UNLESS_NODE: {
10392 // unless foo; bar end
10393 // ^^^^^^^^^^^^^^^^^^^
10394 //
10395 // bar unless foo
10396 // ^^^^^^^^^^^^^^
10397 const pm_unless_node_t *cast = (const pm_unless_node_t *) node;
10398 const pm_statements_node_t *statements = NULL;
10399 if (cast->else_clause != NULL) {
10400 statements = ((const pm_else_node_t *) cast->else_clause)->statements;
10401 }
10402
10403 pm_compile_conditional(iseq, &location, PM_UNLESS_NODE, (const pm_node_t *) cast, statements, (const pm_node_t *) cast->statements, cast->predicate, ret, popped, scope_node);
10404 return;
10405 }
10406 case PM_UNTIL_NODE: {
10407 // until foo; bar end
10408 // ^^^^^^^^^^^^^^^^^
10409 //
10410 // bar until foo
10411 // ^^^^^^^^^^^^^
10412 const pm_until_node_t *cast = (const pm_until_node_t *) node;
10413 pm_compile_loop(iseq, &location, cast->base.flags, PM_UNTIL_NODE, (const pm_node_t *) cast, cast->statements, cast->predicate, ret, popped, scope_node);
10414 return;
10415 }
10416 case PM_WHILE_NODE: {
10417 // while foo; bar end
10418 // ^^^^^^^^^^^^^^^^^^
10419 //
10420 // bar while foo
10421 // ^^^^^^^^^^^^^
10422 const pm_while_node_t *cast = (const pm_while_node_t *) node;
10423 pm_compile_loop(iseq, &location, cast->base.flags, PM_WHILE_NODE, (const pm_node_t *) cast, cast->statements, cast->predicate, ret, popped, scope_node);
10424 return;
10425 }
10426 case PM_X_STRING_NODE: {
10427 // `foo`
10428 // ^^^^^
10429 const pm_x_string_node_t *cast = (const pm_x_string_node_t *) node;
10430 VALUE value = parse_static_literal_string(iseq, scope_node, node, &cast->unescaped);
10431
10432 PUSH_INSN(ret, location, putself);
10433 PUSH_INSN1(ret, location, putobject, value);
10434 PUSH_SEND_WITH_FLAG(ret, location, idBackquote, INT2NUM(1), INT2FIX(VM_CALL_FCALL | VM_CALL_ARGS_SIMPLE));
10435 if (popped) PUSH_INSN(ret, location, pop);
10436
10437 return;
10438 }
10439 case PM_YIELD_NODE:
10440 // yield
10441 // ^^^^^
10442 //
10443 // yield 1
10444 // ^^^^^^^
10445 pm_compile_yield_node(iseq, (const pm_yield_node_t *) node, &location, ret, popped, scope_node);
10446 return;
10447 default:
10448 rb_raise(rb_eNotImpError, "node type %s not implemented", pm_node_type_to_str(PM_NODE_TYPE(node)));
10449 return;
10450 }
10451}
10452
10453#undef PM_CONTAINER_P
10454
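/**
 * Returns true if the given iseq represents a pre-execution context (a top,
 * main, or eval iseq), in which case BEGIN{} blocks need to be hoisted into a
 * separate pre-execution anchor by pm_iseq_compile_node.
 */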
10456static inline bool
10457pm_iseq_pre_execution_p(rb_iseq_t *iseq)
10458{
10459 switch (ISEQ_BODY(iseq)->type) {
10460 case ISEQ_TYPE_TOP:
10461 case ISEQ_TYPE_EVAL:
10462 case ISEQ_TYPE_MAIN:
10463 return true;
10464 default:
10465 return false;
10466 }
10467}
10468
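/**
 * Compile the given scope node into the iseq's anchor of linked instructions
 * and hand the result off to iseq_setup. For pre-execution contexts the body
 * is compiled into its own anchor so that any BEGIN{} contents can be placed
 * in front of it.
 */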
10476VALUE
10477pm_iseq_compile_node(rb_iseq_t *iseq, pm_scope_node_t *node)
10478{
10479 DECL_ANCHOR(ret);
10480
10481 if (pm_iseq_pre_execution_p(iseq)) {
10482 // Because these ISEQs can have BEGIN{}, we're going to create two
10483 // anchors to compile them, a "pre" and a "body". We'll mark the "pre"
10484 // on the scope node so that when BEGIN{} is found, its contents will be
10485 // added to the "pre" anchor.
10486 DECL_ANCHOR(pre);
10487 node->pre_execution_anchor = pre;
10488
10489 // Now we'll compile the body as normal. We won't compile directly into
10490 // the "ret" anchor yet because we want to add the "pre" anchor to the
10491 // beginning of the "ret" anchor first.
10492 DECL_ANCHOR(body);
10493 pm_compile_node(iseq, (const pm_node_t *) node, body, false, node);
10494
10495 // Now we'll join both anchors together so that the content is in the
10496 // correct order.
10497 PUSH_SEQ(ret, pre);
10498 PUSH_SEQ(ret, body);
10499 }
10500 else {
10501 // In other circumstances, we can just compile the node directly into
10502 // the "ret" anchor.
10503 pm_compile_node(iseq, (const pm_node_t *) node, ret, false, node);
10504 }
10505
10506 CHECK(iseq_setup_insn(iseq, ret));
10507 return iseq_setup(iseq, ret);
10508}
10509
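/**
 * Free all of the resources held by the given parse result: the AST, the
 * interned constants and scope node (if parsing completed), the parser, the
 * input string, and the options.
 */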
10514void
10515pm_parse_result_free(pm_parse_result_t *result)
10516{
10517 if (result->node.ast_node != NULL) {
10518 pm_node_destroy(&result->parser, result->node.ast_node);
10519 }
10520
10521 if (result->parsed) {
10522 xfree(result->node.constants);
10523 pm_scope_node_destroy(&result->node);
10524 }
10525
10526 pm_parser_free(&result->parser);
10527 pm_string_free(&result->input);
10528 pm_options_free(&result->options);
10529}
10530
10532typedef struct {
10535 pm_diagnostic_t *error;
10537 int32_t line;
10538
10541 uint32_t column_start;
10543 uint32_t column_end;
10545} pm_parse_error_t;
10547typedef struct {
10549 const char *number_prefix;
10550
10552 const char *blank_prefix;
10553
10555 const char *divider;
10556
10559 size_t blank_prefix_length;
10562 size_t divider_length;
10563} pm_parse_error_format_t;
10564#define PM_COLOR_BOLD "\033[1m"
10565#define PM_COLOR_GRAY "\033[2m"
10566#define PM_COLOR_RED "\033[1;31m"
10567#define PM_COLOR_RESET "\033[m"
10568#define PM_ERROR_TRUNCATE 30
10569
10570static inline pm_parse_error_t *
10571pm_parse_errors_format_sort(const pm_parser_t *parser, const pm_list_t *error_list, const pm_newline_list_t *newline_list) {
10572 pm_parse_error_t *errors = xcalloc(error_list->size, sizeof(pm_parse_error_t));
10573 if (errors == NULL) return NULL;
10574
10575 int32_t start_line = parser->start_line;
10576 pm_diagnostic_t *finish = (pm_diagnostic_t *) error_list->tail->next;
10577
10578 for (pm_diagnostic_t *error = (pm_diagnostic_t *) error_list->head; error != finish; error = (pm_diagnostic_t *) error->node.next) {
10579 pm_line_column_t start = pm_newline_list_line_column(newline_list, error->location.start, start_line);
10580 pm_line_column_t end = pm_newline_list_line_column(newline_list, error->location.end, start_line);
10581
10582 // We're going to insert this error into the array in sorted order. We
10583 // do this by finding the first error that has a line number greater
10584 // than the current error and then inserting the current error before
10585 // that one.
10586 size_t index = 0;
10587 while (
10588 (index < error_list->size) &&
10589 (errors[index].error != NULL) &&
10590 (
10591 (errors[index].line < start.line) ||
10592 ((errors[index].line == start.line) && (errors[index].column_start < start.column))
10593 )
10594 ) index++;
10595
10596 // Now we're going to shift all of the errors after this one down one
10597 // index to make room for the new error.
10598 if (index + 1 < error_list->size) {
10599 memmove(&errors[index + 1], &errors[index], sizeof(pm_parse_error_t) * (error_list->size - index - 1));
10600 }
10601
10602 // Finally, we'll insert the error into the array.
10603 uint32_t column_end;
10604 if (start.line == end.line) {
10605 column_end = end.column;
10606 } else {
10607 column_end = (uint32_t) (newline_list->offsets[start.line - start_line + 1] - newline_list->offsets[start.line - start_line] - 1);
10608 }
10609
10610 // Ensure we have at least one column of error.
10611 if (start.column == column_end) column_end++;
10612
10613 errors[index] = (pm_parse_error_t) {
10614 .error = error,
10615 .line = start.line,
10616 .column_start = start.column,
10617 .column_end = column_end
10618 };
10619 }
10620
10621 return errors;
10622}
10623
10624/* Append a literal string to the buffer. */
10625#define pm_buffer_append_literal(buffer, str) pm_buffer_append_string(buffer, str, rb_strlen_lit(str))
10626
10627static inline void
10628pm_parse_errors_format_line(const pm_parser_t *parser, const pm_newline_list_t *newline_list, const char *number_prefix, int32_t line, uint32_t column_start, uint32_t column_end, pm_buffer_t *buffer) {
10629 int32_t line_delta = line - parser->start_line;
10630 assert(line_delta >= 0);
10631
10632 size_t index = (size_t) line_delta;
10633 assert(index < newline_list->size);
10634
10635 const uint8_t *start = &parser->start[newline_list->offsets[index]];
10636 const uint8_t *end;
10637
10638 if (index >= newline_list->size - 1) {
10639 end = parser->end;
10640 } else {
10641 end = &parser->start[newline_list->offsets[index + 1]];
10642 }
10643
10644 pm_buffer_append_format(buffer, number_prefix, line);
10645
10646 // Here we determine if we should truncate the end of the line.
10647 bool truncate_end = false;
10648 if ((column_end != 0) && ((end - (start + column_end)) >= PM_ERROR_TRUNCATE)) {
10649 const uint8_t *end_candidate = start + column_end + PM_ERROR_TRUNCATE;
10650
10651 for (const uint8_t *ptr = start; ptr < end_candidate;) {
10652 size_t char_width = parser->encoding->char_width(ptr, parser->end - ptr);
10653
10654 // If we failed to decode a character, then just bail out and
10655 // truncate at the fixed width.
10656 if (char_width == 0) break;
10657
10658 // If this next character would go past the end candidate,
10659 // then we need to truncate before it.
10660 if (ptr + char_width > end_candidate) {
10661 end_candidate = ptr;
10662 break;
10663 }
10664
10665 ptr += char_width;
10666 }
10667
10668 end = end_candidate;
10669 truncate_end = true;
10670 }
10671
10672 // Here we determine if we should truncate the start of the line.
10673 if (column_start >= PM_ERROR_TRUNCATE) {
10674 pm_buffer_append_string(buffer, "... ", 4);
10675 start += column_start;
10676 }
10677
10678 pm_buffer_append_string(buffer, (const char *) start, (size_t) (end - start));
10679
10680 if (truncate_end) {
10681 pm_buffer_append_string(buffer, " ...\n", 5);
10682 } else if (end == parser->end && end[-1] != '\n') {
10683 pm_buffer_append_string(buffer, "\n", 1);
10684 }
10685}
10686
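/**
 * Format the errors on the given list into the buffer, along with the
 * surrounding source lines, caret markers underneath the offending columns,
 * optional ANSI highlighting, and optionally the error messages themselves
 * inline.
 */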
10690static void
10691pm_parse_errors_format(const pm_parser_t *parser, const pm_list_t *error_list, pm_buffer_t *buffer, int highlight, bool inline_messages) {
10692 assert(error_list->size != 0);
10693
10694 // First, we're going to sort all of the errors by line number using an
10695 // insertion sort into a newly allocated array.
10696 const int32_t start_line = parser->start_line;
10697 const pm_newline_list_t *newline_list = &parser->newline_list;
10698
10699 pm_parse_error_t *errors = pm_parse_errors_format_sort(parser, error_list, newline_list);
10700 if (errors == NULL) return;
10701
10702 // Now we're going to determine how we're going to format line numbers and
10703 // blank lines based on the maximum number of digits in the line numbers
10704 // that are going to be displayed.
10705 pm_parse_error_format_t error_format;
10706 int32_t first_line_number = errors[0].line;
10707 int32_t last_line_number = errors[error_list->size - 1].line;
10708
10709 // If we have a maximum line number that is negative, then we're going to
10710 // use the absolute value for comparison but multiply by 10 to additionally
10711 // have a column for the negative sign.
10712 if (first_line_number < 0) first_line_number = (-first_line_number) * 10;
10713 if (last_line_number < 0) last_line_number = (-last_line_number) * 10;
10714 int32_t max_line_number = first_line_number > last_line_number ? first_line_number : last_line_number;
10715
10716 if (max_line_number < 10) {
10717 if (highlight > 0) {
10718 error_format = (pm_parse_error_format_t) {
10719 .number_prefix = PM_COLOR_GRAY "%1" PRIi32 " | " PM_COLOR_RESET,
10720 .blank_prefix = PM_COLOR_GRAY " | " PM_COLOR_RESET,
10721 .divider = PM_COLOR_GRAY " ~~~~~" PM_COLOR_RESET "\n"
10722 };
10723 } else {
10724 error_format = (pm_parse_error_format_t) {
10725 .number_prefix = "%1" PRIi32 " | ",
10726 .blank_prefix = " | ",
10727 .divider = " ~~~~~\n"
10728 };
10729 }
10730 } else if (max_line_number < 100) {
10731 if (highlight > 0) {
10732 error_format = (pm_parse_error_format_t) {
10733 .number_prefix = PM_COLOR_GRAY "%2" PRIi32 " | " PM_COLOR_RESET,
10734 .blank_prefix = PM_COLOR_GRAY " | " PM_COLOR_RESET,
10735 .divider = PM_COLOR_GRAY " ~~~~~~" PM_COLOR_RESET "\n"
10736 };
10737 } else {
10738 error_format = (pm_parse_error_format_t) {
10739 .number_prefix = "%2" PRIi32 " | ",
10740 .blank_prefix = " | ",
10741 .divider = " ~~~~~~\n"
10742 };
10743 }
10744 } else if (max_line_number < 1000) {
10745 if (highlight > 0) {
10746 error_format = (pm_parse_error_format_t) {
10747 .number_prefix = PM_COLOR_GRAY "%3" PRIi32 " | " PM_COLOR_RESET,
10748 .blank_prefix = PM_COLOR_GRAY " | " PM_COLOR_RESET,
10749 .divider = PM_COLOR_GRAY " ~~~~~~~" PM_COLOR_RESET "\n"
10750 };
10751 } else {
10752 error_format = (pm_parse_error_format_t) {
10753 .number_prefix = "%3" PRIi32 " | ",
10754 .blank_prefix = " | ",
10755 .divider = " ~~~~~~~\n"
10756 };
10757 }
10758 } else if (max_line_number < 10000) {
10759 if (highlight > 0) {
10760 error_format = (pm_parse_error_format_t) {
10761 .number_prefix = PM_COLOR_GRAY "%4" PRIi32 " | " PM_COLOR_RESET,
10762 .blank_prefix = PM_COLOR_GRAY " | " PM_COLOR_RESET,
10763 .divider = PM_COLOR_GRAY " ~~~~~~~~" PM_COLOR_RESET "\n"
10764 };
10765 } else {
10766 error_format = (pm_parse_error_format_t) {
10767 .number_prefix = "%4" PRIi32 " | ",
10768 .blank_prefix = " | ",
10769 .divider = " ~~~~~~~~\n"
10770 };
10771 }
10772 } else {
10773 if (highlight > 0) {
10774 error_format = (pm_parse_error_format_t) {
10775 .number_prefix = PM_COLOR_GRAY "%5" PRIi32 " | " PM_COLOR_RESET,
10776 .blank_prefix = PM_COLOR_GRAY " | " PM_COLOR_RESET,
10777 .divider = PM_COLOR_GRAY " ~~~~~~~~" PM_COLOR_RESET "\n"
10778 };
10779 } else {
10780 error_format = (pm_parse_error_format_t) {
10781 .number_prefix = "%5" PRIi32 " | ",
10782 .blank_prefix = " | ",
10783 .divider = " ~~~~~~~~\n"
10784 };
10785 }
10786 }
10787
10788 error_format.blank_prefix_length = strlen(error_format.blank_prefix);
10789 error_format.divider_length = strlen(error_format.divider);
10790
10791 // Now we're going to iterate through every error in our error list and
10792 // display it. While we're iterating, we will display some padding lines of
10793 // the source before the error to give some context. We'll be careful not to
10794 // display the same line twice in case the errors are close together in the
10795 // source.
10796 int32_t last_line = parser->start_line - 1;
10797 uint32_t last_column_start = 0;
10798 const pm_encoding_t *encoding = parser->encoding;
10799
10800 for (size_t index = 0; index < error_list->size; index++) {
10801 pm_parse_error_t *error = &errors[index];
10802
10803 // Here we determine how many lines of padding of the source to display,
10804 // based on the difference from the last line that was displayed.
10805 if (error->line - last_line > 1) {
10806 if (error->line - last_line > 2) {
10807 if ((index != 0) && (error->line - last_line > 3)) {
10808 pm_buffer_append_string(buffer, error_format.divider, error_format.divider_length);
10809 }
10810
10811 pm_buffer_append_string(buffer, "  ", 2);
10812 pm_parse_errors_format_line(parser, newline_list, error_format.number_prefix, error->line - 2, 0, 0, buffer);
10813 }
10814
10815 pm_buffer_append_string(buffer, "  ", 2);
10816 pm_parse_errors_format_line(parser, newline_list, error_format.number_prefix, error->line - 1, 0, 0, buffer);
10817 }
10818
10819 // If this is the first error or we're on a new line, then we'll display
10820 // the line that has the error in it.
10821 if ((index == 0) || (error->line != last_line)) {
10822 if (highlight > 1) {
10823 pm_buffer_append_literal(buffer, PM_COLOR_RED "> " PM_COLOR_RESET);
10824 } else if (highlight > 0) {
10825 pm_buffer_append_literal(buffer, PM_COLOR_BOLD "> " PM_COLOR_RESET);
10826 } else {
10827 pm_buffer_append_literal(buffer, "> ");
10828 }
10829
10830 last_column_start = error->column_start;
10831
10832 // Find the maximum column end of all the errors on this line.
10833 uint32_t column_end = error->column_end;
10834 for (size_t next_index = index + 1; next_index < error_list->size; next_index++) {
10835 if (errors[next_index].line != error->line) break;
10836 if (errors[next_index].column_end > column_end) column_end = errors[next_index].column_end;
10837 }
10838
10839 pm_parse_errors_format_line(parser, newline_list, error_format.number_prefix, error->line, error->column_start, column_end, buffer);
10840 }
10841
10842 const uint8_t *start = &parser->start[newline_list->offsets[error->line - start_line]];
10843 if (start == parser->end) pm_buffer_append_byte(buffer, '\n');
10844
10845 // Now we'll display the actual error message. We'll do this by first
10846 // putting the prefix to the line, then a bunch of blank spaces
10847 // depending on the column, then as many carets as we need to display
10848 // the width of the error, then the error message itself.
10849 //
10850 // Note that this doesn't take into account the width of the actual
10851 // character when displayed in the terminal. For some East Asian
10852 // languages or emoji, this means it can be thrown off pretty badly. We
10853 // will need to solve this eventually.
10854 pm_buffer_append_string(buffer, "  ", 2);
10855 pm_buffer_append_string(buffer, error_format.blank_prefix, error_format.blank_prefix_length);
10856
10857 size_t column = 0;
10858 if (last_column_start >= PM_ERROR_TRUNCATE) {
10859 pm_buffer_append_string(buffer, "    ", 4);
10860 column = last_column_start;
10861 }
10862
10863 while (column < error->column_start) {
10864 pm_buffer_append_byte(buffer, ' ');
10865
10866 size_t char_width = encoding->char_width(start + column, parser->end - (start + column));
10867 column += (char_width == 0 ? 1 : char_width);
10868 }
10869
10870 if (highlight > 1) pm_buffer_append_literal(buffer, PM_COLOR_RED);
10871 else if (highlight > 0) pm_buffer_append_literal(buffer, PM_COLOR_BOLD);
10872 pm_buffer_append_byte(buffer, '^');
10873
10874 size_t char_width = encoding->char_width(start + column, parser->end - (start + column));
10875 column += (char_width == 0 ? 1 : char_width);
10876
10877 while (column < error->column_end) {
10878 pm_buffer_append_byte(buffer, '~');
10879
10880 size_t char_width = encoding->char_width(start + column, parser->end - (start + column));
10881 column += (char_width == 0 ? 1 : char_width);
10882 }
10883
10884 if (highlight > 0) pm_buffer_append_literal(buffer, PM_COLOR_RESET);
10885
10886 if (inline_messages) {
10887 pm_buffer_append_byte(buffer, ' ');
10888 assert(error->error != NULL);
10889
10890 const char *message = error->error->message;
10891 pm_buffer_append_string(buffer, message, strlen(message));
10892 }
10893
10894 pm_buffer_append_byte(buffer, '\n');
10895
10896 // Here we determine how many lines of padding to display after the
10897 // error, depending on where the next error is in source.
10898 last_line = error->line;
10899 int32_t next_line;
10900
10901 if (index == error_list->size - 1) {
10902 next_line = (((int32_t) newline_list->size) + parser->start_line);
10903
10904 // If the file ends with a newline, subtract one from our "next_line"
10905 // so that we don't output an extra line at the end of the file
10906 if ((parser->start + newline_list->offsets[newline_list->size - 1]) == parser->end) {
10907 next_line--;
10908 }
10909 }
10910 else {
10911 next_line = errors[index + 1].line;
10912 }
10913
10914 if (next_line - last_line > 1) {
10915 pm_buffer_append_string(buffer, "  ", 2);
10916 pm_parse_errors_format_line(parser, newline_list, error_format.number_prefix, ++last_line, 0, 0, buffer);
10917 }
10918
10919 if (next_line - last_line > 1) {
10920 pm_buffer_append_string(buffer, "  ", 2);
10921 pm_parse_errors_format_line(parser, newline_list, error_format.number_prefix, ++last_line, 0, 0, buffer);
10922 }
10923 }
10924
10925 // Finally, we'll free the array of errors that we allocated.
10926 xfree(errors);
10927}
10928
10929#undef PM_ERROR_TRUNCATE
10930#undef PM_COLOR_GRAY
10931#undef PM_COLOR_RED
10932#undef PM_COLOR_RESET
10933
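/**
 * Returns true if every source line covered by the given location is valid
 * UTF-8, which determines whether it is safe to embed those lines in an
 * error message.
 */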
10940static bool
10941pm_parse_process_error_utf8_p(const pm_parser_t *parser, const pm_location_t *location)
10942{
10943 const size_t start_line = pm_newline_list_line_column(&parser->newline_list, location->start, 1).line;
10944 const size_t end_line = pm_newline_list_line_column(&parser->newline_list, location->end, 1).line;
10945
10946 const uint8_t *start = parser->start + parser->newline_list.offsets[start_line - 1];
10947 const uint8_t *end = ((end_line == parser->newline_list.size) ? parser->end : (parser->start + parser->newline_list.offsets[end_line]));
10948 size_t width;
10949
10950 while (start < end) {
10951 if ((width = pm_encoding_utf_8_char_width(start, end - start)) == 0) return false;
10952 start += width;
10953 }
10954
10955 return true;
10956}
10957
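/**
 * Build and return the Ruby exception that should be raised for the parser's
 * errors: an ArgumentError for argument-level errors, a LoadError for load
 * errors, and otherwise a SyntaxError that includes source excerpts whenever
 * the relevant source is valid UTF-8.
 */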
10962static VALUE
10963pm_parse_process_error(const pm_parse_result_t *result)
10964{
10965 const pm_parser_t *parser = &result->parser;
10966 const pm_diagnostic_t *head = (const pm_diagnostic_t *) parser->error_list.head;
10967 bool valid_utf8 = true;
10968
10969 pm_buffer_t buffer = { 0 };
10970 const pm_string_t *filepath = &parser->filepath;
10971
10972 int highlight = rb_stderr_tty_p();
10973 if (highlight) {
10974 const char *no_color = getenv("NO_COLOR");
10975 highlight = (no_color == NULL || no_color[0] == '\0') ? 2 : 1;
10976 }
10977
10978 for (const pm_diagnostic_t *error = head; error != NULL; error = (const pm_diagnostic_t *) error->node.next) {
10979 switch (error->level) {
10980 case PM_ERROR_LEVEL_SYNTAX:
10981 // It is implicitly assumed that the error messages will be
10982 // encodable as UTF-8. Because of this, we can't include source
10983 // examples that contain invalid byte sequences. So if any source
10984 // examples include invalid UTF-8 byte sequences, we will skip
10985 // showing source examples entirely.
10986 if (valid_utf8 && !pm_parse_process_error_utf8_p(parser, &error->location)) {
10987 valid_utf8 = false;
10988 }
10989 break;
10990 case PM_ERROR_LEVEL_ARGUMENT: {
10991 // An error with the level PM_ERROR_LEVEL_ARGUMENT takes over as the
10992 // only error that gets raised. This is to allow priority
10993 // messages that should be handled before anything else.
10994 int32_t line_number = (int32_t) pm_location_line_number(parser, &error->location);
10995
10996 pm_buffer_append_format(
10997 &buffer,
10998 "%.*s:%" PRIi32 ": %s",
10999 (int) pm_string_length(filepath),
11000 pm_string_source(filepath),
11001 line_number,
11002 error->message
11003 );
11004
11005 if (pm_parse_process_error_utf8_p(parser, &error->location)) {
11006 pm_buffer_append_byte(&buffer, '\n');
11007
11008 pm_list_node_t *list_node = (pm_list_node_t *) error;
11009 pm_list_t error_list = { .size = 1, .head = list_node, .tail = list_node };
11010
11011 pm_parse_errors_format(parser, &error_list, &buffer, highlight, false);
11012 }
11013
11014 VALUE value = rb_exc_new(rb_eArgError, pm_buffer_value(&buffer), pm_buffer_length(&buffer));
11015 pm_buffer_free(&buffer);
11016
11017 return value;
11018 }
11019 case PM_ERROR_LEVEL_LOAD: {
11020 // Load errors are much simpler, because they don't include any of
11021 // the source in them. We create the error directly from the
11022 // message.
11023 VALUE message = rb_enc_str_new_cstr(error->message, rb_locale_encoding());
11024 VALUE value = rb_exc_new3(rb_eLoadError, message);
11025 rb_ivar_set(value, rb_intern_const("@path"), Qnil);
11026 return value;
11027 }
11028 }
11029 }
11030
11031 pm_buffer_append_format(
11032 &buffer,
11033 "%.*s:%" PRIi32 ": syntax error%s found\n",
11034 (int) pm_string_length(filepath),
11035 pm_string_source(filepath),
11036 (int32_t) pm_location_line_number(parser, &head->location),
11037 (parser->error_list.size > 1) ? "s" : ""
11038 );
11039
11040 if (valid_utf8) {
11041 pm_parse_errors_format(parser, &parser->error_list, &buffer, highlight, true);
11042 }
11043 else {
11044 for (const pm_diagnostic_t *error = head; error != NULL; error = (const pm_diagnostic_t *) error->node.next) {
11045 if (error != head) pm_buffer_append_byte(&buffer, '\n');
11046 pm_buffer_append_format(&buffer, "%.*s:%" PRIi32 ": %s", (int) pm_string_length(filepath), pm_string_source(filepath), (int32_t) pm_location_line_number(parser, &error->location), error->message);
11047 }
11048 }
11049
11050 VALUE message = rb_enc_str_new(pm_buffer_value(&buffer), pm_buffer_length(&buffer), result->node.encoding);
11051 VALUE error = rb_exc_new_str(rb_eSyntaxError, message);
11052
11053 rb_encoding *filepath_encoding = result->node.filepath_encoding != NULL ? result->node.filepath_encoding : rb_utf8_encoding();
11054 VALUE path = rb_enc_str_new((const char *) pm_string_source(filepath), pm_string_length(filepath), filepath_encoding);
11055
11056 rb_ivar_set(error, rb_intern_const("@path"), path);
11057 pm_buffer_free(&buffer);
11058
11059 return error;
11060}
11061
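/**
 * Attach the parsed AST to the scope node in the result, optionally collect
 * the script lines, emit any parse warnings, and set up the constant and
 * local tables needed for compilation. Returns a Ruby exception if the parser
 * recorded errors, or Qnil on success.
 */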
11067static VALUE
11068pm_parse_process(pm_parse_result_t *result, pm_node_t *node, VALUE *script_lines)
11069{
11070 pm_parser_t *parser = &result->parser;
11071
11072 // First, set up the scope node so that the AST node is attached and can be
11073 // freed regardless of whether or not we return an error.
11074 pm_scope_node_t *scope_node = &result->node;
11075 rb_encoding *filepath_encoding = scope_node->filepath_encoding;
11076 int coverage_enabled = scope_node->coverage_enabled;
11077
11078 pm_scope_node_init(node, scope_node, NULL);
11079 scope_node->filepath_encoding = filepath_encoding;
11080
11081 scope_node->encoding = rb_enc_find(parser->encoding->name);
11082 if (!scope_node->encoding) rb_bug("Encoding not found %s!", parser->encoding->name);
11083
11084 scope_node->coverage_enabled = coverage_enabled;
11085
11086 // If RubyVM.keep_script_lines is set to true, then we need to create that
11087 // array of script lines here.
11088 if (script_lines != NULL) {
11089 *script_lines = rb_ary_new_capa(parser->newline_list.size);
11090
11091 for (size_t index = 0; index < parser->newline_list.size; index++) {
11092 size_t offset = parser->newline_list.offsets[index];
11093 size_t length = index == parser->newline_list.size - 1 ? ((size_t) (parser->end - (parser->start + offset))) : (parser->newline_list.offsets[index + 1] - offset);
11094 rb_ary_push(*script_lines, rb_enc_str_new((const char *) parser->start + offset, length, scope_node->encoding));
11095 }
11096
11097 scope_node->script_lines = script_lines;
11098 }
11099
11100 // Emit all of the various warnings from the parse.
11101 const pm_diagnostic_t *warning;
11102 const char *warning_filepath = (const char *) pm_string_source(&parser->filepath);
11103
11104 for (warning = (const pm_diagnostic_t *) parser->warning_list.head; warning != NULL; warning = (const pm_diagnostic_t *) warning->node.next) {
11105 int line = pm_location_line_number(parser, &warning->location);
11106
11107 if (warning->level == PM_WARNING_LEVEL_VERBOSE) {
11108 rb_enc_compile_warning(scope_node->encoding, warning_filepath, line, "%s", warning->message);
11109 }
11110 else {
11111 rb_enc_compile_warn(scope_node->encoding, warning_filepath, line, "%s", warning->message);
11112 }
11113 }
11114
11115 // If there are errors, raise an appropriate error and free the result.
11116 if (parser->error_list.size > 0) {
11117 VALUE error = pm_parse_process_error(result);
11118
11119 // TODO: We need to set the backtrace.
11120 // rb_funcallv(error, rb_intern("set_backtrace"), 1, &path);
11121 return error;
11122 }
11123
11124 // Now set up the constant pool and intern all of the various constants into
11125 // their corresponding IDs.
11126 scope_node->parser = parser;
11127 scope_node->constants = parser->constant_pool.size ? xcalloc(parser->constant_pool.size, sizeof(ID)) : NULL;
11128
11129 for (uint32_t index = 0; index < parser->constant_pool.size; index++) {
11130 pm_constant_t *constant = &parser->constant_pool.constants[index];
11131 scope_node->constants[index] = rb_intern3((const char *) constant->start, constant->length, scope_node->encoding);
11132 }
11133
11134 scope_node->index_lookup_table = st_init_numtable();
11135 pm_constant_id_list_t *locals = &scope_node->locals;
11136 for (size_t index = 0; index < locals->size; index++) {
11137 st_insert(scope_node->index_lookup_table, locals->ids[index], index);
11138 }
11139
11140 // If we got here, this is a success and we can return Qnil to indicate that
11141 // no error should be raised.
11142 result->parsed = true;
11143 return Qnil;
11144}
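
// A minimal standalone sketch of the constant pool walk that the interning
// loop above performs: instead of rb_intern3()-ing each constant into a Ruby
// ID, it just prints the constant's bytes. The example_* helper is
// illustrative and assumes pm_parse() has already been run on the parser.
#include <inttypes.h>
#include <stdio.h>
#include "prism.h"

static void
example_dump_constant_pool(const pm_parser_t *parser)
{
    for (uint32_t index = 0; index < parser->constant_pool.size; index++) {
        const pm_constant_t *constant = &parser->constant_pool.constants[index];
        printf("constant %" PRIu32 ": %.*s\n", index, (int) constant->length, (const char *) constant->start);
    }
}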
11145
11150static void
11151pm_options_frozen_string_literal_init(pm_options_t *options)
11152{
11153 int frozen_string_literal = rb_iseq_opt_frozen_string_literal();
11154
11155 switch (frozen_string_literal) {
11156 case ISEQ_FROZEN_STRING_LITERAL_UNSET:
11157 break;
11158 case ISEQ_FROZEN_STRING_LITERAL_DISABLED:
11159 pm_options_frozen_string_literal_set(options, false);
11160 break;
11161 case ISEQ_FROZEN_STRING_LITERAL_ENABLED:
11162 pm_options_frozen_string_literal_set(options, true);
11163 break;
11164 default:
11165 rb_bug("pm_options_frozen_string_literal_init: invalid frozen_string_literal=%d", frozen_string_literal);
11166 break;
11167 }
11168}
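
// A minimal standalone sketch of the prism option this function forwards to.
// Outside the VM there is no rb_iseq_opt_frozen_string_literal(), so the
// tri-state collapses to a boolean supplied by the embedder; the example_*
// helper is illustrative.
#include <stdbool.h>
#include "prism.h"

static void
example_configure_options(bool frozen_string_literal)
{
    pm_options_t options = { 0 };

    // Leaving the option untouched corresponds to the UNSET case above;
    // otherwise mirror the ENABLED/DISABLED branches with a plain boolean.
    pm_options_frozen_string_literal_set(&options, frozen_string_literal);

    // ... pm_parser_init(&parser, source, size, &options) would use it here ...

    pm_options_free(&options);
}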
11169
11174static inline VALUE
11175pm_parse_file_script_lines(const pm_scope_node_t *scope_node, const pm_parser_t *parser)
11176{
11177 const pm_newline_list_t *newline_list = &parser->newline_list;
11178 const char *start = (const char *) parser->start;
11179 const char *end = (const char *) parser->end;
11180
11181 // If we end exactly on a newline, then there's no need to push on a final
11182 // segment. If we don't, then we need to push on the last offset up to the
11183 // end of the string.
11184 size_t last_offset = newline_list->offsets[newline_list->size - 1];
11185 bool last_push = start + last_offset != end;
11186
11187 // Create the ruby strings that represent the lines of the source.
11188 VALUE lines = rb_ary_new_capa(newline_list->size - (last_push ? 0 : 1));
11189
11190 for (size_t index = 0; index < newline_list->size - 1; index++) {
11191 size_t offset = newline_list->offsets[index];
11192 size_t length = newline_list->offsets[index + 1] - offset;
11193
11194 rb_ary_push(lines, rb_enc_str_new(start + offset, length, scope_node->encoding));
11195 }
11196
11197 // Push on the last line if we need to.
11198 if (last_push) {
11199 rb_ary_push(lines, rb_enc_str_new(start + last_offset, end - (start + last_offset), scope_node->encoding));
11200 }
11201
11202 return lines;
11203}
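
// Worked example of the split above, assuming prism's convention that
// newline_list.offsets starts at 0 and records the offset just past each
// newline: for the source "a\nb\n" the offsets are {0, 2, 4}; the source ends
// exactly on the last offset, so last_push is false and two lines are pushed
// ("a\n" and "b\n"). For "a\nb" the offsets are {0, 2}; the source extends
// past the last offset, so last_push is true and the trailing "b" (with no
// newline) is pushed as the second line.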
11204
11205// This is essentially pm_string_mapped_init(), preferring to memory map the
11206// file, with additional handling for files that require blocking to properly
11207// read (e.g. pipes).
11208static pm_string_init_result_t
11209pm_read_file(pm_string_t *string, const char *filepath)
11210{
11211#ifdef _WIN32
11212 // Open the file for reading.
11213 int length = MultiByteToWideChar(CP_UTF8, 0, filepath, -1, NULL, 0);
11214 if (length == 0) return PM_STRING_INIT_ERROR_GENERIC;
11215
11216 WCHAR *wfilepath = xmalloc(sizeof(WCHAR) * ((size_t) length));
11217 if ((wfilepath == NULL) || (MultiByteToWideChar(CP_UTF8, 0, filepath, -1, wfilepath, length) == 0)) {
11218 xfree(wfilepath);
11219 return PM_STRING_INIT_ERROR_GENERIC;
11220 }
11221
11222 HANDLE file = CreateFileW(wfilepath, GENERIC_READ, FILE_SHARE_READ | FILE_SHARE_WRITE, NULL, OPEN_EXISTING, FILE_ATTRIBUTE_READONLY, NULL);
11223 if (file == INVALID_HANDLE_VALUE) {
11224 pm_string_init_result_t result = PM_STRING_INIT_ERROR_GENERIC;
11225
11226 if (GetLastError() == ERROR_ACCESS_DENIED) {
11227 DWORD attributes = GetFileAttributesW(wfilepath);
11228 if ((attributes != INVALID_FILE_ATTRIBUTES) && (attributes & FILE_ATTRIBUTE_DIRECTORY)) {
11229 result = PM_STRING_INIT_ERROR_DIRECTORY;
11230 }
11231 }
11232
11233 xfree(wfilepath);
11234 return result;
11235 }
11236
11237 // Get the file size.
11238 DWORD file_size = GetFileSize(file, NULL);
11239 if (file_size == INVALID_FILE_SIZE) {
11240 CloseHandle(file);
11241 xfree(wfilepath);
11242 return PM_STRING_INIT_ERROR_GENERIC;
11243 }
11244
11245 // If the file is empty, then we don't need to do anything else, we'll set
11246 // the source to a constant empty string and return.
11247 if (file_size == 0) {
11248 CloseHandle(file);
11249 xfree(wfilepath);
11250 const uint8_t source[] = "";
11251 *string = (pm_string_t) { .type = PM_STRING_CONSTANT, .source = source, .length = 0 };
11252 return PM_STRING_INIT_SUCCESS;
11253 }
11254
11255 // Create a mapping of the file.
11256 HANDLE mapping = CreateFileMapping(file, NULL, PAGE_READONLY, 0, 0, NULL);
11257 if (mapping == NULL) {
11258 CloseHandle(file);
11259 xfree(wfilepath);
11260 return PM_STRING_INIT_ERROR_GENERIC;
11261 }
11262
11263 // Map the file into memory.
11264 uint8_t *source = (uint8_t *) MapViewOfFile(mapping, FILE_MAP_READ, 0, 0, 0);
11265 CloseHandle(mapping);
11266 CloseHandle(file);
11267 xfree(wfilepath);
11268
11269 if (source == NULL) {
11270 return PM_STRING_INIT_ERROR_GENERIC;
11271 }
11272
11273 *string = (pm_string_t) { .type = PM_STRING_MAPPED, .source = source, .length = (size_t) file_size };
11274 return PM_STRING_INIT_SUCCESS;
11275#elif defined(_POSIX_MAPPED_FILES)
11276 // Open the file for reading
11277 const int open_mode = O_RDONLY | O_NONBLOCK;
11278 int fd = open(filepath, open_mode);
11279 if (fd == -1) {
11280 return PM_STRING_INIT_ERROR_GENERIC;
11281 }
11282
11283 // Stat the file to get the file size
11284 struct stat sb;
11285 if (fstat(fd, &sb) == -1) {
11286 close(fd);
11287 return PM_STRING_INIT_ERROR_GENERIC;
11288 }
11289
11290 // Ensure it is a file and not a directory
11291 if (S_ISDIR(sb.st_mode)) {
11292 close(fd);
11293 return PM_STRING_INIT_ERROR_DIRECTORY;
11294 }
11295
11296 // We need to wait for data first before reading from pipes and character
11297 // devices. To not block the entire VM, we need to release the GVL while
11298 // reading. Use IO#read to do this and let the GC handle closing the FD.
11299 if (S_ISFIFO(sb.st_mode) || S_ISCHR(sb.st_mode)) {
11300 VALUE io = rb_io_fdopen((int) fd, open_mode, filepath);
11301 rb_obj_hide(io);
11302 VALUE contents = rb_funcall(io, rb_intern("read"), 0);
11303
11304 if (!RB_TYPE_P(contents, T_STRING)) {
11305 return PM_STRING_INIT_ERROR_GENERIC;
11306 }
11307
11308 long len = RSTRING_LEN(contents);
11309 if (len < 0) {
11310 return PM_STRING_INIT_ERROR_GENERIC;
11311 }
11312
11313 size_t length = (size_t) len;
11314 uint8_t *source = malloc(length);
11315 memcpy(source, RSTRING_PTR(contents), length);
11316 *string = (pm_string_t) { .type = PM_STRING_OWNED, .source = source, .length = length };
11317
11318 return PM_STRING_INIT_SUCCESS;
11319 }
11320
11321 // mmap the file descriptor to virtually get the contents
11322 size_t size = (size_t) sb.st_size;
11323 uint8_t *source = NULL;
11324
11325 if (size == 0) {
11326 close(fd);
11327 const uint8_t source[] = "";
11328 *string = (pm_string_t) { .type = PM_STRING_CONSTANT, .source = source, .length = 0 };
11329 return PM_STRING_INIT_SUCCESS;
11330 }
11331
11332 source = mmap(NULL, size, PROT_READ, MAP_PRIVATE, fd, 0);
11333 if (source == MAP_FAILED) {
11334 close(fd);
11335 return PM_STRING_INIT_ERROR_GENERIC;
11336 }
11337
11338 close(fd);
11339 *string = (pm_string_t) { .type = PM_STRING_MAPPED, .source = source, .length = size };
11340 return PM_STRING_INIT_SUCCESS;
11341#else
11342 return pm_string_file_init(string, filepath);
11343#endif
11344}
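
// A minimal standalone POSIX sketch of the same strategy pm_read_file() uses,
// without the Ruby IO detour (which is only needed inside the VM so the read
// can release the GVL): mmap regular files, fall back to a blocking read()
// loop for pipes and character devices. The example_* helper is illustrative
// and reduces all error handling to returning false.
#include <fcntl.h>
#include <stdbool.h>
#include <stdlib.h>
#include <sys/mman.h>
#include <sys/stat.h>
#include <unistd.h>

static bool
example_read_source(const char *filepath, char **out, size_t *out_size)
{
    // O_NONBLOCK so that open() itself cannot hang on an unconnected FIFO.
    int fd = open(filepath, O_RDONLY | O_NONBLOCK);
    if (fd == -1) return false;

    struct stat sb;
    if (fstat(fd, &sb) == -1 || S_ISDIR(sb.st_mode)) {
        close(fd);
        return false;
    }

    if (S_ISFIFO(sb.st_mode) || S_ISCHR(sb.st_mode)) {
        // Switch back to blocking mode before reading the stream to its end.
        fcntl(fd, F_SETFL, fcntl(fd, F_GETFL) & ~O_NONBLOCK);

        size_t capacity = 4096, size = 0;
        char *buffer = malloc(capacity);
        ssize_t nread;

        while (buffer != NULL && (nread = read(fd, buffer + size, capacity - size)) > 0) {
            size += (size_t) nread;
            if (size == capacity) {
                char *grown = realloc(buffer, capacity * 2);
                if (grown == NULL) { free(buffer); buffer = NULL; break; }
                buffer = grown;
                capacity *= 2;
            }
        }

        close(fd);
        if (buffer == NULL) return false;

        *out = buffer;
        *out_size = size;
        return true;
    }

    // Regular file: map it read-only, mirroring the PM_STRING_MAPPED branch.
    size_t size = (size_t) sb.st_size;
    char *source = (size == 0) ? NULL : mmap(NULL, size, PROT_READ, MAP_PRIVATE, fd, 0);
    close(fd);

    if (size != 0 && source == MAP_FAILED) return false;

    *out = source;
    *out_size = size;
    return true;
}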
11345
11350VALUE
11351pm_load_file(pm_parse_result_t *result, VALUE filepath, bool load_error)
11352{
11353 pm_string_init_result_t init_result = pm_read_file(&result->input, RSTRING_PTR(filepath));
11354
11355 if (init_result == PM_STRING_INIT_SUCCESS) {
11356 pm_options_frozen_string_literal_init(&result->options);
11357 return Qnil;
11358 }
11359
11360 int err;
11361 if (init_result == PM_STRING_INIT_ERROR_DIRECTORY) {
11362 err = EISDIR;
11363 } else {
11364#ifdef _WIN32
11365 err = rb_w32_map_errno(GetLastError());
11366#else
11367 err = errno;
11368#endif
11369 }
11370
11371 VALUE error;
11372 if (load_error) {
11373 VALUE message = rb_str_buf_new_cstr(strerror(err));
11374 rb_str_cat2(message, " -- ");
11375 rb_str_append(message, filepath);
11376
11377 error = rb_exc_new3(rb_eLoadError, message);
11378 rb_ivar_set(error, rb_intern_const("@path"), filepath);
11379 } else {
11380 error = rb_syserr_new(err, RSTRING_PTR(filepath));
11381 RB_GC_GUARD(filepath);
11382 }
11383
11384 return error;
11385}
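
// A minimal sketch of the message construction above without the exception
// objects: map a failed read to errno (EISDIR for the directory case) and
// format the same "strerror -- path" text that ends up in the LoadError
// message. The example_* helper is illustrative.
#include <errno.h>
#include <stdio.h>
#include <string.h>
#include "prism.h"

static void
example_report_read_failure(pm_string_init_result_t init_result, const char *filepath)
{
    int err = (init_result == PM_STRING_INIT_ERROR_DIRECTORY) ? EISDIR : errno;
    fprintf(stderr, "%s -- %s\n", strerror(err), filepath);
}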
11386
11393VALUE
11394pm_parse_file(pm_parse_result_t *result, VALUE filepath, VALUE *script_lines)
11395{
11396 result->node.filepath_encoding = rb_enc_get(filepath);
11397 pm_options_filepath_set(&result->options, RSTRING_PTR(filepath));
11398 RB_GC_GUARD(filepath);
11399
11400 pm_options_version_for_current_ruby_set(&result->options);
11401
11402 pm_parser_init(&result->parser, pm_string_source(&result->input), pm_string_length(&result->input), &result->options);
11403 pm_node_t *node = pm_parse(&result->parser);
11404
11405 VALUE error = pm_parse_process(result, node, script_lines);
11406
11407 // If we're parsing a filepath, then we need to potentially support the
11408 // SCRIPT_LINES__ constant, which can be a hash that has an array of lines
11409 // of every read file.
11410 ID id_script_lines = rb_intern("SCRIPT_LINES__");
11411
11412 if (rb_const_defined_at(rb_cObject, id_script_lines)) {
11413 VALUE constant_script_lines = rb_const_get_at(rb_cObject, id_script_lines);
11414
11415 if (RB_TYPE_P(constant_script_lines, T_HASH)) {
11416 rb_hash_aset(constant_script_lines, filepath, pm_parse_file_script_lines(&result->node, &result->parser));
11417 }
11418 }
11419
11420 return error;
11421}
11422
11427VALUE
11428pm_load_parse_file(pm_parse_result_t *result, VALUE filepath, VALUE *script_lines)
11429{
11430 VALUE error = pm_load_file(result, filepath, false);
11431 if (NIL_P(error)) {
11432 error = pm_parse_file(result, filepath, script_lines);
11433 }
11434
11435 return error;
11436}
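
// A minimal standalone sketch of the load-then-parse pipeline that
// pm_load_file() and pm_parse_file() implement on top of the VM: read the
// file into a pm_string_t, point the options at the filepath, parse, then
// tear everything down in reverse order. The example_* helper is
// illustrative; pm_string_file_init() is the portable cousin of the
// pm_read_file() helper above.
#include <stdbool.h>
#include <stdio.h>
#include "prism.h"

static bool
example_load_parse_file(const char *filepath)
{
    pm_string_t input;
    if (pm_string_file_init(&input, filepath) != PM_STRING_INIT_SUCCESS) return false;

    pm_options_t options = { 0 };
    pm_options_filepath_set(&options, filepath);

    pm_parser_t parser;
    pm_parser_init(&parser, pm_string_source(&input), pm_string_length(&input), &options);

    pm_node_t *node = pm_parse(&parser);
    printf("%s: parsed with %zu error(s)\n", filepath, parser.error_list.size);

    pm_node_destroy(&parser, node);
    pm_parser_free(&parser);
    pm_options_free(&options);
    pm_string_free(&input);
    return true;
}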
11437
11444VALUE
11445pm_parse_string(pm_parse_result_t *result, VALUE source, VALUE filepath, VALUE *script_lines)
11446{
11447 rb_encoding *encoding = rb_enc_get(source);
11448 if (!rb_enc_asciicompat(encoding)) {
11449 return rb_exc_new_cstr(rb_eArgError, "invalid source encoding");
11450 }
11451
11452 pm_options_frozen_string_literal_init(&result->options);
11453 pm_string_constant_init(&result->input, RSTRING_PTR(source), RSTRING_LEN(source));
11454 pm_options_encoding_set(&result->options, rb_enc_name(encoding));
11455
11456 result->node.filepath_encoding = rb_enc_get(filepath);
11457 pm_options_filepath_set(&result->options, RSTRING_PTR(filepath));
11458 RB_GC_GUARD(filepath);
11459
11460 pm_options_version_for_current_ruby_set(&result->options);
11461
11462 pm_parser_init(&result->parser, pm_string_source(&result->input), pm_string_length(&result->input), &result->options);
11463 pm_node_t *node = pm_parse(&result->parser);
11464
11465 return pm_parse_process(result, node, script_lines);
11466}
11467
11468struct rb_stdin_wrapper {
11469 VALUE rb_stdin;
11470 int eof_seen;
11471};
11472
11473static int
11474pm_parse_stdin_eof(void *stream)
11475{
11476 struct rb_stdin_wrapper * wrapped_stdin = (struct rb_stdin_wrapper *)stream;
11477 return wrapped_stdin->eof_seen;
11478}
11479
11480VALUE rb_io_gets_limit_internal(VALUE io, long limit);
11481
11485static char *
11486pm_parse_stdin_fgets(char *string, int size, void *stream)
11487{
11488 RUBY_ASSERT(size > 0);
11489
11490 struct rb_stdin_wrapper * wrapped_stdin = (struct rb_stdin_wrapper *)stream;
11491
11492 VALUE line = rb_io_gets_limit_internal(wrapped_stdin->rb_stdin, size - 1);
11493 if (NIL_P(line)) {
11494 return NULL;
11495 }
11496
11497 const char *cstr = RSTRING_PTR(line);
11498 long length = RSTRING_LEN(line);
11499
11500 memcpy(string, cstr, length);
11501 string[length] = '\0';
11502
11503 // We're reading strings from stdin via gets. We'll assume that if the
11504 // string is shorter than the requested length and doesn't end with a
11505 // newline, we've hit EOF.
11506 if (length < (size - 1) && string[length - 1] != '\n') {
11507 wrapped_stdin->eof_seen = 1;
11508 }
11509
11510 return string;
11511}
11512
11513// We need access to this function when we're done parsing stdin.
11514void rb_reset_argf_lineno(long n);
11515
11521VALUE
11522pm_parse_stdin(pm_parse_result_t *result)
11523{
11524 pm_options_frozen_string_literal_init(&result->options);
11525
11526 struct rb_stdin_wrapper wrapped_stdin = {
11527 rb_stdin,
11528 0
11529 };
11530
11531 pm_buffer_t buffer;
11532 pm_node_t *node = pm_parse_stream(&result->parser, &buffer, (void *) &wrapped_stdin, pm_parse_stdin_fgets, pm_parse_stdin_eof, &result->options);
11533
11534 // Copy the allocated buffer contents into the input string so that it gets
11535 // freed. At this point we've handed over ownership, so we don't need to
11536 // free the buffer itself.
11537 pm_string_owned_init(&result->input, (uint8_t *) pm_buffer_value(&buffer), pm_buffer_length(&buffer));
11538
11539 // When we're done parsing, we reset $. because we don't want the fact that
11540 // we went through an IO object to be visible to the user.
11541 rb_reset_argf_lineno(0);
11542
11543 return pm_parse_process(result, node, NULL);
11544}
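
// A minimal standalone sketch of pm_parse_stream() driven by a plain FILE *
// instead of the rb_stdin_wrapper above: the two callbacks simply forward to
// fgets()/feof(), matching the signatures of pm_parse_stdin_fgets() and
// pm_parse_stdin_eof(). The example_* helpers are illustrative.
#include <stdio.h>
#include "prism.h"

static char *
example_stream_fgets(char *string, int size, void *stream)
{
    return fgets(string, size, (FILE *) stream);
}

static int
example_stream_feof(void *stream)
{
    return feof((FILE *) stream);
}

static pm_node_t *
example_parse_stream(pm_parser_t *parser, pm_buffer_t *buffer, FILE *stream)
{
    // The buffer accumulates everything that was read from the stream; it
    // must outlive the returned tree (or be handed off, as
    // pm_string_owned_init() does above).
    return pm_parse_stream(parser, buffer, (void *) stream, example_stream_fgets, example_stream_feof, NULL);
}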
11545
11546#define PM_VERSION_FOR_RELEASE(major, minor) PM_VERSION_FOR_RELEASE_IMPL(major, minor)
11547#define PM_VERSION_FOR_RELEASE_IMPL(major, minor) PM_OPTIONS_VERSION_CRUBY_##major##_##minor
11548
11549void pm_options_version_for_current_ruby_set(pm_options_t *options) {
11550 options->version = PM_VERSION_FOR_RELEASE(RUBY_API_VERSION_MAJOR, RUBY_API_VERSION_MINOR);
11551}
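
// For example, with RUBY_API_VERSION_MAJOR == 3 and RUBY_API_VERSION_MINOR == 4
// the pair of macros above expands to the enumerator PM_OPTIONS_VERSION_CRUBY_3_4.
// The extra PM_VERSION_FOR_RELEASE_IMPL level is what forces the version macros
// to be expanded to their numeric values before ## pastes them into the token.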
11552
11553#undef NEW_ISEQ
11554#define NEW_ISEQ OLD_ISEQ
11555
11556#undef NEW_CHILD_ISEQ
11557#define NEW_CHILD_ISEQ OLD_CHILD_ISEQ
#define RUBY_ASSERT(...)
Asserts that the given expression is truthy if and only if RUBY_DEBUG is truthy.
Definition assert.h:219
@ PM_WARNING_LEVEL_VERBOSE
For warnings which should be emitted if $VERBOSE == true.
Definition diagnostic.h:415
@ PM_ERROR_LEVEL_ARGUMENT
For errors that should raise an argument error.
Definition diagnostic.h:401
@ PM_ERROR_LEVEL_LOAD
For errors that should raise a load error.
Definition diagnostic.h:404
@ PM_ERROR_LEVEL_SYNTAX
For errors that should raise a syntax error.
Definition diagnostic.h:398
#define RUBY_EVENT_END
Encountered an end of a class clause.
Definition event.h:40
#define RUBY_EVENT_B_RETURN
Encountered a next statement.
Definition event.h:56
#define RUBY_EVENT_CLASS
Encountered a new class.
Definition event.h:39
#define RUBY_EVENT_LINE
Encountered a new line.
Definition event.h:38
#define RUBY_EVENT_RETURN
Encountered a return statement.
Definition event.h:42
#define RUBY_EVENT_B_CALL
Encountered an yield statement.
Definition event.h:55
#define RUBY_EVENT_CALL
A method, written in Ruby, is called.
Definition event.h:41
#define RUBY_EVENT_RESCUE
Encountered a rescue statement.
Definition event.h:61
#define rb_str_new2
Old name of rb_str_new_cstr.
Definition string.h:1674
#define ALLOCV
Old name of RB_ALLOCV.
Definition memory.h:404
#define ALLOC
Old name of RB_ALLOC.
Definition memory.h:400
#define RFLOAT_VALUE
Old name of rb_float_value.
Definition double.h:28
#define T_STRING
Old name of RUBY_T_STRING.
Definition value_type.h:78
#define xfree
Old name of ruby_xfree.
Definition xmalloc.h:58
#define Qundef
Old name of RUBY_Qundef.
#define INT2FIX
Old name of RB_INT2FIX.
Definition long.h:48
#define rb_str_cat2
Old name of rb_str_cat_cstr.
Definition string.h:1682
#define ID2SYM
Old name of RB_ID2SYM.
Definition symbol.h:44
#define SPECIAL_CONST_P
Old name of RB_SPECIAL_CONST_P.
#define ULONG2NUM
Old name of RB_ULONG2NUM.
Definition long.h:60
#define FIXABLE
Old name of RB_FIXABLE.
Definition fixnum.h:25
#define xmalloc
Old name of ruby_xmalloc.
Definition xmalloc.h:53
#define LONG2FIX
Old name of RB_INT2FIX.
Definition long.h:49
#define ZALLOC_N
Old name of RB_ZALLOC_N.
Definition memory.h:401
#define T_HASH
Old name of RUBY_T_HASH.
Definition value_type.h:65
#define ALLOC_N
Old name of RB_ALLOC_N.
Definition memory.h:399
#define rb_exc_new3
Old name of rb_exc_new_str.
Definition error.h:38
#define FLONUM_P
Old name of RB_FLONUM_P.
#define Qtrue
Old name of RUBY_Qtrue.
#define INT2NUM
Old name of RB_INT2NUM.
Definition int.h:43
#define Qnil
Old name of RUBY_Qnil.
#define Qfalse
Old name of RUBY_Qfalse.
#define T_ARRAY
Old name of RUBY_T_ARRAY.
Definition value_type.h:56
#define NIL_P
Old name of RB_NIL_P.
#define DBL2NUM
Old name of rb_float_new.
Definition double.h:29
#define xcalloc
Old name of ruby_xcalloc.
Definition xmalloc.h:55
#define NUM2LONG
Old name of RB_NUM2LONG.
Definition long.h:51
#define UINT2NUM
Old name of RB_UINT2NUM.
Definition int.h:46
#define CONST_ID
Old name of RUBY_CONST_ID.
Definition symbol.h:47
#define ruby_debug
This variable controls whether the interpreter is in debug mode.
Definition error.h:486
VALUE rb_eNotImpError
NotImplementedError exception.
Definition error.c:1441
void rb_exc_raise(VALUE mesg)
Raises an exception in the current thread.
Definition eval.c:653
VALUE rb_eStandardError
StandardError exception.
Definition error.c:1428
VALUE rb_eLoadError
LoadError exception.
Definition error.c:1449
VALUE rb_eTypeError
TypeError exception.
Definition error.c:1431
VALUE rb_eNoMatchingPatternError
NoMatchingPatternError exception.
Definition error.c:1444
void rb_warn(const char *fmt,...)
Identical to rb_warning(), except it reports unless $VERBOSE is nil.
Definition error.c:466
VALUE rb_exc_new(VALUE etype, const char *ptr, long len)
Creates an instance of the passed exception class.
Definition error.c:1469
VALUE rb_eNoMatchingPatternKeyError
NoMatchingPatternKeyError exception.
Definition error.c:1445
VALUE rb_exc_new_str(VALUE etype, VALUE str)
Identical to rb_exc_new_cstr(), except it takes a Ruby's string instead of C's.
Definition error.c:1482
VALUE rb_eSyntaxError
SyntaxError exception.
Definition error.c:1448
VALUE rb_syserr_new(int n, const char *mesg)
Creates an exception object that represents the given C errno.
Definition error.c:3895
VALUE rb_cArray
Array class.
VALUE rb_obj_hide(VALUE obj)
Make the object invisible from Ruby code.
Definition object.c:100
VALUE rb_stdin
STDIN constant.
Definition io.c:201
VALUE rb_obj_freeze(VALUE obj)
Just calls rb_obj_freeze_inline() inside.
Definition object.c:1342
#define RB_OBJ_WRITTEN(old, oldv, young)
Identical to RB_OBJ_WRITE(), except it doesn't write any values, but only a WB declaration.
Definition gc.h:615
#define RB_OBJ_WRITE(old, slot, young)
Declaration of a "back" pointer.
Definition gc.h:603
int rb_enc_str_coderange(VALUE str)
Scans the passed string to collect its code range.
Definition string.c:947
VALUE rb_enc_interned_str(const char *ptr, long len, rb_encoding *enc)
Identical to rb_enc_str_new(), except it returns a "f"string.
Definition string.c:12718
VALUE rb_enc_str_new_cstr(const char *ptr, rb_encoding *enc)
Identical to rb_enc_str_new(), except it assumes the passed pointer is a pointer to a C string.
Definition string.c:1155
VALUE rb_funcall(VALUE recv, ID mid, int n,...)
Calls a method.
Definition vm_eval.c:1117
VALUE rb_ary_cat(VALUE ary, const VALUE *train, long len)
Destructively appends multiple elements at the end of the array.
VALUE rb_ary_new(void)
Allocates a new, empty array.
VALUE rb_ary_new_capa(long capa)
Identical to rb_ary_new(), except it additionally specifies how many rooms of objects it should alloc...
VALUE rb_ary_hidden_new(long capa)
Allocates a hidden (no class) empty array.
VALUE rb_ary_push(VALUE ary, VALUE elem)
Special case of rb_ary_cat() that it adds only one element.
VALUE rb_ary_entry(VALUE ary, long off)
Queries an element of an array.
VALUE rb_ary_join(VALUE ary, VALUE sep)
Recursively stringises the elements of the passed array, flattens that result, then joins the sequenc...
void rb_ary_store(VALUE ary, long key, VALUE val)
Destructively stores the passed value to the passed array's passed index.
VALUE rb_io_fdopen(int fd, int flags, const char *path)
Creates an IO instance whose backend is the given file descriptor.
Definition io.c:9361
VALUE rb_range_new(VALUE beg, VALUE end, int excl)
Creates a new Range.
Definition range.c:69
VALUE rb_rational_new(VALUE num, VALUE den)
Constructs a Rational, with reduction.
Definition rational.c:2000
VALUE rb_str_append(VALUE dst, VALUE src)
Identical to rb_str_buf_append(), except it converts the right hand side before concatenating.
Definition string.c:3797
VALUE rb_str_tmp_new(long len)
Allocates a "temporary" string.
Definition string.c:1746
#define rb_str_new(str, len)
Allocates an instance of rb_cString.
Definition string.h:1497
#define rb_exc_new_cstr(exc, str)
Identical to rb_exc_new(), except it assumes the passed pointer is a pointer to a C string.
Definition string.h:1669
#define rb_str_buf_new_cstr(str)
Identical to rb_str_new_cstr, except done differently.
Definition string.h:1638
VALUE rb_str_concat(VALUE dst, VALUE src)
Identical to rb_str_append(), except it also accepts an integer as a codepoint.
Definition string.c:4034
VALUE rb_str_freeze(VALUE str)
This is the implementation of String#freeze.
Definition string.c:3278
#define rb_str_new_cstr(str)
Identical to rb_str_new, except it assumes the passed pointer is a pointer to a C string.
Definition string.h:1513
VALUE rb_obj_as_string(VALUE obj)
Try converting an object to its stringised representation using its to_s method, if any.
Definition string.c:1850
VALUE rb_ivar_set(VALUE obj, ID name, VALUE val)
Identical to rb_iv_set(), except it accepts the name as an ID instead of a C string.
Definition variable.c:2017
VALUE rb_const_get_at(VALUE space, ID name)
Identical to rb_const_defined_at(), except it returns the actual defined value.
Definition variable.c:3454
int rb_const_defined_at(VALUE space, ID name)
Identical to rb_const_defined(), except it doesn't look for parent classes.
Definition variable.c:3786
static ID rb_intern_const(const char *str)
This is a "tiny optimisation" over rb_intern().
Definition symbol.h:285
VALUE rb_id2sym(ID id)
Allocates an instance of rb_cSymbol that has the given id.
Definition symbol.c:974
VALUE rb_sym2str(VALUE symbol)
Obtain a frozen string representation of a symbol (not including the leading colon).
Definition symbol.c:993
@ RUBY_IO_READABLE
IO::READABLE
Definition io.h:97
VALUE rb_io_wait(VALUE io, VALUE events, VALUE timeout)
Blocks until the passed IO is ready for the passed events.
Definition io.c:1482
int len
Length of the buffer.
Definition io.h:8
#define RB_OBJ_SHAREABLE_P(obj)
Queries if the passed object has previously classified as shareable or not.
Definition ractor.h:235
VALUE rb_ractor_make_shareable(VALUE obj)
Destructively transforms the passed object so that multiple Ractors can share it.
Definition ractor.c:1547
#define DECIMAL_SIZE_OF(expr)
An approximation of decimal representation size.
Definition util.h:48
#define RUBY_API_VERSION_MAJOR
Major version.
Definition version.h:64
#define RUBY_API_VERSION_MINOR
Minor version.
Definition version.h:70
#define RB_INT2NUM
Just another name of rb_int2num_inline.
Definition int.h:37
#define RB_GC_GUARD(v)
Prevents premature destruction of local objects.
Definition memory.h:167
VALUE type(ANYARGS)
ANYARGS-ed function type.
PRISM_EXPORTED_FUNCTION void pm_options_encoding_set(pm_options_t *options, const char *encoding)
Set the encoding option on the given options struct.
Definition options.c:24
PRISM_EXPORTED_FUNCTION void pm_options_free(pm_options_t *options)
Free the internal memory associated with the options.
Definition options.c:218
PRISM_EXPORTED_FUNCTION void pm_options_frozen_string_literal_set(pm_options_t *options, bool frozen_string_literal)
Set the frozen string literal option on the given options struct.
Definition options.c:48
PRISM_EXPORTED_FUNCTION void pm_options_filepath_set(pm_options_t *options, const char *filepath)
Set the filepath option on the given options struct.
Definition options.c:16
void pm_buffer_free(pm_buffer_t *buffer)
Free the memory associated with the buffer.
Definition pm_buffer.c:355
size_t pm_buffer_length(const pm_buffer_t *buffer)
Return the length of the buffer.
Definition pm_buffer.c:43
char * pm_buffer_value(const pm_buffer_t *buffer)
Return the value of the buffer.
Definition pm_buffer.c:35
uint32_t pm_constant_id_t
A constant id is a unique identifier for a constant in the constant pool.
PRISM_EXPORTED_FUNCTION size_t pm_string_length(const pm_string_t *string)
Returns the length associated with the string.
Definition pm_string.c:351
PRISM_EXPORTED_FUNCTION const uint8_t * pm_string_source(const pm_string_t *string)
Returns the start pointer associated with the string.
Definition pm_string.c:359
PRISM_EXPORTED_FUNCTION void pm_string_free(pm_string_t *string)
Free the associated memory of the given string.
Definition pm_string.c:367
PRISM_EXPORTED_FUNCTION pm_string_init_result_t pm_string_file_init(pm_string_t *string, const char *filepath)
Read the file indicated by the filepath parameter into source and load its contents and size into the...
Definition pm_string.c:210
pm_string_init_result_t
Represents the result of calling pm_string_mapped_init or pm_string_file_init.
Definition pm_string.h:105
@ PM_STRING_INIT_SUCCESS
Indicates that the string was successfully initialized.
Definition pm_string.h:107
@ PM_STRING_INIT_ERROR_GENERIC
Indicates a generic error from a string_*_init function, where the type of error should be read from ...
Definition pm_string.h:112
@ PM_STRING_INIT_ERROR_DIRECTORY
Indicates that the file that was attempted to be opened was a directory.
Definition pm_string.h:116
#define PM_ENCODING_US_ASCII_ENTRY
This is the US-ASCII encoding.
Definition encoding.h:252
#define PM_NODE_LIST_FOREACH(list, index, node)
Loop through each node in the node list, writing each node to the given pm_node_t pointer.
Definition node.h:17
PRISM_EXPORTED_FUNCTION pm_node_t * pm_parse(pm_parser_t *parser)
Parse the Ruby source associated with the given parser and return the tree.
Definition prism.c:22271
PRISM_EXPORTED_FUNCTION void pm_parser_free(pm_parser_t *parser)
Free any memory associated with the given parser.
Definition prism.c:22245
PRISM_EXPORTED_FUNCTION pm_node_t * pm_parse_stream(pm_parser_t *parser, pm_buffer_t *buffer, void *stream, pm_parse_stream_fgets_t *stream_fgets, pm_parse_stream_feof_t *stream_feof, const pm_options_t *options)
Parse a stream of Ruby source and return the tree.
Definition prism.c:22358
PRISM_EXPORTED_FUNCTION void pm_parser_init(pm_parser_t *parser, const uint8_t *source, size_t size, const pm_options_t *options)
Initialize a parser with the given start and end pointers.
Definition prism.c:21946
The main header file for the prism parser.
#define RARRAY_LEN
Just another name of rb_array_len.
Definition rarray.h:51
#define RARRAY_AREF(a, i)
Definition rarray.h:403
#define RARRAY_CONST_PTR
Just another name of rb_array_const_ptr.
Definition rarray.h:52
#define errno
Ractor-aware version of errno.
Definition ruby.h:388
#define RTEST
This is an old name of RB_TEST.
AliasGlobalVariableNode.
Definition ast.h:1116
struct pm_node * old_name
AliasGlobalVariableNode::old_name.
Definition ast.h:1139
struct pm_node * new_name
AliasGlobalVariableNode::new_name.
Definition ast.h:1129
AliasMethodNode.
Definition ast.h:1164
struct pm_node * old_name
AliasMethodNode::old_name.
Definition ast.h:1199
struct pm_node * new_name
AliasMethodNode::new_name.
Definition ast.h:1183
AlternationPatternNode.
Definition ast.h:1224
struct pm_node * left
AlternationPatternNode::left.
Definition ast.h:1237
struct pm_node * right
AlternationPatternNode::right.
Definition ast.h:1247
AndNode.
Definition ast.h:1272
struct pm_node * left
AndNode::left.
Definition ast.h:1288
struct pm_node * right
AndNode::right.
Definition ast.h:1301
ArgumentsNode.
Definition ast.h:1333
pm_node_t base
The embedded base node.
Definition ast.h:1335
struct pm_node_list arguments
ArgumentsNode::arguments.
Definition ast.h:1346
ArrayNode.
Definition ast.h:1364
struct pm_node_list elements
ArrayNode::elements.
Definition ast.h:1374
ArrayPatternNode.
Definition ast.h:1425
struct pm_node_list requireds
ArrayPatternNode::requireds.
Definition ast.h:1454
struct pm_node * rest
ArrayPatternNode::rest.
Definition ast.h:1464
struct pm_node * constant
ArrayPatternNode::constant.
Definition ast.h:1444
struct pm_node_list posts
ArrayPatternNode::posts.
Definition ast.h:1474
AssocNode.
Definition ast.h:1509
struct pm_node * value
AssocNode::value.
Definition ast.h:1541
struct pm_node * key
AssocNode::key.
Definition ast.h:1528
AssocSplatNode.
Definition ast.h:1566
struct pm_node * value
AssocSplatNode::value.
Definition ast.h:1579
BackReferenceReadNode.
Definition ast.h:1604
pm_node_t base
The embedded base node.
Definition ast.h:1606
BeginNode.
Definition ast.h:1635
struct pm_ensure_node * ensure_clause
BeginNode::ensure_clause.
Definition ast.h:1688
struct pm_rescue_node * rescue_clause
BeginNode::rescue_clause.
Definition ast.h:1668
struct pm_statements_node * statements
BeginNode::statements.
Definition ast.h:1658
struct pm_else_node * else_clause
BeginNode::else_clause.
Definition ast.h:1678
BlockArgumentNode.
Definition ast.h:1713
struct pm_node * expression
BlockArgumentNode::expression.
Definition ast.h:1726
BlockLocalVariableNode.
Definition ast.h:1754
BlockNode.
Definition ast.h:1782
struct pm_node * parameters
BlockNode::parameters.
Definition ast.h:1809
struct pm_node * body
BlockNode::body.
Definition ast.h:1819
pm_constant_id_list_t locals
BlockNode::locals.
Definition ast.h:1795
BlockParameterNode.
Definition ast.h:1858
BlockParametersNode.
Definition ast.h:1912
BreakNode.
Definition ast.h:1986
struct pm_arguments_node * arguments
BreakNode::arguments.
Definition ast.h:1999
A pm_buffer_t is a simple memory buffer that stores data in a contiguous block of memory.
Definition pm_buffer.h:22
CallAndWriteNode.
Definition ast.h:2030
struct pm_node * value
CallAndWriteNode::value.
Definition ast.h:2103
pm_constant_id_t read_name
CallAndWriteNode::read_name.
Definition ast.h:2073
pm_constant_id_t write_name
CallAndWriteNode::write_name.
Definition ast.h:2083
struct pm_node * receiver
CallAndWriteNode::receiver.
Definition ast.h:2043
CallNode.
Definition ast.h:2139
pm_location_t closing_loc
CallNode::closing_loc.
Definition ast.h:2220
struct pm_node * receiver
CallNode::receiver.
Definition ast.h:2158
pm_constant_id_t name
CallNode::name.
Definition ast.h:2181
pm_node_t base
The embedded base node.
Definition ast.h:2141
pm_location_t message_loc
CallNode::message_loc.
Definition ast.h:2191
struct pm_arguments_node * arguments
CallNode::arguments.
Definition ast.h:2210
struct pm_node * block
CallNode::block.
Definition ast.h:2243
CallOperatorWriteNode.
Definition ast.h:2264
pm_constant_id_t read_name
CallOperatorWriteNode::read_name.
Definition ast.h:2307
pm_constant_id_t binary_operator
CallOperatorWriteNode::binary_operator.
Definition ast.h:2327
struct pm_node * receiver
CallOperatorWriteNode::receiver.
Definition ast.h:2277
pm_constant_id_t write_name
CallOperatorWriteNode::write_name.
Definition ast.h:2317
struct pm_node * value
CallOperatorWriteNode::value.
Definition ast.h:2347
CallOrWriteNode.
Definition ast.h:2368
struct pm_node * receiver
CallOrWriteNode::receiver.
Definition ast.h:2381
struct pm_node * value
CallOrWriteNode::value.
Definition ast.h:2441
pm_constant_id_t write_name
CallOrWriteNode::write_name.
Definition ast.h:2421
pm_constant_id_t read_name
CallOrWriteNode::read_name.
Definition ast.h:2411
CallTargetNode.
Definition ast.h:2470
pm_constant_id_t name
CallTargetNode::name.
Definition ast.h:2503
struct pm_node * receiver
CallTargetNode::receiver.
Definition ast.h:2483
CapturePatternNode.
Definition ast.h:2528
struct pm_local_variable_target_node * target
CapturePatternNode::target.
Definition ast.h:2551
struct pm_node * value
CapturePatternNode::value.
Definition ast.h:2541
CaseMatchNode.
Definition ast.h:2578
struct pm_node_list conditions
CaseMatchNode::conditions.
Definition ast.h:2601
struct pm_else_node * else_clause
CaseMatchNode::else_clause.
Definition ast.h:2611
struct pm_node * predicate
CaseMatchNode::predicate.
Definition ast.h:2591
CaseNode.
Definition ast.h:2648
struct pm_node * predicate
CaseNode::predicate.
Definition ast.h:2661
struct pm_else_node * else_clause
CaseNode::else_clause.
Definition ast.h:2681
struct pm_node_list conditions
CaseNode::conditions.
Definition ast.h:2671
ClassNode.
Definition ast.h:2716
struct pm_node * constant_path
ClassNode::constant_path.
Definition ast.h:2739
pm_constant_id_list_t locals
ClassNode::locals.
Definition ast.h:2724
pm_constant_id_t name
ClassNode::name.
Definition ast.h:2789
struct pm_node * body
ClassNode::body.
Definition ast.h:2770
struct pm_node * superclass
ClassNode::superclass.
Definition ast.h:2759
ClassVariableAndWriteNode.
Definition ast.h:2804
struct pm_node * value
ClassVariableAndWriteNode::value.
Definition ast.h:2847
pm_constant_id_t name
ClassVariableAndWriteNode::name.
Definition ast.h:2817
ClassVariableOperatorWriteNode.
Definition ast.h:2862
pm_constant_id_t name
ClassVariableOperatorWriteNode::name.
Definition ast.h:2870
pm_constant_id_t binary_operator
ClassVariableOperatorWriteNode::binary_operator.
Definition ast.h:2890
struct pm_node * value
ClassVariableOperatorWriteNode::value.
Definition ast.h:2885
ClassVariableOrWriteNode.
Definition ast.h:2905
pm_constant_id_t name
ClassVariableOrWriteNode::name.
Definition ast.h:2913
struct pm_node * value
ClassVariableOrWriteNode::value.
Definition ast.h:2928
ClassVariableReadNode.
Definition ast.h:2943
pm_constant_id_t name
ClassVariableReadNode::name.
Definition ast.h:2957
ClassVariableTargetNode.
Definition ast.h:2972
pm_constant_id_t name
ClassVariableTargetNode::name.
Definition ast.h:2980
ClassVariableWriteNode.
Definition ast.h:2995
struct pm_node * value
ClassVariableWriteNode::value.
Definition ast.h:3032
pm_constant_id_t name
ClassVariableWriteNode::name.
Definition ast.h:3009
ConstantAndWriteNode.
Definition ast.h:3057
pm_location_t name_loc
ConstantAndWriteNode::name_loc.
Definition ast.h:3070
pm_constant_id_t name
ConstantAndWriteNode::name.
Definition ast.h:3065
struct pm_node * value
ConstantAndWriteNode::value.
Definition ast.h:3080
A list of constant IDs.
size_t size
The number of constant ids in the list.
size_t capacity
The number of constant ids that have been allocated in the list.
pm_constant_id_t * ids
The constant ids in the list.
ConstantOperatorWriteNode.
Definition ast.h:3095
pm_constant_id_t name
ConstantOperatorWriteNode::name.
Definition ast.h:3103
pm_location_t name_loc
ConstantOperatorWriteNode::name_loc.
Definition ast.h:3108
pm_constant_id_t binary_operator
ConstantOperatorWriteNode::binary_operator.
Definition ast.h:3123
struct pm_node * value
ConstantOperatorWriteNode::value.
Definition ast.h:3118
ConstantOrWriteNode.
Definition ast.h:3138
pm_location_t name_loc
ConstantOrWriteNode::name_loc.
Definition ast.h:3151
pm_constant_id_t name
ConstantOrWriteNode::name.
Definition ast.h:3146
struct pm_node * value
ConstantOrWriteNode::value.
Definition ast.h:3161
ConstantPathAndWriteNode.
Definition ast.h:3176
struct pm_constant_path_node * target
ConstantPathAndWriteNode::target.
Definition ast.h:3184
struct pm_node * value
ConstantPathAndWriteNode::value.
Definition ast.h:3194
ConstantPathNode.
Definition ast.h:3209
pm_constant_id_t name
ConstantPathNode::name.
Definition ast.h:3235
struct pm_node * parent
ConstantPathNode::parent.
Definition ast.h:3228
ConstantPathOperatorWriteNode.
Definition ast.h:3276
struct pm_constant_path_node * target
ConstantPathOperatorWriteNode::target.
Definition ast.h:3284
struct pm_node * value
ConstantPathOperatorWriteNode::value.
Definition ast.h:3294
pm_constant_id_t binary_operator
ConstantPathOperatorWriteNode::binary_operator.
Definition ast.h:3299
ConstantPathOrWriteNode.
Definition ast.h:3314
struct pm_node * value
ConstantPathOrWriteNode::value.
Definition ast.h:3332
struct pm_constant_path_node * target
ConstantPathOrWriteNode::target.
Definition ast.h:3322
ConstantPathTargetNode.
Definition ast.h:3347
struct pm_node * parent
ConstantPathTargetNode::parent.
Definition ast.h:3355
pm_constant_id_t name
ConstantPathTargetNode::name.
Definition ast.h:3360
ConstantPathWriteNode.
Definition ast.h:3391
struct pm_constant_path_node * target
ConstantPathWriteNode::target.
Definition ast.h:3407
struct pm_node * value
ConstantPathWriteNode::value.
Definition ast.h:3427
uint32_t size
The number of buckets in the hash map.
pm_constant_t * constants
The constants that are stored in the buckets.
ConstantReadNode.
Definition ast.h:3442
pm_node_t base
The embedded base node.
Definition ast.h:3444
pm_constant_id_t name
ConstantReadNode::name.
Definition ast.h:3456
A constant in the pool which effectively stores a string.
size_t length
The length of the string.
const uint8_t * start
A pointer to the start of the string.
ConstantTargetNode.
Definition ast.h:3471
pm_constant_id_t name
ConstantTargetNode::name.
Definition ast.h:3479
ConstantWriteNode.
Definition ast.h:3494
struct pm_node * value
ConstantWriteNode::value.
Definition ast.h:3531
pm_constant_id_t name
ConstantWriteNode::name.
Definition ast.h:3508
DefNode.
Definition ast.h:3557
struct pm_parameters_node * parameters
DefNode::parameters.
Definition ast.h:3580
pm_constant_id_t name
DefNode::name.
Definition ast.h:3565
struct pm_node * body
DefNode::body.
Definition ast.h:3585
struct pm_node * receiver
DefNode::receiver.
Definition ast.h:3575
pm_node_t base
The embedded base node.
Definition ast.h:3559
pm_constant_id_list_t locals
DefNode::locals.
Definition ast.h:3590
DefinedNode.
Definition ast.h:3635
struct pm_node * value
DefinedNode::value.
Definition ast.h:3648
This struct represents a diagnostic generated during parsing.
Definition diagnostic.h:366
pm_location_t location
The location of the diagnostic in the source.
Definition diagnostic.h:371
const char * message
The message associated with the diagnostic.
Definition diagnostic.h:377
pm_list_node_t node
The embedded base node.
Definition diagnostic.h:368
uint8_t level
The level of the diagnostic, see pm_error_level_t and pm_warning_level_t for possible values.
Definition diagnostic.h:390
ElseNode.
Definition ast.h:3673
struct pm_statements_node * statements
ElseNode::statements.
Definition ast.h:3686
EmbeddedStatementsNode.
Definition ast.h:3706
struct pm_statements_node * statements
EmbeddedStatementsNode::statements.
Definition ast.h:3719
EmbeddedVariableNode.
Definition ast.h:3739
struct pm_node * variable
EmbeddedVariableNode::variable.
Definition ast.h:3752
This struct defines the functions necessary to implement the encoding interface so we can determine h...
Definition encoding.h:23
size_t(* char_width)(const uint8_t *b, ptrdiff_t n)
Return the number of bytes that the next character takes if it is valid in the encoding.
Definition encoding.h:29
const char * name
The name of the encoding.
Definition encoding.h:56
EnsureNode.
Definition ast.h:3771
struct pm_statements_node * statements
EnsureNode::statements.
Definition ast.h:3784
FindPatternNode.
Definition ast.h:3831
struct pm_node * constant
FindPatternNode::constant.
Definition ast.h:3844
struct pm_node * right
FindPatternNode::right.
Definition ast.h:3883
struct pm_node_list requireds
FindPatternNode::requireds.
Definition ast.h:3870
struct pm_splat_node * left
FindPatternNode::left.
Definition ast.h:3857
FlipFlopNode.
Definition ast.h:3927
pm_node_t base
The embedded base node.
Definition ast.h:3929
struct pm_node * left
FlipFlopNode::left.
Definition ast.h:3935
struct pm_node * right
FlipFlopNode::right.
Definition ast.h:3940
FloatNode.
Definition ast.h:3960
double value
FloatNode::value.
Definition ast.h:3970
ForNode.
Definition ast.h:3985
struct pm_statements_node * statements
ForNode::statements.
Definition ast.h:4020
struct pm_node * collection
ForNode::collection.
Definition ast.h:4008
ForwardingSuperNode.
Definition ast.h:4119
struct pm_block_node * block
ForwardingSuperNode::block.
Definition ast.h:4129
GlobalVariableAndWriteNode.
Definition ast.h:4144
struct pm_node * value
GlobalVariableAndWriteNode::value.
Definition ast.h:4167
pm_constant_id_t name
GlobalVariableAndWriteNode::name.
Definition ast.h:4152
GlobalVariableOperatorWriteNode.
Definition ast.h:4182
pm_constant_id_t name
GlobalVariableOperatorWriteNode::name.
Definition ast.h:4190
pm_constant_id_t binary_operator
GlobalVariableOperatorWriteNode::binary_operator.
Definition ast.h:4210
struct pm_node * value
GlobalVariableOperatorWriteNode::value.
Definition ast.h:4205
GlobalVariableOrWriteNode.
Definition ast.h:4225
pm_constant_id_t name
GlobalVariableOrWriteNode::name.
Definition ast.h:4233
struct pm_node * value
GlobalVariableOrWriteNode::value.
Definition ast.h:4248
GlobalVariableReadNode.
Definition ast.h:4263
pm_constant_id_t name
GlobalVariableReadNode::name.
Definition ast.h:4277
GlobalVariableTargetNode.
Definition ast.h:4292
pm_constant_id_t name
GlobalVariableTargetNode::name.
Definition ast.h:4300
GlobalVariableWriteNode.
Definition ast.h:4315
struct pm_node * value
GlobalVariableWriteNode::value.
Definition ast.h:4352
pm_constant_id_t name
GlobalVariableWriteNode::name.
Definition ast.h:4329
HashNode.
Definition ast.h:4377
struct pm_node_list elements
HashNode::elements.
Definition ast.h:4403
HashPatternNode.
Definition ast.h:4437
struct pm_node_list elements
HashPatternNode::elements.
Definition ast.h:4463
struct pm_node * rest
HashPatternNode::rest.
Definition ast.h:4479
struct pm_node * constant
HashPatternNode::constant.
Definition ast.h:4453
IfNode.
Definition ast.h:4526
struct pm_node * predicate
IfNode::predicate.
Definition ast.h:4559
struct pm_statements_node * statements
IfNode::statements.
Definition ast.h:4586
ImaginaryNode.
Definition ast.h:4632
struct pm_node * numeric
ImaginaryNode::numeric.
Definition ast.h:4640
ImplicitNode.
Definition ast.h:4661
struct pm_node * value
ImplicitNode::value.
Definition ast.h:4669
InNode.
Definition ast.h:4711
struct pm_statements_node * statements
InNode::statements.
Definition ast.h:4724
struct pm_node * pattern
InNode::pattern.
Definition ast.h:4719
IndexAndWriteNode.
Definition ast.h:4755
struct pm_arguments_node * arguments
IndexAndWriteNode::arguments.
Definition ast.h:4778
struct pm_node * receiver
IndexAndWriteNode::receiver.
Definition ast.h:4763
struct pm_block_argument_node * block
IndexAndWriteNode::block.
Definition ast.h:4788
struct pm_node * value
IndexAndWriteNode::value.
Definition ast.h:4798
IndexOperatorWriteNode.
Definition ast.h:4819
struct pm_block_argument_node * block
IndexOperatorWriteNode::block.
Definition ast.h:4852
struct pm_node * value
IndexOperatorWriteNode::value.
Definition ast.h:4867
struct pm_arguments_node * arguments
IndexOperatorWriteNode::arguments.
Definition ast.h:4842
pm_constant_id_t binary_operator
IndexOperatorWriteNode::binary_operator.
Definition ast.h:4857
struct pm_node * receiver
IndexOperatorWriteNode::receiver.
Definition ast.h:4827
IndexOrWriteNode.
Definition ast.h:4888
struct pm_block_argument_node * block
IndexOrWriteNode::block.
Definition ast.h:4921
struct pm_node * receiver
IndexOrWriteNode::receiver.
Definition ast.h:4896
struct pm_node * value
IndexOrWriteNode::value.
Definition ast.h:4931
struct pm_arguments_node * arguments
IndexOrWriteNode::arguments.
Definition ast.h:4911
IndexTargetNode.
Definition ast.h:4960
struct pm_node * receiver
IndexTargetNode::receiver.
Definition ast.h:4968
struct pm_arguments_node * arguments
IndexTargetNode::arguments.
Definition ast.h:4978
struct pm_block_argument_node * block
IndexTargetNode::block.
Definition ast.h:4988
InstanceVariableAndWriteNode.
Definition ast.h:5003
struct pm_node * value
InstanceVariableAndWriteNode::value.
Definition ast.h:5026
pm_constant_id_t name
InstanceVariableAndWriteNode::name.
Definition ast.h:5011
InstanceVariableOperatorWriteNode.
Definition ast.h:5041
struct pm_node * value
InstanceVariableOperatorWriteNode::value.
Definition ast.h:5064
pm_constant_id_t binary_operator
InstanceVariableOperatorWriteNode::binary_operator.
Definition ast.h:5069
pm_constant_id_t name
InstanceVariableOperatorWriteNode::name.
Definition ast.h:5049
InstanceVariableOrWriteNode.
Definition ast.h:5084
struct pm_node * value
InstanceVariableOrWriteNode::value.
Definition ast.h:5107
pm_constant_id_t name
InstanceVariableOrWriteNode::name.
Definition ast.h:5092
InstanceVariableReadNode.
Definition ast.h:5122
pm_constant_id_t name
InstanceVariableReadNode::name.
Definition ast.h:5136
InstanceVariableTargetNode.
Definition ast.h:5151
pm_constant_id_t name
InstanceVariableTargetNode::name.
Definition ast.h:5159
InstanceVariableWriteNode.
Definition ast.h:5174
pm_constant_id_t name
InstanceVariableWriteNode::name.
Definition ast.h:5188
struct pm_node * value
InstanceVariableWriteNode::value.
Definition ast.h:5211
IntegerNode.
Definition ast.h:5242
pm_integer_t value
IntegerNode::value.
Definition ast.h:5252
A structure represents an arbitrary-sized integer.
Definition pm_integer.h:20
size_t length
The number of allocated values.
Definition pm_integer.h:25
uint32_t value
Embedded value for small integer.
Definition pm_integer.h:36
uint32_t * values
List of 32-bit integers.
Definition pm_integer.h:30
bool negative
Whether or not the integer is negative.
Definition pm_integer.h:42
InterpolatedMatchLastLineNode.
Definition ast.h:5280
InterpolatedRegularExpressionNode.
Definition ast.h:5326
InterpolatedStringNode.
Definition ast.h:5363
struct pm_node_list parts
InterpolatedStringNode::parts.
Definition ast.h:5376
InterpolatedSymbolNode.
Definition ast.h:5396
struct pm_node_list parts
InterpolatedSymbolNode::parts.
Definition ast.h:5409
InterpolatedXStringNode.
Definition ast.h:5429
struct pm_node_list parts
InterpolatedXStringNode::parts.
Definition ast.h:5442
KeywordHashNode.
Definition ast.h:5501
struct pm_node_list elements
KeywordHashNode::elements.
Definition ast.h:5509
KeywordRestParameterNode.
Definition ast.h:5528
LambdaNode.
Definition ast.h:5561
struct pm_node * body
LambdaNode::body.
Definition ast.h:5594
pm_location_t opening_loc
LambdaNode::opening_loc.
Definition ast.h:5579
struct pm_node * parameters
LambdaNode::parameters.
Definition ast.h:5589
pm_location_t operator_loc
LambdaNode::operator_loc.
Definition ast.h:5574
pm_constant_id_list_t locals
LambdaNode::locals.
Definition ast.h:5569
A line and column in a string.
uint32_t column
The column number.
int32_t line
The line number.
This struct represents an abstract linked list that provides common functionality.
Definition pm_list.h:46
struct pm_list_node * next
A pointer to the next node in the list.
Definition pm_list.h:48
This represents the overall linked list.
Definition pm_list.h:55
pm_list_node_t * tail
A pointer to the tail of the list.
Definition pm_list.h:63
pm_list_node_t * head
A pointer to the head of the list.
Definition pm_list.h:60
size_t size
The size of the list.
Definition pm_list.h:57
the getlocal and setlocal instructions require two parameters.
LocalVariableAndWriteNode.
Definition ast.h:5609
pm_constant_id_t name
LocalVariableAndWriteNode::name.
Definition ast.h:5632
uint32_t depth
LocalVariableAndWriteNode::depth.
Definition ast.h:5637
struct pm_node * value
LocalVariableAndWriteNode::value.
Definition ast.h:5627
LocalVariableOperatorWriteNode.
Definition ast.h:5652
uint32_t depth
LocalVariableOperatorWriteNode::depth.
Definition ast.h:5685
pm_constant_id_t binary_operator
LocalVariableOperatorWriteNode::binary_operator.
Definition ast.h:5680
struct pm_node * value
LocalVariableOperatorWriteNode::value.
Definition ast.h:5670
pm_constant_id_t name
LocalVariableOperatorWriteNode::name.
Definition ast.h:5675
LocalVariableOrWriteNode.
Definition ast.h:5700
uint32_t depth
LocalVariableOrWriteNode::depth.
Definition ast.h:5728
struct pm_node * value
LocalVariableOrWriteNode::value.
Definition ast.h:5718
pm_constant_id_t name
LocalVariableOrWriteNode::name.
Definition ast.h:5723
LocalVariableReadNode.
Definition ast.h:5743
uint32_t depth
LocalVariableReadNode::depth.
Definition ast.h:5774
pm_constant_id_t name
LocalVariableReadNode::name.
Definition ast.h:5761
LocalVariableTargetNode.
Definition ast.h:5792
uint32_t depth
LocalVariableTargetNode::depth.
Definition ast.h:5805
pm_constant_id_t name
LocalVariableTargetNode::name.
Definition ast.h:5800
LocalVariableWriteNode.
Definition ast.h:5820
struct pm_node * value
LocalVariableWriteNode::value.
Definition ast.h:5874
uint32_t depth
LocalVariableWriteNode::depth.
Definition ast.h:5847
pm_constant_id_t name
LocalVariableWriteNode::name.
Definition ast.h:5834
This represents a range of bytes in the source string to which a node or token corresponds.
Definition ast.h:544
const uint8_t * start
A pointer to the start location of the range in the source.
Definition ast.h:546
const uint8_t * end
A pointer to the end location of the range in the source.
Definition ast.h:549
MatchLastLineNode.
Definition ast.h:5912
MatchPredicateNode.
Definition ast.h:5950
struct pm_node * pattern
MatchPredicateNode::pattern.
Definition ast.h:5963
struct pm_node * value
MatchPredicateNode::value.
Definition ast.h:5958
MatchRequiredNode.
Definition ast.h:5983
struct pm_node * value
MatchRequiredNode::value.
Definition ast.h:5996
struct pm_node * pattern
MatchRequiredNode::pattern.
Definition ast.h:6045
MatchWriteNode.
Definition ast.h:6070
struct pm_node_list targets
MatchWriteNode::targets.
Definition ast.h:6083
struct pm_call_node * call
MatchWriteNode::call.
Definition ast.h:6078
ModuleNode.
Definition ast.h:6113
struct pm_node * constant_path
ModuleNode::constant_path.
Definition ast.h:6131
struct pm_node * body
ModuleNode::body.
Definition ast.h:6136
pm_constant_id_list_t locals
ModuleNode::locals.
Definition ast.h:6121
pm_constant_id_t name
ModuleNode::name.
Definition ast.h:6146
MultiTargetNode.
Definition ast.h:6166
struct pm_node_list lefts
MultiTargetNode::lefts.
Definition ast.h:6184
struct pm_node * rest
MultiTargetNode::rest.
Definition ast.h:6204
struct pm_node_list rights
MultiTargetNode::rights.
Definition ast.h:6214
This is a node in the multi target state linked list.
As we're compiling a multi target, we need to track additional information whenever there is a parent...
MultiWriteNode.
Definition ast.h:6249
struct pm_node * value
MultiWriteNode::value.
Definition ast.h:6337
struct pm_node * rest
MultiWriteNode::rest.
Definition ast.h:6287
struct pm_node_list rights
MultiWriteNode::rights.
Definition ast.h:6297
struct pm_node_list lefts
MultiWriteNode::lefts.
Definition ast.h:6267
A list of offsets of newlines in a string.
const uint8_t * start
A pointer to the start of the source string.
size_t * offsets
The list of offsets.
size_t size
The number of offsets in the list.
NextNode.
Definition ast.h:6352
struct pm_arguments_node * arguments
NextNode::arguments.
Definition ast.h:6360
A list of nodes in the source, most often used for lists of children.
Definition ast.h:557
size_t size
The number of nodes in the list.
Definition ast.h:559
struct pm_node ** nodes
The nodes in the list.
Definition ast.h:565
This compiler defines its own concept of the location of a node.
int32_t line
This is the line number of a node.
uint32_t node_id
This is a unique identifier for the node.
This is the base structure that represents a node in the syntax tree.
Definition ast.h:1052
pm_node_type_t type
This represents the type of the node.
Definition ast.h:1057
uint32_t node_id
The unique identifier for this node, which is deterministic based on the source.
Definition ast.h:1069
pm_node_flags_t flags
This represents any flags on the node.
Definition ast.h:1063
pm_location_t location
This is the location of the node in the source.
Definition ast.h:1075
NumberedParametersNode.
Definition ast.h:6427
NumberedReferenceReadNode.
Definition ast.h:6450
uint32_t number
NumberedReferenceReadNode::number.
Definition ast.h:6466
OptionalKeywordParameterNode.
Definition ast.h:6485
pm_constant_id_t name
OptionalKeywordParameterNode::name.
Definition ast.h:6493
struct pm_node * value
OptionalKeywordParameterNode::value.
Definition ast.h:6503
OptionalParameterNode.
Definition ast.h:6522
struct pm_node * value
OptionalParameterNode::value.
Definition ast.h:6545
pm_constant_id_t name
OptionalParameterNode::name.
Definition ast.h:6530
The options that can be passed to the parser.
Definition options.h:110
pm_options_version_t version
The version of prism that we should be parsing with.
Definition options.h:156
OrNode.
Definition ast.h:6560
struct pm_node * left
OrNode::left.
Definition ast.h:6576
struct pm_node * right
OrNode::right.
Definition ast.h:6589
ParametersNode.
Definition ast.h:6615
struct pm_node * rest
ParametersNode::rest.
Definition ast.h:6633
struct pm_node_list requireds
ParametersNode::requireds.
Definition ast.h:6623
struct pm_block_parameter_node * block
ParametersNode::block.
Definition ast.h:6653
struct pm_node_list optionals
ParametersNode::optionals.
Definition ast.h:6628
struct pm_node_list posts
ParametersNode::posts.
Definition ast.h:6638
pm_node_t base
The embedded base node.
Definition ast.h:6617
struct pm_node * keyword_rest
ParametersNode::keyword_rest.
Definition ast.h:6648
struct pm_node_list keywords
ParametersNode::keywords.
Definition ast.h:6643
ParenthesesNode.
Definition ast.h:6671
struct pm_node * body
ParenthesesNode::body.
Definition ast.h:6679
The format that will be used to format the errors into the output.
size_t blank_prefix_length
The length of the blank prefix.
const char * blank_prefix
The prefix that will be used for blank lines.
size_t divider_length
The length of the divider.
const char * number_prefix
The prefix that will be used for line numbers.
const char * divider
The divider that will be used between sections of source code.
An error that is going to be formatted into the output.
pm_diagnostic_t * error
A pointer to the diagnostic that was generated during parsing.
uint32_t column_end
The column end of the diagnostic message.
int32_t line
The start line of the diagnostic message.
uint32_t column_start
The column start of the diagnostic message.
bool parsed
Whether or not this parse result has performed its parsing yet.
pm_scope_node_t node
The resulting scope node that will hold the generated AST.
pm_string_t input
The input that represents the source to be parsed.
pm_parser_t parser
The parser that will do the actual parsing.
pm_options_t options
The options that will be passed to the parser.
This struct represents the overall parser.
Definition parser.h:643
const pm_encoding_t * encoding
The encoding functions for the current file is attached to the parser as it's parsing so that it can ...
Definition parser.h:758
const uint8_t * end
The pointer to the end of the source.
Definition parser.h:697
pm_constant_pool_t constant_pool
This constant pool keeps all of the constants defined throughout the file so that we can reference th...
Definition parser.h:789
const uint8_t * start
The pointer to the start of the source.
Definition parser.h:694
pm_list_t error_list
The list of errors that have been found while parsing.
Definition parser.h:737
pm_list_t warning_list
The list of warnings that have been found while parsing.
Definition parser.h:734
int32_t start_line
The line number at the start of the parse.
Definition parser.h:812
pm_string_t filepath
This is the path of the file being parsed.
Definition parser.h:783
pm_newline_list_t newline_list
This is the list of newline offsets in the source file.
Definition parser.h:792
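Identifiers stored on nodes (method names, parameter names, and so on) are pm_constant_id_t handles into the parser's constant_pool. A minimal sketch of resolving one back into its bytes, assuming pm_constant_pool_id_to_constant and a pm_constant_t with start/length as in prism's constant pool API (the harness is mine):

#include <stdio.h>
#include <string.h>
#include "prism.h"

int
main(void)
{
    const char *source = "def example; end";
    pm_parser_t parser;
    pm_parser_init(&parser, (const uint8_t *) source, strlen(source), NULL);

    pm_node_t *root = pm_parse(&parser);
    pm_node_t *first = ((pm_program_node_t *) root)->statements->body.nodes[0];

    if (PM_NODE_TYPE_P(first, PM_DEF_NODE)) {
        pm_def_node_t *def = (pm_def_node_t *) first;
        const pm_constant_t *constant =
            pm_constant_pool_id_to_constant(&parser.constant_pool, def->name);
        printf("defines: %.*s\n", (int) constant->length, (const char *) constant->start);
    }

    pm_node_destroy(&parser, root);
    pm_parser_free(&parser);
    return 0;
}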
PinnedExpressionNode.
Definition ast.h:6704
PinnedVariableNode.
Definition ast.h:6762
struct pm_node * variable
PinnedVariableNode::variable.
Definition ast.h:6775
PostExecutionNode.
Definition ast.h:6800
struct pm_statements_node * statements
PostExecutionNode::statements.
Definition ast.h:6808
PreExecutionNode.
Definition ast.h:6838
struct pm_statements_node * statements
PreExecutionNode::statements.
Definition ast.h:6846
ProgramNode.
Definition ast.h:6873
struct pm_statements_node * statements
ProgramNode::statements.
Definition ast.h:6886
RangeNode.
Definition ast.h:6907
struct pm_node * right
RangeNode::right.
Definition ast.h:6937
struct pm_node * left
RangeNode::left.
Definition ast.h:6923
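RangeNode covers beginless and endless ranges by leaving left or right NULL, and the exclusive form a...b is recorded as a flag on the node rather than a separate node type. A sketch of classifying a range; the helper is mine, and the PM_NODE_FLAG_P / PM_RANGE_FLAGS_EXCLUDE_END check reflects my reading of the flag API:

#include <stdio.h>
#include "prism.h"

/* Classifies a range literal: "..10" has no left, "1.." has no right, and
 * "1...10" carries the exclude-end flag. */
static void
describe_range(pm_range_node_t *range)
{
    printf("beginless: %s, endless: %s, excludes end: %s\n",
           range->left == NULL ? "yes" : "no",
           range->right == NULL ? "yes" : "no",
           PM_NODE_FLAG_P(range, PM_RANGE_FLAGS_EXCLUDE_END) ? "yes" : "no");
}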
RationalNode.
Definition ast.h:6965
pm_integer_t denominator
RationalNode::denominator.
Definition ast.h:6986
pm_integer_t numerator
RationalNode::numerator.
Definition ast.h:6977
RegularExpressionNode.
Definition ast.h:7032
RequiredKeywordParameterNode.
Definition ast.h:7074
RequiredParameterNode.
Definition ast.h:7106
pm_constant_id_t name
RequiredParameterNode::name.
Definition ast.h:7114
RescueModifierNode.
Definition ast.h:7129
struct pm_node * rescue_expression
RescueModifierNode::rescue_expression.
Definition ast.h:7147
struct pm_node * expression
RescueModifierNode::expression.
Definition ast.h:7137
RescueNode.
Definition ast.h:7167
struct pm_rescue_node * subsequent
RescueNode::subsequent.
Definition ast.h:7205
struct pm_node * reference
RescueNode::reference.
Definition ast.h:7190
struct pm_node_list exceptions
RescueNode::exceptions.
Definition ast.h:7180
struct pm_statements_node * statements
RescueNode::statements.
Definition ast.h:7200
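Multiple rescue clauses on one begin expression are chained through subsequent, so enumerating them is a linked-list walk. A sketch of such a walk; the helper is mine and assumes the first clause hangs off a BeginNode's rescue_clause field:

#include <stdio.h>
#include "prism.h"

/* Walks a chain of rescue clauses. Each clause lists the exception classes
 * it matches, an optional "=> name" reference, its body, and the next
 * clause in subsequent (NULL at the end of the chain). */
static void
print_rescue_chain(const pm_begin_node_t *begin_node)
{
    int clause = 0;

    for (const pm_rescue_node_t *rescue_node = begin_node->rescue_clause;
         rescue_node != NULL;
         rescue_node = rescue_node->subsequent) {
        printf("clause %d: %zu exception class(es), reference: %s\n",
               ++clause, rescue_node->exceptions.size,
               rescue_node->reference != NULL ? "yes" : "no");
    }
}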
RestParameterNode.
Definition ast.h:7224
ReturnNode.
Definition ast.h:7275
struct pm_arguments_node * arguments
ReturnNode::arguments.
Definition ast.h:7288
rb_encoding * filepath_encoding
This is the encoding of the actual filepath object that will be used when a __FILE__ node is compiled or ...
struct iseq_link_anchor * pre_execution_anchor
This will only be set on the top-level scope node.
VALUE * script_lines
This is a pointer to the list of script lines for the ISEQs that will be associated with this scope node ...
ShareableConstantNode.
Definition ast.h:7327
struct pm_node * write
ShareableConstantNode::write.
Definition ast.h:7337
pm_node_t base
The embedded base node.
Definition ast.h:7329
SingletonClassNode.
Definition ast.h:7352
pm_constant_id_list_t locals
SingletonClassNode::locals.
Definition ast.h:7360
struct pm_node * expression
SingletonClassNode::expression.
Definition ast.h:7375
struct pm_node * body
SingletonClassNode::body.
Definition ast.h:7380
SourceFileNode.
Definition ast.h:7424
pm_string_t filepath
SourceFileNode::filepath.
Definition ast.h:7434
SplatNode.
Definition ast.h:7467
struct pm_node * expression
SplatNode::expression.
Definition ast.h:7480
StatementsNode.
Definition ast.h:7495
struct pm_node_list body
StatementsNode::body.
Definition ast.h:7503
pm_node_t base
The embedded base node.
Definition ast.h:7497
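A StatementsNode is a flat list: its body holds the child nodes in source order, so iterating a program's top-level statements is a plain loop over body.nodes. A sketch (my own harness) that prints each top-level statement together with the line number derived from the parser's newline_list:

#include <stdio.h>
#include <string.h>
#include "prism.h"

int
main(void)
{
    const char *source = "x = 1\ny = 2\nputs x + y\n";
    pm_parser_t parser;
    pm_parser_init(&parser, (const uint8_t *) source, strlen(source), NULL);

    pm_node_t *root = pm_parse(&parser);
    pm_statements_node_t *statements = ((pm_program_node_t *) root)->statements;

    for (size_t index = 0; index < statements->body.size; index++) {
        pm_node_t *statement = statements->body.nodes[index];
        pm_line_column_t position = pm_newline_list_line_column(
            &parser.newline_list, statement->location.start, parser.start_line);
        printf("line %d: %.*s\n", (int) position.line,
               (int) (statement->location.end - statement->location.start),
               (const char *) statement->location.start);
    }

    pm_node_destroy(&parser, root);
    pm_parser_free(&parser);
    return 0;
}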
StringNode.
Definition ast.h:7530
pm_string_t unescaped
StringNode::unescaped.
Definition ast.h:7553
A generic string type that can have various ownership semantics.
Definition pm_string.h:33
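The unescaped field holds the literal's contents with escape sequences already processed, stored as a pm_string_t whose bytes are read through the pm_string.h accessors pm_string_source and pm_string_length. A minimal sketch (my own harness):

#include <stdio.h>
#include <string.h>
#include "prism.h"

int
main(void)
{
    const char *source = "\"a\\nb\""; /* the Ruby literal "a\nb" */
    pm_parser_t parser;
    pm_parser_init(&parser, (const uint8_t *) source, strlen(source), NULL);

    pm_node_t *root = pm_parse(&parser);
    pm_node_t *first = ((pm_program_node_t *) root)->statements->body.nodes[0];

    if (PM_NODE_TYPE_P(first, PM_STRING_NODE)) {
        /* The \n has already been turned into a real newline, so this
         * prints three bytes across two lines. */
        const pm_string_t *unescaped = &((pm_string_node_t *) first)->unescaped;
        printf("%zu bytes: %.*s\n", pm_string_length(unescaped),
               (int) pm_string_length(unescaped),
               (const char *) pm_string_source(unescaped));
    }

    pm_node_destroy(&parser, root);
    pm_parser_free(&parser);
    return 0;
}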
SuperNode.
Definition ast.h:7573
struct pm_arguments_node * arguments
SuperNode::arguments.
Definition ast.h:7593
struct pm_node * block
SuperNode::block.
Definition ast.h:7603
SymbolNode.
Definition ast.h:7626
pm_string_t unescaped
SymbolNode::unescaped.
Definition ast.h:7649
pm_node_t base
The embedded base node.
Definition ast.h:7628
UndefNode.
Definition ast.h:7682
struct pm_node_list names
UndefNode::names.
Definition ast.h:7690
UnlessNode.
Definition ast.h:7713
struct pm_statements_node * statements
UnlessNode::statements.
Definition ast.h:7763
struct pm_node * predicate
UnlessNode::predicate.
Definition ast.h:7742
struct pm_else_node * else_clause
UnlessNode::else_clause.
Definition ast.h:7773
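UnlessNode inverts the usual branch: statements is the body that runs when predicate is falsy, and else_clause, when present, runs when it is truthy; either branch may be NULL. A tiny sketch (the helper name and shape are mine) that makes the selection explicit:

#include <stdbool.h>
#include <stddef.h>
#include "prism.h"

/* Picks the branch an unless expression would run given the truthiness of
 * its predicate. A NULL return means that branch is empty. */
static const pm_node_t *
unless_taken_branch(const pm_unless_node_t *node, bool predicate_truthy)
{
    if (predicate_truthy) {
        return (const pm_node_t *) node->else_clause;
    }
    return (const pm_node_t *) node->statements;
}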
UntilNode.
Definition ast.h:7804
pm_node_t base
The embedded base node.
Definition ast.h:7806
WhenNode.
Definition ast.h:7849
WhileNode.
Definition ast.h:7893
pm_node_t base
The embedded base node.
Definition ast.h:7895
XStringNode.
Definition ast.h:7940
pm_string_t unescaped
XStringNode::unescaped.
Definition ast.h:7963
YieldNode.
Definition ast.h:7978
struct pm_arguments_node * arguments
YieldNode::arguments.
Definition ast.h:7996
uintptr_t ID
Type that represents a Ruby identifier such as a variable name.
Definition value.h:52
uintptr_t VALUE
Type that represents a Ruby object.
Definition value.h:40
static bool RB_TYPE_P(VALUE obj, enum ruby_value_type t)
Queries if the given object is of the given type.
Definition value_type.h:376