Ruby 3.5.0dev (2025-06-27 revision 64a52c25fef8e156630fea559ced7286fe5c3beb)
enumerator.c (64a52c25fef8e156630fea559ced7286fe5c3beb)
1/************************************************
2
3 enumerator.c - provides Enumerator class
4
5 $Author$
6
7 Copyright (C) 2001-2003 Akinori MUSHA
8
9 $Idaemons: /home/cvs/rb/enumerator/enumerator.c,v 1.1.1.1 2001/07/15 10:12:48 knu Exp $
10 $RoughId: enumerator.c,v 1.6 2003/07/27 11:03:24 nobu Exp $
11 $Id$
12
13************************************************/
14
15#include "ruby/internal/config.h"
16
17#ifdef HAVE_FLOAT_H
18#include <float.h>
19#endif
20
21#include "id.h"
22#include "internal.h"
23#include "internal/class.h"
24#include "internal/enumerator.h"
25#include "internal/error.h"
26#include "internal/hash.h"
27#include "internal/imemo.h"
28#include "internal/numeric.h"
29#include "internal/range.h"
30#include "internal/rational.h"
31#include "ruby/ruby.h"
32
33/*
34 * Document-class: Enumerator
35 *
36 * A class which allows both internal and external iteration.
37 *
38 * An Enumerator can be created by the following methods.
39 * - Object#to_enum
40 * - Object#enum_for
41 * - Enumerator.new
42 *
43 * Most methods have two forms: a block form where the contents
44 * are evaluated for each item in the enumeration, and a non-block form
45 * which returns a new Enumerator wrapping the iteration.
46 *
47 * enumerator = %w(one two three).each
48 * puts enumerator.class # => Enumerator
49 *
50 * enumerator.each_with_object("foo") do |item, obj|
51 * puts "#{obj}: #{item}"
52 * end
53 *
54 * # foo: one
55 * # foo: two
56 * # foo: three
57 *
58 * enum_with_obj = enumerator.each_with_object("foo")
59 * puts enum_with_obj.class # => Enumerator
60 *
61 * enum_with_obj.each do |item, obj|
62 * puts "#{obj}: #{item}"
63 * end
64 *
65 * # foo: one
66 * # foo: two
67 * # foo: three
68 *
69 * This allows you to chain Enumerators together. For example, you
70 * can map a list's elements to strings containing the index
71 * and the element as a string via:
72 *
73 * puts %w[foo bar baz].map.with_index { |w, i| "#{i}:#{w}" }
74 * # => ["0:foo", "1:bar", "2:baz"]
75 *
76 * == External Iteration
77 *
78 * An Enumerator can also be used as an external iterator.
79 * For example, Enumerator#next returns the next value of the iterator
80 * or raises StopIteration if the Enumerator is at the end.
81 *
82 * e = [1,2,3].each # returns an enumerator object.
83 * puts e.next # => 1
84 * puts e.next # => 2
85 * puts e.next # => 3
86 * puts e.next # raises StopIteration
87 *
88 * +next+, +next_values+, +peek+, and +peek_values+ are the only methods
89 * which use external iteration (as well as Array#zip when given a non-Array Enumerable, which uses +next+ internally).
90 *
91 * These methods do not affect other internal enumeration methods,
92 * unless the underlying iteration method itself has side effects, e.g. IO#each_line.
93 *
94 * FrozenError will be raised if these methods are called against a frozen enumerator.
95 * Since +rewind+ and +feed+ also change state for external iteration,
96 * these methods may raise FrozenError too.
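 *
 * For example, calling +next+ on a frozen enumerator raises FrozenError:
 *
 *   e = [1, 2, 3].each.freeze
 *   e.next # raises FrozenError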
97 *
98 * External iteration differs *significantly* from internal iteration
99 * due to using a Fiber:
100 * - The Fiber adds some overhead compared to internal enumeration.
101 * - The stacktrace will only include the stack from the Enumerator, not above.
102 * - Fiber-local variables are *not* inherited inside the Enumerator Fiber,
103 * which instead starts with no Fiber-local variables.
104 * - Fiber storage variables *are* inherited and are designed
105 * to handle Enumerator Fibers. Assigning to a Fiber storage variable
106 * only affects the current Fiber, so if you want to change state
107 * in the caller Fiber of the Enumerator Fiber, you need to use an
108 * extra indirection (e.g., use some object in the Fiber storage
109 * variable and mutate some ivar of it).
110 *
111 * Concretely:
112 *
113 * Thread.current[:fiber_local] = 1
114 * Fiber[:storage_var] = 1
115 * e = Enumerator.new do |y|
116 * p Thread.current[:fiber_local] # for external iteration: nil, for internal iteration: 1
117 * p Fiber[:storage_var] # => 1, inherited
118 * Fiber[:storage_var] += 1
119 * y << 42
120 * end
121 *
122 * p e.next # => 42
123 * p Fiber[:storage_var] # => 1 (it ran in a different Fiber)
124 *
125 * e.each { p _1 }
126 * p Fiber[:storage_var] # => 2 (it ran in the same Fiber/"stack" as the current Fiber)
127 *
128 * == Convert External Iteration to Internal Iteration
129 *
130 * You can use an external iterator to implement an internal iterator as follows:
131 *
132 * def ext_each(e)
133 * while true
134 * begin
135 * vs = e.next_values
136 * rescue StopIteration
137 * return $!.result
138 * end
139 * y = yield(*vs)
140 * e.feed y
141 * end
142 * end
143 *
144 * o = Object.new
145 *
146 * def o.each
147 * puts yield
148 * puts yield(1)
149 * puts yield(1, 2)
150 * 3
151 * end
152 *
153 * # use o.each as an internal iterator directly.
154 * puts o.each {|*x| puts x; [:b, *x] }
155 * # => [], [:b], [1], [:b, 1], [1, 2], [:b, 1, 2], 3
156 *
157 * # convert o.each to an external iterator for
158 * # implementing an internal iterator.
159 * puts ext_each(o.to_enum) {|*x| puts x; [:b, *x] }
160 * # => [], [:b], [1], [:b, 1], [1, 2], [:b, 1, 2], 3
161 *
162 */
163VALUE rb_cEnumerator;
164static VALUE rb_cLazy;
165static ID id_rewind, id_to_enum, id_each_entry;
166static ID id_next, id_result, id_receiver, id_arguments, id_memo, id_method, id_force;
167static VALUE sym_each, sym_yield;
168
169static VALUE lazy_use_super_method;
170
171extern ID ruby_static_id_cause;
172
173#define id_call idCall
174#define id_cause ruby_static_id_cause
175#define id_each idEach
176#define id_eqq idEqq
177#define id_initialize idInitialize
178#define id_size idSize
179
180VALUE rb_eStopIteration;
181
182struct enumerator {
183 VALUE obj;
184 ID meth;
185 VALUE args;
186 VALUE fib;
187 VALUE dst;
188 VALUE lookahead;
189 VALUE feedvalue;
190 VALUE stop_exc;
191 VALUE size;
192 VALUE procs;
193 rb_enumerator_size_func *size_fn;
194 int kw_splat;
195};
196
197RUBY_REFERENCES(enumerator_refs) = {
198 RUBY_REF_EDGE(struct enumerator, obj),
199 RUBY_REF_EDGE(struct enumerator, args),
200 RUBY_REF_EDGE(struct enumerator, fib),
201 RUBY_REF_EDGE(struct enumerator, dst),
202 RUBY_REF_EDGE(struct enumerator, lookahead),
203 RUBY_REF_EDGE(struct enumerator, feedvalue),
204 RUBY_REF_EDGE(struct enumerator, stop_exc),
205 RUBY_REF_EDGE(struct enumerator, size),
206 RUBY_REF_EDGE(struct enumerator, procs),
207 RUBY_REF_END
208};
209
210static VALUE rb_cGenerator, rb_cYielder, rb_cEnumProducer;
211
212struct generator {
213 VALUE proc;
214 VALUE obj;
215};
216
217struct yielder {
218 VALUE proc;
219};
220
221struct producer {
222 VALUE init;
223 VALUE proc;
224};
225
226typedef struct MEMO *lazyenum_proc_func(VALUE, struct MEMO *, VALUE, long);
227typedef VALUE lazyenum_size_func(VALUE, VALUE);
228typedef int lazyenum_precheck_func(VALUE proc_entry);
229typedef struct {
230 lazyenum_proc_func *proc;
231 lazyenum_size_func *size;
232 lazyenum_precheck_func *precheck;
233} lazyenum_funcs;
234
235struct proc_entry {
236 VALUE proc;
237 VALUE memo;
238 const lazyenum_funcs *fn;
239};
240
241static VALUE generator_allocate(VALUE klass);
242static VALUE generator_init(VALUE obj, VALUE proc);
243
244static VALUE rb_cEnumChain;
245
246struct enum_chain {
247 VALUE enums;
248 long pos;
249};
250
251static VALUE rb_cEnumProduct;
252
253struct enum_product {
254 VALUE enums;
255};
256
257VALUE rb_cArithSeq;
258
259static const rb_data_type_t enumerator_data_type = {
260 "enumerator",
261 {
262 RUBY_REFS_LIST_PTR(enumerator_refs),
263 RUBY_TYPED_DEFAULT_FREE,
264 NULL, // Nothing allocated externally, so don't need a memsize function
265 NULL,
266 },
267 0, NULL, RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED | RUBY_TYPED_DECL_MARKING | RUBY_TYPED_EMBEDDABLE
268};
269
270static struct enumerator *
271enumerator_ptr(VALUE obj)
272{
273 struct enumerator *ptr;
274
275 TypedData_Get_Struct(obj, struct enumerator, &enumerator_data_type, ptr);
276 if (!ptr || UNDEF_P(ptr->obj)) {
277 rb_raise(rb_eArgError, "uninitialized enumerator");
278 }
279 return ptr;
280}
281
282static void
283proc_entry_mark(void *p)
284{
285 struct proc_entry *ptr = p;
286 rb_gc_mark_movable(ptr->proc);
287 rb_gc_mark_movable(ptr->memo);
288}
289
290static void
291proc_entry_compact(void *p)
292{
293 struct proc_entry *ptr = p;
294 ptr->proc = rb_gc_location(ptr->proc);
295 ptr->memo = rb_gc_location(ptr->memo);
296}
297
298static const rb_data_type_t proc_entry_data_type = {
299 "proc_entry",
300 {
301 proc_entry_mark,
302 RUBY_TYPED_DEFAULT_FREE,
303 NULL, // Nothing allocated externally, so don't need a memsize function
304 proc_entry_compact,
305 },
306 0, 0, RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED | RUBY_TYPED_EMBEDDABLE
307};
308
309static struct proc_entry *
310proc_entry_ptr(VALUE proc_entry)
311{
312 struct proc_entry *ptr;
313
314 TypedData_Get_Struct(proc_entry, struct proc_entry, &proc_entry_data_type, ptr);
315
316 return ptr;
317}
318
319/*
320 * call-seq:
321 * obj.to_enum(method = :each, *args) -> enum
322 * obj.enum_for(method = :each, *args) -> enum
323 * obj.to_enum(method = :each, *args) {|*args| block} -> enum
324 * obj.enum_for(method = :each, *args){|*args| block} -> enum
325 *
326 * Creates a new Enumerator which will enumerate by calling +method+ on
327 * +obj+, passing +args+ if any. What is _yielded_ by the method becomes
328 * the values of the enumerator.
329 *
330 * If a block is given, it will be used to calculate the size of
331 * the enumerator without the need to iterate it (see Enumerator#size).
332 *
333 * === Examples
334 *
335 * str = "xyz"
336 *
337 * enum = str.enum_for(:each_byte)
338 * enum.each { |b| puts b }
339 * # => 120
340 * # => 121
341 * # => 122
342 *
343 * # protect an array from being modified by some_method
344 * a = [1, 2, 3]
345 * some_method(a.to_enum)
346 *
347 * # String#split in block form is more memory-efficient:
348 * very_large_string.split("|") { |chunk| return chunk if chunk.include?('DATE') }
349 * # This could be rewritten more idiomatically with to_enum:
350 * very_large_string.to_enum(:split, "|").lazy.grep(/DATE/).first
351 *
352 * It is typical to call to_enum when defining methods for
353 * a generic Enumerable, in case no block is passed.
354 *
355 * Here is such an example, with parameter passing and a sizing block:
356 *
357 * module Enumerable
358 * # a generic method to repeat the values of any enumerable
359 * def repeat(n)
360 * raise ArgumentError, "#{n} is negative!" if n < 0
361 * unless block_given?
362 * return to_enum(__method__, n) do # __method__ is :repeat here
363 * sz = size # Call size and multiply by n...
364 * sz * n if sz # but return nil if size itself is nil
365 * end
366 * end
367 * each do |*val|
368 * n.times { yield *val }
369 * end
370 * end
371 * end
372 *
373 * %i[hello world].repeat(2) { |w| puts w }
374 * # => Prints 'hello', 'hello', 'world', 'world'
375 * enum = (1..14).repeat(3)
376 * # => returns an Enumerator when called without a block
377 * enum.first(4) # => [1, 1, 1, 2]
378 * enum.size # => 42
379 */
380static VALUE
381obj_to_enum(int argc, VALUE *argv, VALUE obj)
382{
383 VALUE enumerator, meth = sym_each;
384
385 if (argc > 0) {
386 --argc;
387 meth = *argv++;
388 }
389 enumerator = rb_enumeratorize_with_size(obj, meth, argc, argv, 0);
390 if (rb_block_given_p()) {
391 RB_OBJ_WRITE(enumerator, &enumerator_ptr(enumerator)->size, rb_block_proc());
392 }
393 return enumerator;
394}
395
396static VALUE
397enumerator_allocate(VALUE klass)
398{
399 struct enumerator *ptr;
400 VALUE enum_obj;
401
402 enum_obj = TypedData_Make_Struct(klass, struct enumerator, &enumerator_data_type, ptr);
403 ptr->obj = Qundef;
404
405 return enum_obj;
406}
407
408static VALUE
409enumerator_init(VALUE enum_obj, VALUE obj, VALUE meth, int argc, const VALUE *argv, rb_enumerator_size_func *size_fn, VALUE size, int kw_splat)
410{
411 struct enumerator *ptr;
412
413 rb_check_frozen(enum_obj);
414 TypedData_Get_Struct(enum_obj, struct enumerator, &enumerator_data_type, ptr);
415
416 if (!ptr) {
417 rb_raise(rb_eArgError, "unallocated enumerator");
418 }
419
420 RB_OBJ_WRITE(enum_obj, &ptr->obj, obj);
421 ptr->meth = rb_to_id(meth);
422 if (argc) RB_OBJ_WRITE(enum_obj, &ptr->args, rb_ary_new4(argc, argv));
423 ptr->fib = 0;
424 ptr->dst = Qnil;
425 ptr->lookahead = Qundef;
426 ptr->feedvalue = Qundef;
427 ptr->stop_exc = Qfalse;
428 RB_OBJ_WRITE(enum_obj, &ptr->size, size);
429 ptr->size_fn = size_fn;
430 ptr->kw_splat = kw_splat;
431
432 return enum_obj;
433}
434
435static VALUE
436convert_to_feasible_size_value(VALUE obj)
437{
438 if (NIL_P(obj)) {
439 return obj;
440 }
441 else if (rb_respond_to(obj, id_call)) {
442 return obj;
443 }
444 else if (RB_FLOAT_TYPE_P(obj) && RFLOAT_VALUE(obj) == HUGE_VAL) {
445 return obj;
446 }
447 else {
448 return rb_to_int(obj);
449 }
450}
451
452/*
453 * call-seq:
454 * Enumerator.new(size = nil) { |yielder| ... }
455 *
456 * Creates a new Enumerator object, which can be used as an
457 * Enumerable.
458 *
459 * Iteration is defined by the given block, in
460 * which a "yielder" object, given as block parameter, can be used to
461 * yield a value by calling the +yield+ method (aliased as <code><<</code>):
462 *
463 * fib = Enumerator.new do |y|
464 * a = b = 1
465 * loop do
466 * y << a
467 * a, b = b, a + b
468 * end
469 * end
470 *
471 * fib.take(10) # => [1, 1, 2, 3, 5, 8, 13, 21, 34, 55]
472 *
473 * The optional parameter can be used to specify how to calculate the size
474 * in a lazy fashion (see Enumerator#size). It can either be a value or
475 * a callable object.
476 */
477static VALUE
478enumerator_initialize(int argc, VALUE *argv, VALUE obj)
479{
480 VALUE iter = rb_block_proc();
481 VALUE recv = generator_init(generator_allocate(rb_cGenerator), iter);
482 VALUE arg0 = rb_check_arity(argc, 0, 1) ? argv[0] : Qnil;
483 VALUE size = convert_to_feasible_size_value(arg0);
484
485 return enumerator_init(obj, recv, sym_each, 0, 0, 0, size, false);
486}
487
488/* :nodoc: */
489static VALUE
490enumerator_init_copy(VALUE obj, VALUE orig)
491{
492 struct enumerator *ptr0, *ptr1;
493
494 if (!OBJ_INIT_COPY(obj, orig)) return obj;
495 ptr0 = enumerator_ptr(orig);
496 if (ptr0->fib) {
497 /* Fibers cannot be copied */
498 rb_raise(rb_eTypeError, "can't copy execution context");
499 }
500
501 TypedData_Get_Struct(obj, struct enumerator, &enumerator_data_type, ptr1);
502
503 if (!ptr1) {
504 rb_raise(rb_eArgError, "unallocated enumerator");
505 }
506
507 RB_OBJ_WRITE(obj, &ptr1->obj, ptr0->obj);
508 ptr1->meth = ptr0->meth;
509 RB_OBJ_WRITE(obj, &ptr1->args, ptr0->args);
510 ptr1->fib = 0;
511 ptr1->lookahead = Qundef;
512 ptr1->feedvalue = Qundef;
513 RB_OBJ_WRITE(obj, &ptr1->size, ptr0->size);
514 ptr1->size_fn = ptr0->size_fn;
515
516 return obj;
517}
518
519/*
520 * For backwards compatibility; use rb_enumeratorize_with_size
521 */
522VALUE
523rb_enumeratorize(VALUE obj, VALUE meth, int argc, const VALUE *argv)
524{
525 return rb_enumeratorize_with_size(obj, meth, argc, argv, 0);
526}
527
528static VALUE lazy_to_enum_i(VALUE self, VALUE meth, int argc, const VALUE *argv, rb_enumerator_size_func *size_fn, int kw_splat);
529static int lazy_precheck(VALUE procs);
530
531VALUE
532rb_enumeratorize_with_size_kw(VALUE obj, VALUE meth, int argc, const VALUE *argv, rb_enumerator_size_func *size_fn, int kw_splat)
533{
534 VALUE base_class = rb_cEnumerator;
535
536 if (RTEST(rb_obj_is_kind_of(obj, rb_cLazy))) {
537 base_class = rb_cLazy;
538 }
539 else if (RTEST(rb_obj_is_kind_of(obj, rb_cEnumChain))) {
540 obj = enumerator_init(enumerator_allocate(rb_cEnumerator), obj, sym_each, 0, 0, 0, Qnil, false);
541 }
542
543 return enumerator_init(enumerator_allocate(base_class),
544 obj, meth, argc, argv, size_fn, Qnil, kw_splat);
545}
546
547VALUE
548rb_enumeratorize_with_size(VALUE obj, VALUE meth, int argc, const VALUE *argv, rb_enumerator_size_func *size_fn)
549{
550 return rb_enumeratorize_with_size_kw(obj, meth, argc, argv, size_fn, rb_keyword_given_p());
551}
552
553static VALUE
554enumerator_block_call(VALUE obj, rb_block_call_func *func, VALUE arg)
555{
556 int argc = 0;
557 const VALUE *argv = 0;
558 const struct enumerator *e = enumerator_ptr(obj);
559 ID meth = e->meth;
560
561 VALUE args = e->args;
562 if (args) {
563 argc = RARRAY_LENINT(args);
564 argv = RARRAY_CONST_PTR(args);
565 }
566
567 VALUE ret = rb_block_call_kw(e->obj, meth, argc, argv, func, arg, e->kw_splat);
568
569 RB_GC_GUARD(args);
570
571 return ret;
572}
573
574/*
575 * call-seq:
576 * enum.each { |elm| block } -> obj
577 * enum.each -> enum
578 * enum.each(*appending_args) { |elm| block } -> obj
579 * enum.each(*appending_args) -> an_enumerator
580 *
581 * Iterates over the block according to how this Enumerator was constructed.
582 * If no block and no arguments are given, returns self.
583 *
584 * === Examples
585 *
586 * "Hello, world!".scan(/\w+/) #=> ["Hello", "world"]
587 * "Hello, world!".to_enum(:scan, /\w+/).to_a #=> ["Hello", "world"]
588 * "Hello, world!".to_enum(:scan).each(/\w+/).to_a #=> ["Hello", "world"]
589 *
590 * obj = Object.new
591 *
592 * def obj.each_arg(a, b=:b, *rest)
593 * yield a
594 * yield b
595 * yield rest
596 * :method_returned
597 * end
598 *
599 * enum = obj.to_enum :each_arg, :a, :x
600 *
601 * enum.each.to_a #=> [:a, :x, []]
602 * enum.each.equal?(enum) #=> true
603 * enum.each { |elm| elm } #=> :method_returned
604 *
605 * enum.each(:y, :z).to_a #=> [:a, :x, [:y, :z]]
606 * enum.each(:y, :z).equal?(enum) #=> false
607 * enum.each(:y, :z) { |elm| elm } #=> :method_returned
608 *
609 */
610static VALUE
611enumerator_each(int argc, VALUE *argv, VALUE obj)
612{
613 struct enumerator *e = enumerator_ptr(obj);
614
615 if (argc > 0) {
616 VALUE args = (e = enumerator_ptr(obj = rb_obj_dup(obj)))->args;
617 if (args) {
618#if SIZEOF_INT < SIZEOF_LONG
619 /* check int range overflow */
620 rb_long2int(RARRAY_LEN(args) + argc);
621#endif
622 args = rb_ary_dup(args);
623 rb_ary_cat(args, argv, argc);
624 }
625 else {
626 args = rb_ary_new4(argc, argv);
627 }
628 RB_OBJ_WRITE(obj, &e->args, args);
629 e->size = Qnil;
630 e->size_fn = 0;
631 }
632 if (!rb_block_given_p()) return obj;
633
634 if (!lazy_precheck(e->procs)) return Qnil;
635
636 return enumerator_block_call(obj, 0, obj);
637}
638
639static VALUE
640enumerator_with_index_i(RB_BLOCK_CALL_FUNC_ARGLIST(val, m))
641{
642 struct MEMO *memo = (struct MEMO *)m;
643 VALUE idx = memo->v1;
644 MEMO_V1_SET(memo, rb_int_succ(idx));
645
646 if (argc <= 1)
647 return rb_yield_values(2, val, idx);
648
649 return rb_yield_values(2, rb_ary_new4(argc, argv), idx);
650}
651
652static VALUE
653enumerator_size(VALUE obj);
654
655static VALUE
656enumerator_enum_size(VALUE obj, VALUE args, VALUE eobj)
657{
658 return enumerator_size(obj);
659}
660
661/*
662 * call-seq:
663 * e.with_index(offset = 0) {|(*args), idx| ... }
664 * e.with_index(offset = 0)
665 *
666 * Iterates the given block for each element with an index, which
667 * starts from +offset+. If no block is given, returns a new Enumerator
668 * that includes the index, starting from +offset+.
669 *
670 * +offset+:: the starting index to use
671 *
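 * For example, with an offset of 1:
 *
 *   %w(one two three).each.with_index(1) { |w, i| puts "#{i}: #{w}" }
 *   # => 1: one
 *   # => 2: two
 *   # => 3: three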
672 */
673static VALUE
674enumerator_with_index(int argc, VALUE *argv, VALUE obj)
675{
676 VALUE memo;
677
678 rb_check_arity(argc, 0, 1);
679 RETURN_SIZED_ENUMERATOR(obj, argc, argv, enumerator_enum_size);
680 memo = (!argc || NIL_P(memo = argv[0])) ? INT2FIX(0) : rb_to_int(memo);
681 return enumerator_block_call(obj, enumerator_with_index_i, (VALUE)MEMO_NEW(memo, 0, 0));
682}
683
684/*
685 * call-seq:
686 * e.each_with_index {|(*args), idx| ... }
687 * e.each_with_index
688 *
689 * Same as Enumerator#with_index(0), i.e. there is no starting offset.
690 *
691 * If no block is given, a new Enumerator is returned that includes the index.
692 *
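 * For example:
 *
 *   %w(foo bar baz).each.each_with_index.to_a
 *   # => [["foo", 0], ["bar", 1], ["baz", 2]]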
693 */
694static VALUE
695enumerator_each_with_index(VALUE obj)
696{
697 return enumerator_with_index(0, NULL, obj);
698}
699
700static VALUE
701enumerator_with_object_i(RB_BLOCK_CALL_FUNC_ARGLIST(val, memo))
702{
703 if (argc <= 1)
704 return rb_yield_values(2, val, memo);
705
706 return rb_yield_values(2, rb_ary_new4(argc, argv), memo);
707}
708
709/*
710 * call-seq:
711 * e.each_with_object(obj) {|(*args), obj| ... }
712 * e.each_with_object(obj)
713 * e.with_object(obj) {|(*args), obj| ... }
714 * e.with_object(obj)
715 *
716 * Iterates the given block for each element with an arbitrary object, +obj+,
717 * and returns +obj+.
718 *
719 * If no block is given, returns a new Enumerator.
720 *
721 * === Example
722 *
723 * to_three = Enumerator.new do |y|
724 * 3.times do |x|
725 * y << x
726 * end
727 * end
728 *
729 * to_three_with_string = to_three.with_object("foo")
730 * to_three_with_string.each do |x,string|
731 * puts "#{string}: #{x}"
732 * end
733 *
734 * # => foo: 0
735 * # => foo: 1
736 * # => foo: 2
737 */
738static VALUE
739enumerator_with_object(VALUE obj, VALUE memo)
740{
741 RETURN_SIZED_ENUMERATOR(obj, 1, &memo, enumerator_enum_size);
742 enumerator_block_call(obj, enumerator_with_object_i, memo);
743
744 return memo;
745}
746
747static VALUE
748next_ii(RB_BLOCK_CALL_FUNC_ARGLIST(i, obj))
749{
750 struct enumerator *e = enumerator_ptr(obj);
751 VALUE feedvalue = Qnil;
752 VALUE args = rb_ary_new4(argc, argv);
753 rb_fiber_yield(1, &args);
754 if (!UNDEF_P(e->feedvalue)) {
755 feedvalue = e->feedvalue;
756 e->feedvalue = Qundef;
757 }
758 return feedvalue;
759}
760
761static VALUE
762next_i(RB_BLOCK_CALL_FUNC_ARGLIST(_, obj))
763{
764 struct enumerator *e = enumerator_ptr(obj);
765 VALUE nil = Qnil;
766 VALUE result;
767
768 result = rb_block_call(obj, id_each, 0, 0, next_ii, obj);
769 RB_OBJ_WRITE(obj, &e->stop_exc, rb_exc_new2(rb_eStopIteration, "iteration reached an end"));
770 rb_ivar_set(e->stop_exc, id_result, result);
771 return rb_fiber_yield(1, &nil);
772}
773
774static void
775next_init(VALUE obj, struct enumerator *e)
776{
777 VALUE curr = rb_fiber_current();
778 RB_OBJ_WRITE(obj, &e->dst, curr);
779 RB_OBJ_WRITE(obj, &e->fib, rb_fiber_new(next_i, obj));
780 e->lookahead = Qundef;
781}
782
783static VALUE
784get_next_values(VALUE obj, struct enumerator *e)
785{
786 VALUE curr, vs;
787
788 if (e->stop_exc) {
789 VALUE exc = e->stop_exc;
790 VALUE result = rb_attr_get(exc, id_result);
791 VALUE mesg = rb_attr_get(exc, idMesg);
792 if (!NIL_P(mesg)) mesg = rb_str_dup(mesg);
793 VALUE stop_exc = rb_exc_new_str(rb_eStopIteration, mesg);
794 rb_ivar_set(stop_exc, id_cause, exc);
795 rb_ivar_set(stop_exc, id_result, result);
796 rb_exc_raise(stop_exc);
797 }
798
799 curr = rb_fiber_current();
800
801 if (!e->fib || !rb_fiber_alive_p(e->fib)) {
802 next_init(obj, e);
803 }
804
805 vs = rb_fiber_resume(e->fib, 1, &curr);
806 if (e->stop_exc) {
807 e->fib = 0;
808 e->dst = Qnil;
809 e->lookahead = Qundef;
810 e->feedvalue = Qundef;
811 rb_exc_raise(e->stop_exc);
812 }
813 return vs;
814}
815
816/*
817 * call-seq:
818 * e.next_values -> array
819 *
820 * Returns the next object in the enumerator as an array, and moves the
821 * internal position forward. When the position reaches the end,
822 * StopIteration is raised.
823 *
824 * See class-level notes about external iterators.
825 *
826 * This method can be used to distinguish <code>yield</code> and <code>yield
827 * nil</code>.
828 *
829 * === Example
830 *
831 * o = Object.new
832 * def o.each
833 * yield
834 * yield 1
835 * yield 1, 2
836 * yield nil
837 * yield [1, 2]
838 * end
839 * e = o.to_enum
840 * p e.next_values
841 * p e.next_values
842 * p e.next_values
843 * p e.next_values
844 * p e.next_values
845 * e = o.to_enum
846 * p e.next
847 * p e.next
848 * p e.next
849 * p e.next
850 * p e.next
851 *
852 * ## yield args next_values next
853 * # yield [] nil
854 * # yield 1 [1] 1
855 * # yield 1, 2 [1, 2] [1, 2]
856 * # yield nil [nil] nil
857 * # yield [1, 2] [[1, 2]] [1, 2]
858 *
859 */
860
861static VALUE
862enumerator_next_values(VALUE obj)
863{
864 struct enumerator *e = enumerator_ptr(obj);
865 VALUE vs;
866
867 rb_check_frozen(obj);
868
869 if (!UNDEF_P(e->lookahead)) {
870 vs = e->lookahead;
871 e->lookahead = Qundef;
872 return vs;
873 }
874
875 return get_next_values(obj, e);
876}
877
878static VALUE
879ary2sv(VALUE args, int dup)
880{
881 if (!RB_TYPE_P(args, T_ARRAY))
882 return args;
883
884 switch (RARRAY_LEN(args)) {
885 case 0:
886 return Qnil;
887
888 case 1:
889 return RARRAY_AREF(args, 0);
890
891 default:
892 if (dup)
893 return rb_ary_dup(args);
894 return args;
895 }
896}
897
898/*
899 * call-seq:
900 * e.next -> object
901 *
902 * Returns the next object in the enumerator, and moves the internal position
903 * forward. When the position reaches the end, StopIteration is raised.
904 *
905 * === Example
906 *
907 * a = [1,2,3]
908 * e = a.to_enum
909 * p e.next #=> 1
910 * p e.next #=> 2
911 * p e.next #=> 3
912 * p e.next #raises StopIteration
913 *
914 * See class-level notes about external iterators.
915 *
916 */
917
918static VALUE
919enumerator_next(VALUE obj)
920{
921 VALUE vs = enumerator_next_values(obj);
922 return ary2sv(vs, 0);
923}
924
925static VALUE
926enumerator_peek_values(VALUE obj)
927{
928 struct enumerator *e = enumerator_ptr(obj);
929
930 rb_check_frozen(obj);
931
932 if (UNDEF_P(e->lookahead)) {
933 RB_OBJ_WRITE(obj, &e->lookahead, get_next_values(obj, e));
934 }
935
936 return e->lookahead;
937}
938
939/*
940 * call-seq:
941 * e.peek_values -> array
942 *
943 * Returns the next object as an array, similar to Enumerator#next_values, but
944 * doesn't move the internal position forward. If the position is already at
945 * the end, StopIteration is raised.
946 *
947 * See class-level notes about external iterators.
948 *
949 * === Example
950 *
951 * o = Object.new
952 * def o.each
953 * yield
954 * yield 1
955 * yield 1, 2
956 * end
957 * e = o.to_enum
958 * p e.peek_values #=> []
959 * e.next
960 * p e.peek_values #=> [1]
961 * p e.peek_values #=> [1]
962 * e.next
963 * p e.peek_values #=> [1, 2]
964 * e.next
965 * p e.peek_values # raises StopIteration
966 *
967 */
968
969static VALUE
970enumerator_peek_values_m(VALUE obj)
971{
972 return rb_ary_dup(enumerator_peek_values(obj));
973}
974
975/*
976 * call-seq:
977 * e.peek -> object
978 *
979 * Returns the next object in the enumerator, but doesn't move the internal
980 * position forward. If the position is already at the end, StopIteration
981 * is raised.
982 *
983 * See class-level notes about external iterators.
984 *
985 * === Example
986 *
987 * a = [1,2,3]
988 * e = a.to_enum
989 * p e.next #=> 1
990 * p e.peek #=> 2
991 * p e.peek #=> 2
992 * p e.peek #=> 2
993 * p e.next #=> 2
994 * p e.next #=> 3
995 * p e.peek #raises StopIteration
996 *
997 */
998
999static VALUE
1000enumerator_peek(VALUE obj)
1001{
1002 VALUE vs = enumerator_peek_values(obj);
1003 return ary2sv(vs, 1);
1004}
1005
1006/*
1007 * call-seq:
1008 * e.feed obj -> nil
1009 *
1010 * Sets the value to be returned by the next yield inside +e+.
1011 *
1012 * If the value is not set, the yield returns nil.
1013 *
1014 * This value is cleared after being yielded.
1015 *
1016 * # Array#map passes the array's elements to "yield" and collects the
1017 * # results of "yield" as an array.
1018 * # Following example shows that "next" returns the passed elements and
1019 * # values passed to "feed" are collected as an array which can be
1020 * # obtained by StopIteration#result.
1021 * e = [1,2,3].map
1022 * p e.next #=> 1
1023 * e.feed "a"
1024 * p e.next #=> 2
1025 * e.feed "b"
1026 * p e.next #=> 3
1027 * e.feed "c"
1028 * begin
1029 * e.next
1030 * rescue StopIteration
1031 * p $!.result #=> ["a", "b", "c"]
1032 * end
1033 *
1034 * o = Object.new
1035 * def o.each
1036 * x = yield # (2) blocks
1037 * p x # (5) => "foo"
1038 * x = yield # (6) blocks
1039 * p x # (8) => nil
1040 * x = yield # (9) blocks
1041 * p x # not reached w/o another e.next
1042 * end
1043 *
1044 * e = o.to_enum
1045 * e.next # (1)
1046 * e.feed "foo" # (3)
1047 * e.next # (4)
1048 * e.next # (7)
1049 * # (10)
1050 */
1051
1052static VALUE
1053enumerator_feed(VALUE obj, VALUE v)
1054{
1055 struct enumerator *e = enumerator_ptr(obj);
1056
1057 rb_check_frozen(obj);
1058
1059 if (!UNDEF_P(e->feedvalue)) {
1060 rb_raise(rb_eTypeError, "feed value already set");
1061 }
1062 RB_OBJ_WRITE(obj, &e->feedvalue, v);
1063
1064 return Qnil;
1065}
1066
1067/*
1068 * call-seq:
1069 * e.rewind -> e
1070 *
1071 * Rewinds the enumeration sequence to the beginning.
1072 *
1073 * If the enclosed object responds to a "rewind" method, it is called.
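 *
 * === Example
 *
 *   e = [1, 2, 3].each
 *   p e.next   #=> 1
 *   p e.next   #=> 2
 *   e.rewind
 *   p e.next   #=> 1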
1074 */
1075
1076static VALUE
1077enumerator_rewind(VALUE obj)
1078{
1079 struct enumerator *e = enumerator_ptr(obj);
1080
1081 rb_check_frozen(obj);
1082
1083 rb_check_funcall(e->obj, id_rewind, 0, 0);
1084
1085 e->fib = 0;
1086 e->dst = Qnil;
1087 e->lookahead = Qundef;
1088 e->feedvalue = Qundef;
1089 e->stop_exc = Qfalse;
1090 return obj;
1091}
1092
1093static struct generator *generator_ptr(VALUE obj);
1094static VALUE append_method(VALUE obj, VALUE str, ID default_method, VALUE default_args);
1095
1096static VALUE
1097inspect_enumerator(VALUE obj, VALUE dummy, int recur)
1098{
1099 struct enumerator *e;
1100 VALUE eobj, str, cname;
1101
1102 TypedData_Get_Struct(obj, struct enumerator, &enumerator_data_type, e);
1103
1104 cname = rb_obj_class(obj);
1105
1106 if (!e || UNDEF_P(e->obj)) {
1107 return rb_sprintf("#<%"PRIsVALUE": uninitialized>", rb_class_path(cname));
1108 }
1109
1110 if (recur) {
1111 str = rb_sprintf("#<%"PRIsVALUE": ...>", rb_class_path(cname));
1112 return str;
1113 }
1114
1115 if (e->procs) {
1116 long i;
1117
1118 eobj = generator_ptr(e->obj)->obj;
1119 /* For an enumerator with chained procs, traverse all proc entries manually */
1120 if (rb_obj_class(eobj) == cname) {
1121 str = rb_inspect(eobj);
1122 }
1123 else {
1124 str = rb_sprintf("#<%"PRIsVALUE": %+"PRIsVALUE">", rb_class_path(cname), eobj);
1125 }
1126 for (i = 0; i < RARRAY_LEN(e->procs); i++) {
1127 str = rb_sprintf("#<%"PRIsVALUE": %"PRIsVALUE, cname, str);
1128 append_method(RARRAY_AREF(e->procs, i), str, e->meth, e->args);
1129 rb_str_buf_cat2(str, ">");
1130 }
1131 return str;
1132 }
1133
1134 eobj = rb_attr_get(obj, id_receiver);
1135 if (NIL_P(eobj)) {
1136 eobj = e->obj;
1137 }
1138
1139 /* (1..100).each_cons(2) => "#<Enumerator: 1..100:each_cons(2)>" */
1140 str = rb_sprintf("#<%"PRIsVALUE": %+"PRIsVALUE, rb_class_path(cname), eobj);
1141 append_method(obj, str, e->meth, e->args);
1142
1143 rb_str_buf_cat2(str, ">");
1144
1145 return str;
1146}
1147
1148static int
1149key_symbol_p(VALUE key, VALUE val, VALUE arg)
1150{
1151 if (SYMBOL_P(key)) return ST_CONTINUE;
1152 *(int *)arg = FALSE;
1153 return ST_STOP;
1154}
1155
1156static int
1157kwd_append(VALUE key, VALUE val, VALUE str)
1158{
1159 if (!SYMBOL_P(key)) rb_raise(rb_eRuntimeError, "non-symbol key inserted");
1160 rb_str_catf(str, "% "PRIsVALUE": %"PRIsVALUE", ", key, val);
1161 return ST_CONTINUE;
1162}
1163
1164static VALUE
1165append_method(VALUE obj, VALUE str, ID default_method, VALUE default_args)
1166{
1167 VALUE method, eargs;
1168
1169 method = rb_attr_get(obj, id_method);
1170 if (method != Qfalse) {
1171 if (!NIL_P(method)) {
1172 Check_Type(method, T_SYMBOL);
1173 method = rb_sym2str(method);
1174 }
1175 else {
1176 method = rb_id2str(default_method);
1177 }
1178 rb_str_buf_cat2(str, ":");
1179 rb_str_buf_append(str, method);
1180 }
1181
1182 eargs = rb_attr_get(obj, id_arguments);
1183 if (NIL_P(eargs)) {
1184 eargs = default_args;
1185 }
1186 if (eargs != Qfalse) {
1187 long argc = RARRAY_LEN(eargs);
1188 const VALUE *argv = RARRAY_CONST_PTR(eargs); /* WB: no new reference */
1189
1190 if (argc > 0) {
1191 VALUE kwds = Qnil;
1192
1193 rb_str_buf_cat2(str, "(");
1194
1195 if (RB_TYPE_P(argv[argc-1], T_HASH) && !RHASH_EMPTY_P(argv[argc-1])) {
1196 int all_key = TRUE;
1197 rb_hash_foreach(argv[argc-1], key_symbol_p, (VALUE)&all_key);
1198 if (all_key) kwds = argv[--argc];
1199 }
1200
1201 while (argc--) {
1202 VALUE arg = *argv++;
1203
1204 rb_str_append(str, rb_inspect(arg));
1205 rb_str_buf_cat2(str, ", ");
1206 }
1207 if (!NIL_P(kwds)) {
1208 rb_hash_foreach(kwds, kwd_append, str);
1209 }
1210 rb_str_set_len(str, RSTRING_LEN(str)-2);
1211 rb_str_buf_cat2(str, ")");
1212 }
1213 }
1214
1215 return str;
1216}
1217
1218/*
1219 * call-seq:
1220 * e.inspect -> string
1221 *
1222 * Creates a printable version of <i>e</i>.
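 *
 * For example:
 *
 *   (1..100).each_cons(2).inspect #=> "#<Enumerator: 1..100:each_cons(2)>"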
1223 */
1224
1225static VALUE
1226enumerator_inspect(VALUE obj)
1227{
1228 return rb_exec_recursive(inspect_enumerator, obj, 0);
1229}
1230
1231/*
1232 * call-seq:
1233 * e.size -> int, Float::INFINITY or nil
1234 *
1235 * Returns the size of the enumerator, or +nil+ if it can't be calculated lazily.
1236 *
1237 * (1..100).to_a.permutation(4).size # => 94109400
1238 * loop.size # => Float::INFINITY
1239 * (1..100).drop_while.size # => nil
1240 */
1241
1242static VALUE
1243enumerator_size(VALUE obj)
1244{
1245 struct enumerator *e = enumerator_ptr(obj);
1246 int argc = 0;
1247 const VALUE *argv = NULL;
1248 VALUE size;
1249
1250 if (e->procs) {
1251 struct generator *g = generator_ptr(e->obj);
1252 VALUE receiver = rb_check_funcall(g->obj, id_size, 0, 0);
1253 long i = 0;
1254
1255 for (i = 0; i < RARRAY_LEN(e->procs); i++) {
1256 VALUE proc = RARRAY_AREF(e->procs, i);
1257 struct proc_entry *entry = proc_entry_ptr(proc);
1258 lazyenum_size_func *size_fn = entry->fn->size;
1259 if (!size_fn) {
1260 return Qnil;
1261 }
1262 receiver = (*size_fn)(proc, receiver);
1263 }
1264 return receiver;
1265 }
1266
1267 if (e->size_fn) {
1268 return (*e->size_fn)(e->obj, e->args, obj);
1269 }
1270 if (e->args) {
1271 argc = (int)RARRAY_LEN(e->args);
1272 argv = RARRAY_CONST_PTR(e->args);
1273 }
1274 size = rb_check_funcall_kw(e->size, id_call, argc, argv, e->kw_splat);
1275 if (!UNDEF_P(size)) return size;
1276 return e->size;
1277}
1278
1279/*
1280 * Yielder
1281 */
1282static void
1283yielder_mark(void *p)
1284{
1285 struct yielder *ptr = p;
1286 rb_gc_mark_movable(ptr->proc);
1287}
1288
1289static void
1290yielder_compact(void *p)
1291{
1292 struct yielder *ptr = p;
1293 ptr->proc = rb_gc_location(ptr->proc);
1294}
1295
1296static const rb_data_type_t yielder_data_type = {
1297 "yielder",
1298 {
1299 yielder_mark,
1300 RUBY_TYPED_DEFAULT_FREE,
1301 NULL,
1302 yielder_compact,
1303 },
1304 0, 0, RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED | RUBY_TYPED_EMBEDDABLE
1305};
1306
1307static struct yielder *
1308yielder_ptr(VALUE obj)
1309{
1310 struct yielder *ptr;
1311
1312 TypedData_Get_Struct(obj, struct yielder, &yielder_data_type, ptr);
1313 if (!ptr || UNDEF_P(ptr->proc)) {
1314 rb_raise(rb_eArgError, "uninitialized yielder");
1315 }
1316 return ptr;
1317}
1318
1319/* :nodoc: */
1320static VALUE
1321yielder_allocate(VALUE klass)
1322{
1323 struct yielder *ptr;
1324 VALUE obj;
1325
1326 obj = TypedData_Make_Struct(klass, struct yielder, &yielder_data_type, ptr);
1327 ptr->proc = Qundef;
1328
1329 return obj;
1330}
1331
1332static VALUE
1333yielder_init(VALUE obj, VALUE proc)
1334{
1335 struct yielder *ptr;
1336
1337 TypedData_Get_Struct(obj, struct yielder, &yielder_data_type, ptr);
1338
1339 if (!ptr) {
1340 rb_raise(rb_eArgError, "unallocated yielder");
1341 }
1342
1343 RB_OBJ_WRITE(obj, &ptr->proc, proc);
1344
1345 return obj;
1346}
1347
1348/* :nodoc: */
1349static VALUE
1350yielder_initialize(VALUE obj)
1351{
1352 rb_need_block();
1353
1354 return yielder_init(obj, rb_block_proc());
1355}
1356
1357/* :nodoc: */
1358static VALUE
1359yielder_yield(VALUE obj, VALUE args)
1360{
1361 struct yielder *ptr = yielder_ptr(obj);
1362
1363 return rb_proc_call_kw(ptr->proc, args, RB_PASS_CALLED_KEYWORDS);
1364}
1365
1366/* :nodoc: */
1367static VALUE
1368yielder_yield_push(VALUE obj, VALUE arg)
1369{
1370 struct yielder *ptr = yielder_ptr(obj);
1371
1372 rb_proc_call_with_block(ptr->proc, 1, &arg, Qnil);
1373
1374 return obj;
1375}
1376
1377/*
1378 * Returns a Proc object that takes arguments and yields them.
1379 *
1380 * This method is implemented so that a Yielder object can be directly
1381 * passed to another method as a block argument.
1382 *
1383 * enum = Enumerator.new { |y|
1384 * Dir.glob("*.rb") { |file|
1385 * File.open(file) { |f| f.each_line(&y) }
1386 * }
1387 * }
1388 */
1389static VALUE
1390yielder_to_proc(VALUE obj)
1391{
1392 VALUE method = rb_obj_method(obj, sym_yield);
1393
1394 return rb_funcall(method, idTo_proc, 0);
1395}
1396
1397static VALUE
1398yielder_yield_i(RB_BLOCK_CALL_FUNC_ARGLIST(obj, memo))
1399{
1400 return rb_yield_values_kw(argc, argv, RB_PASS_CALLED_KEYWORDS);
1401}
1402
1403static VALUE
1404yielder_new(void)
1405{
1406 return yielder_init(yielder_allocate(rb_cYielder), rb_proc_new(yielder_yield_i, 0));
1407}
1408
1409/*
1410 * Generator
1411 */
1412static void
1413generator_mark(void *p)
1414{
1415 struct generator *ptr = p;
1416 rb_gc_mark_movable(ptr->proc);
1417 rb_gc_mark_movable(ptr->obj);
1418}
1419
1420static void
1421generator_compact(void *p)
1422{
1423 struct generator *ptr = p;
1424 ptr->proc = rb_gc_location(ptr->proc);
1425 ptr->obj = rb_gc_location(ptr->obj);
1426}
1427
1428static const rb_data_type_t generator_data_type = {
1429 "generator",
1430 {
1431 generator_mark,
1432 RUBY_TYPED_DEFAULT_FREE,
1433 NULL,
1434 generator_compact,
1435 },
1436 0, 0, RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED | RUBY_TYPED_EMBEDDABLE
1437};
1438
1439static struct generator *
1440generator_ptr(VALUE obj)
1441{
1442 struct generator *ptr;
1443
1444 TypedData_Get_Struct(obj, struct generator, &generator_data_type, ptr);
1445 if (!ptr || UNDEF_P(ptr->proc)) {
1446 rb_raise(rb_eArgError, "uninitialized generator");
1447 }
1448 return ptr;
1449}
1450
1451/* :nodoc: */
1452static VALUE
1453generator_allocate(VALUE klass)
1454{
1455 struct generator *ptr;
1456 VALUE obj;
1457
1458 obj = TypedData_Make_Struct(klass, struct generator, &generator_data_type, ptr);
1459 ptr->proc = Qundef;
1460
1461 return obj;
1462}
1463
1464static VALUE
1465generator_init(VALUE obj, VALUE proc)
1466{
1467 struct generator *ptr;
1468
1469 rb_check_frozen(obj);
1470 TypedData_Get_Struct(obj, struct generator, &generator_data_type, ptr);
1471
1472 if (!ptr) {
1473 rb_raise(rb_eArgError, "unallocated generator");
1474 }
1475
1476 RB_OBJ_WRITE(obj, &ptr->proc, proc);
1477
1478 return obj;
1479}
1480
1481/* :nodoc: */
1482static VALUE
1483generator_initialize(int argc, VALUE *argv, VALUE obj)
1484{
1485 VALUE proc;
1486
1487 if (argc == 0) {
1488 rb_need_block();
1489
1490 proc = rb_block_proc();
1491 }
1492 else {
1493 rb_scan_args(argc, argv, "1", &proc);
1494
1495 if (!rb_obj_is_proc(proc))
1496 rb_raise(rb_eTypeError,
1497 "wrong argument type %"PRIsVALUE" (expected Proc)",
1498 rb_obj_class(proc));
1499
1500 if (rb_block_given_p()) {
1501 rb_warn("given block not used");
1502 }
1503 }
1504
1505 return generator_init(obj, proc);
1506}
1507
1508/* :nodoc: */
1509static VALUE
1510generator_init_copy(VALUE obj, VALUE orig)
1511{
1512 struct generator *ptr0, *ptr1;
1513
1514 if (!OBJ_INIT_COPY(obj, orig)) return obj;
1515
1516 ptr0 = generator_ptr(orig);
1517
1518 TypedData_Get_Struct(obj, struct generator, &generator_data_type, ptr1);
1519
1520 if (!ptr1) {
1521 rb_raise(rb_eArgError, "unallocated generator");
1522 }
1523
1524 RB_OBJ_WRITE(obj, &ptr1->proc, ptr0->proc);
1525
1526 return obj;
1527}
1528
1529/* :nodoc: */
1530static VALUE
1531generator_each(int argc, VALUE *argv, VALUE obj)
1532{
1533 struct generator *ptr = generator_ptr(obj);
1534 VALUE args = rb_ary_new2(argc + 1);
1535
1536 rb_ary_push(args, yielder_new());
1537 if (argc > 0) {
1538 rb_ary_cat(args, argv, argc);
1539 }
1540
1541 return rb_proc_call_kw(ptr->proc, args, RB_PASS_CALLED_KEYWORDS);
1542}
1543
1544/* Lazy Enumerator methods */
1545static VALUE
1546enum_size(VALUE self)
1547{
1548 VALUE r = rb_check_funcall(self, id_size, 0, 0);
1549 return UNDEF_P(r) ? Qnil : r;
1550}
1551
1552static VALUE
1553lazyenum_size(VALUE self, VALUE args, VALUE eobj)
1554{
1555 return enum_size(self);
1556}
1557
1558#define lazy_receiver_size lazy_map_size
1559
1560static VALUE
1561lazy_init_iterator(RB_BLOCK_CALL_FUNC_ARGLIST(val, m))
1562{
1563 VALUE result;
1564 if (argc == 1) {
1565 VALUE args[2];
1566 args[0] = m;
1567 args[1] = val;
1568 result = rb_yield_values2(2, args);
1569 }
1570 else {
1571 VALUE args;
1572 int len = rb_long2int((long)argc + 1);
1573 VALUE *nargv = ALLOCV_N(VALUE, args, len);
1574
1575 nargv[0] = m;
1576 if (argc > 0) {
1577 MEMCPY(nargv + 1, argv, VALUE, argc);
1578 }
1579 result = rb_yield_values2(len, nargv);
1580 ALLOCV_END(args);
1581 }
1582 if (UNDEF_P(result)) rb_iter_break();
1583 return Qnil;
1584}
1585
1586static VALUE
1587lazy_init_block_i(RB_BLOCK_CALL_FUNC_ARGLIST(val, m))
1588{
1589 rb_block_call(m, id_each, argc-1, argv+1, lazy_init_iterator, val);
1590 return Qnil;
1591}
1592
1593#define memo_value v2
1594#define memo_flags u3.state
1595#define LAZY_MEMO_BREAK 1
1596#define LAZY_MEMO_PACKED 2
1597#define LAZY_MEMO_BREAK_P(memo) ((memo)->memo_flags & LAZY_MEMO_BREAK)
1598#define LAZY_MEMO_PACKED_P(memo) ((memo)->memo_flags & LAZY_MEMO_PACKED)
1599#define LAZY_MEMO_SET_BREAK(memo) ((memo)->memo_flags |= LAZY_MEMO_BREAK)
1600#define LAZY_MEMO_RESET_BREAK(memo) ((memo)->memo_flags &= ~LAZY_MEMO_BREAK)
1601#define LAZY_MEMO_SET_VALUE(memo, value) MEMO_V2_SET(memo, value)
1602#define LAZY_MEMO_SET_PACKED(memo) ((memo)->memo_flags |= LAZY_MEMO_PACKED)
1603#define LAZY_MEMO_RESET_PACKED(memo) ((memo)->memo_flags &= ~LAZY_MEMO_PACKED)
1604
1605static VALUE lazy_yielder_result(struct MEMO *result, VALUE yielder, VALUE procs_array, VALUE memos, long i);
1606
1607static VALUE
1608lazy_init_yielder(RB_BLOCK_CALL_FUNC_ARGLIST(_, m))
1609{
1610 VALUE yielder = RARRAY_AREF(m, 0);
1611 VALUE procs_array = RARRAY_AREF(m, 1);
1612 VALUE memos = rb_attr_get(yielder, id_memo);
1613 struct MEMO *result;
1614
1615 result = MEMO_NEW(m, rb_enum_values_pack(argc, argv),
1616 argc > 1 ? LAZY_MEMO_PACKED : 0);
1617 return lazy_yielder_result(result, yielder, procs_array, memos, 0);
1618}
1619
1620static VALUE
1621lazy_yielder_yield(struct MEMO *result, long memo_index, int argc, const VALUE *argv)
1622{
1623 VALUE m = result->v1;
1624 VALUE yielder = RARRAY_AREF(m, 0);
1625 VALUE procs_array = RARRAY_AREF(m, 1);
1626 VALUE memos = rb_attr_get(yielder, id_memo);
1627 LAZY_MEMO_SET_VALUE(result, rb_enum_values_pack(argc, argv));
1628 if (argc > 1)
1629 LAZY_MEMO_SET_PACKED(result);
1630 else
1631 LAZY_MEMO_RESET_PACKED(result);
1632 return lazy_yielder_result(result, yielder, procs_array, memos, memo_index);
1633}
1634
1635static VALUE
1636lazy_yielder_result(struct MEMO *result, VALUE yielder, VALUE procs_array, VALUE memos, long i)
1637{
1638 int cont = 1;
1639
1640 for (; i < RARRAY_LEN(procs_array); i++) {
1641 VALUE proc = RARRAY_AREF(procs_array, i);
1642 struct proc_entry *entry = proc_entry_ptr(proc);
1643 if (!(*entry->fn->proc)(proc, result, memos, i)) {
1644 cont = 0;
1645 break;
1646 }
1647 }
1648
1649 if (cont) {
1650 rb_funcall2(yielder, idLTLT, 1, &(result->memo_value));
1651 }
1652 if (LAZY_MEMO_BREAK_P(result)) {
1653 rb_iter_break();
1654 }
1655 return result->memo_value;
1656}
1657
1658static VALUE
1659lazy_init_block(RB_BLOCK_CALL_FUNC_ARGLIST(val, m))
1660{
1661 VALUE procs = RARRAY_AREF(m, 1);
1662
1663 rb_ivar_set(val, id_memo, rb_ary_new2(RARRAY_LEN(procs)));
1664 rb_block_call(RARRAY_AREF(m, 0), id_each, 0, 0,
1665 lazy_init_yielder, rb_ary_new3(2, val, procs));
1666 return Qnil;
1667}
1668
1669static VALUE
1670lazy_generator_init(VALUE enumerator, VALUE procs)
1671{
1673 VALUE obj;
1674 struct generator *gen_ptr;
1675 struct enumerator *e = enumerator_ptr(enumerator);
1676
1677 if (RARRAY_LEN(procs) > 0) {
1678 struct generator *old_gen_ptr = generator_ptr(e->obj);
1679 obj = old_gen_ptr->obj;
1680 }
1681 else {
1682 obj = enumerator;
1683 }
1684
1685 generator = generator_allocate(rb_cGenerator);
1686
1687 rb_block_call(generator, id_initialize, 0, 0,
1688 lazy_init_block, rb_ary_new3(2, obj, procs));
1689
1690 gen_ptr = generator_ptr(generator);
1691 RB_OBJ_WRITE(generator, &gen_ptr->obj, obj);
1692
1693 return generator;
1694}
1695
1696static int
1697lazy_precheck(VALUE procs)
1698{
1699 if (RTEST(procs)) {
1700 long num_procs = RARRAY_LEN(procs), i = num_procs;
1701 while (i-- > 0) {
1702 VALUE proc = RARRAY_AREF(procs, i);
1703 struct proc_entry *entry = proc_entry_ptr(proc);
1704 lazyenum_precheck_func *precheck = entry->fn->precheck;
1705 if (precheck && !precheck(proc)) return FALSE;
1706 }
1707 }
1708
1709 return TRUE;
1710}
1711
1712/*
1713 * Document-class: Enumerator::Lazy
1714 *
1715 * Enumerator::Lazy is a special type of Enumerator that allows constructing
1716 * chains of operations without evaluating them immediately, and evaluating
1717 * values on an as-needed basis. In order to do so, it redefines most Enumerable
1718 * methods so that they just construct another lazy enumerator.
1719 *
1720 * Enumerator::Lazy can be constructed from any Enumerable with the
1721 * Enumerable#lazy method.
1722 *
1723 * lazy = (1..Float::INFINITY).lazy.select(&:odd?).drop(10).take_while { |i| i < 30 }
1724 * # => #<Enumerator::Lazy: #<Enumerator::Lazy: #<Enumerator::Lazy: #<Enumerator::Lazy: 1..Infinity>:select>:drop(10)>:take_while>
1725 *
1726 * The real enumeration is performed when any non-redefined Enumerable method
1727 * is called, like Enumerable#first or Enumerable#to_a (the latter is aliased
1728 * as #force for more semantic code):
1729 *
1730 * lazy.first(2)
1731 * #=> [21, 23]
1732 *
1733 * lazy.force
1734 * #=> [21, 23, 25, 27, 29]
1735 *
1736 * Note that most Enumerable methods that can be called with or without
1737 * a block will always require a block on Enumerator::Lazy:
1738 *
1739 * [1, 2, 3].map #=> #<Enumerator: [1, 2, 3]:map>
1740 * [1, 2, 3].lazy.map # ArgumentError: tried to call lazy map without a block
1741 *
1742 * This class allows idiomatic calculations on long or infinite sequences, as well
1743 * as chaining of calculations without constructing intermediate arrays.
1744 *
1745 * Example for working with a slowly calculated sequence:
1746 *
1747 * require 'open-uri'
1748 *
1749 * # This will fetch all URLs before selecting
1750 * # necessary data
1751 * URLS.map { |u| JSON.parse(URI.open(u).read) }
1752 * .select { |data| data.key?('stats') }
1753 * .first(5)
1754 *
1755 * # This will fetch URLs one-by-one, only till
1756 * # there is enough data to satisfy the condition
1757 * URLS.lazy.map { |u| JSON.parse(URI.open(u).read) }
1758 * .select { |data| data.key?('stats') }
1759 * .first(5)
1760 *
1761 * Ending a chain with ".eager" generates a non-lazy enumerator, which
1762 * is suitable for returning or passing to another method that expects
1763 * a normal enumerator.
1764 *
1765 * def active_items
1766 * groups
1767 * .lazy
1768 * .flat_map(&:items)
1769 * .reject(&:disabled)
1770 * .eager
1771 * end
1772 *
1773 * # This works lazily; if a checked item is found, it stops
1774 * # iteration and does not look into remaining groups.
1775 * first_checked = active_items.find(&:checked)
1776 *
1777 * # This returns an array of items like a normal enumerator does.
1778 * all_checked = active_items.select(&:checked)
1779 *
1780 */
1781
1782/*
1783 * call-seq:
1784 * Lazy.new(obj, size=nil) { |yielder, *values| block }
1785 *
1786 * Creates a new Lazy enumerator. When the enumerator is actually enumerated
1787 * (e.g. by calling #force), +obj+ will be enumerated and each value passed
1788 * to the given block. The block can yield values back using +yielder+.
1789 * For example, to create a "filter+map" enumerator:
1790 *
1791 * def filter_map(sequence)
1792 * Lazy.new(sequence) do |yielder, *values|
1793 * result = yield *values
1794 * yielder << result if result
1795 * end
1796 * end
1797 *
1798 * filter_map(1..Float::INFINITY) {|i| i*i if i.even?}.first(5)
1799 * #=> [4, 16, 36, 64, 100]
1800 */
1801static VALUE
1802lazy_initialize(int argc, VALUE *argv, VALUE self)
1803{
1804 VALUE obj, size = Qnil;
1805 VALUE generator;
1806
1807 rb_check_arity(argc, 1, 2);
1808 if (!rb_block_given_p()) {
1809 rb_raise(rb_eArgError, "tried to call lazy new without a block");
1810 }
1811 obj = argv[0];
1812 if (argc > 1) {
1813 size = argv[1];
1814 }
1815 generator = generator_allocate(rb_cGenerator);
1816 rb_block_call(generator, id_initialize, 0, 0, lazy_init_block_i, obj);
1817 enumerator_init(self, generator, sym_each, 0, 0, 0, size, 0);
1818 rb_ivar_set(self, id_receiver, obj);
1819
1820 return self;
1821}
1822
1823#if 0 /* for RDoc */
1824/*
1825 * call-seq:
1826 * lazy.to_a -> array
1827 * lazy.force -> array
1828 *
1829 * Expands +lazy+ enumerator to an array.
1830 * See Enumerable#to_a.
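 *
 * For example:
 *
 *   (1..10).lazy.select(&:even?).force #=> [2, 4, 6, 8, 10]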
1831 */
1832static VALUE
1833lazy_to_a(VALUE self)
1834{
1835}
1836#endif
1837
1838static void
1839lazy_set_args(VALUE lazy, VALUE args)
1840{
1841 ID id = rb_frame_this_func();
1842 rb_ivar_set(lazy, id_method, ID2SYM(id));
1843 if (NIL_P(args)) {
1844 /* Qfalse indicates that the arguments are empty */
1845 rb_ivar_set(lazy, id_arguments, Qfalse);
1846 }
1847 else {
1848 rb_ivar_set(lazy, id_arguments, args);
1849 }
1850}
1851
1852#if 0
1853static VALUE
1854lazy_set_method(VALUE lazy, VALUE args, rb_enumerator_size_func *size_fn)
1855{
1856 struct enumerator *e = enumerator_ptr(lazy);
1857 lazy_set_args(lazy, args);
1858 e->size_fn = size_fn;
1859 return lazy;
1860}
1861#endif
1862
1863static VALUE
1864lazy_add_method(VALUE obj, int argc, VALUE *argv, VALUE args, VALUE memo,
1865 const lazyenum_funcs *fn)
1866{
1867 struct enumerator *new_e;
1868 VALUE new_obj;
1869 VALUE new_generator;
1870 VALUE new_procs;
1871 struct enumerator *e = enumerator_ptr(obj);
1872 struct proc_entry *entry;
1873 VALUE entry_obj = TypedData_Make_Struct(rb_cObject, struct proc_entry,
1874 &proc_entry_data_type, entry);
1875 if (rb_block_given_p()) {
1876 RB_OBJ_WRITE(entry_obj, &entry->proc, rb_block_proc());
1877 }
1878 entry->fn = fn;
1879 RB_OBJ_WRITE(entry_obj, &entry->memo, args);
1880
1881 lazy_set_args(entry_obj, memo);
1882
1883 new_procs = RTEST(e->procs) ? rb_ary_dup(e->procs) : rb_ary_new();
1884 new_generator = lazy_generator_init(obj, new_procs);
1885 rb_ary_push(new_procs, entry_obj);
1886
1887 new_obj = enumerator_init_copy(enumerator_allocate(rb_cLazy), obj);
1888 new_e = RTYPEDDATA_GET_DATA(new_obj);
1889 RB_OBJ_WRITE(new_obj, &new_e->obj, new_generator);
1890 RB_OBJ_WRITE(new_obj, &new_e->procs, new_procs);
1891
1892 if (argc > 0) {
1893 new_e->meth = rb_to_id(*argv++);
1894 --argc;
1895 }
1896 else {
1897 new_e->meth = id_each;
1898 }
1899
1900 RB_OBJ_WRITE(new_obj, &new_e->args, rb_ary_new4(argc, argv));
1901
1902 return new_obj;
1903}
1904
1905/*
1906 * call-seq:
1907 * e.lazy -> lazy_enumerator
1908 *
1909 * Returns an Enumerator::Lazy, which redefines most Enumerable
1910 * methods to postpone enumeration and enumerate values only on an
1911 * as-needed basis.
1912 *
1913 * === Example
1914 *
1915 * The following program finds pythagorean triples:
1916 *
1917 * def pythagorean_triples
1918 * (1..Float::INFINITY).lazy.flat_map {|z|
1919 * (1..z).flat_map {|x|
1920 * (x..z).select {|y|
1921 * x**2 + y**2 == z**2
1922 * }.map {|y|
1923 * [x, y, z]
1924 * }
1925 * }
1926 * }
1927 * end
1928 * # show first ten pythagorean triples
1929 * p pythagorean_triples.take(10).force # take is lazy, so force is needed
1930 * p pythagorean_triples.first(10) # first is eager
1931 * # show pythagorean triples less than 100
1932 * p pythagorean_triples.take_while { |*, z| z < 100 }.force
1933 */
1934static VALUE
1935enumerable_lazy(VALUE obj)
1936{
1937 VALUE result = lazy_to_enum_i(obj, sym_each, 0, 0, lazyenum_size, rb_keyword_given_p());
1938 /* Qfalse indicates that the Enumerator::Lazy has no method name */
1939 rb_ivar_set(result, id_method, Qfalse);
1940 return result;
1941}
1942
1943static VALUE
1944lazy_to_enum_i(VALUE obj, VALUE meth, int argc, const VALUE *argv, rb_enumerator_size_func *size_fn, int kw_splat)
1945{
1946 return enumerator_init(enumerator_allocate(rb_cLazy),
1947 obj, meth, argc, argv, size_fn, Qnil, kw_splat);
1948}
1949
1950/*
1951 * call-seq:
1952 * lzy.to_enum(method = :each, *args) -> lazy_enum
1953 * lzy.enum_for(method = :each, *args) -> lazy_enum
1954 * lzy.to_enum(method = :each, *args) {|*args| block } -> lazy_enum
1955 * lzy.enum_for(method = :each, *args) {|*args| block } -> lazy_enum
1956 *
1957 * Similar to Object#to_enum, except it returns a lazy enumerator.
1958 * This makes it easy to define Enumerable methods that will
1959 * naturally remain lazy if called from a lazy enumerator.
1960 *
1961 * For example, continuing from the example in Object#to_enum:
1962 *
1963 * # See Object#to_enum for the definition of repeat
1964 * r = 1..Float::INFINITY
1965 * r.repeat(2).first(5) # => [1, 1, 2, 2, 3]
1966 * r.repeat(2).class # => Enumerator
1967 * r.repeat(2).map{|n| n ** 2}.first(5) # => endless loop!
1968 * # works naturally on lazy enumerator:
1969 * r.lazy.repeat(2).class # => Enumerator::Lazy
1970 * r.lazy.repeat(2).map{|n| n ** 2}.first(5) # => [1, 1, 4, 4, 9]
1971 */
1972
1973static VALUE
1974lazy_to_enum(int argc, VALUE *argv, VALUE self)
1975{
1976 VALUE lazy, meth = sym_each, super_meth;
1977
1978 if (argc > 0) {
1979 --argc;
1980 meth = *argv++;
1981 }
1982 if (RTEST((super_meth = rb_hash_aref(lazy_use_super_method, meth)))) {
1983 meth = super_meth;
1984 }
1985 lazy = lazy_to_enum_i(self, meth, argc, argv, 0, rb_keyword_given_p());
1986 if (rb_block_given_p()) {
1987 RB_OBJ_WRITE(lazy, &enumerator_ptr(lazy)->size, rb_block_proc());
1988 }
1989 return lazy;
1990}
1991
1992static VALUE
1993lazy_eager_size(VALUE self, VALUE args, VALUE eobj)
1994{
1995 return enum_size(self);
1996}
1997
1998/*
1999 * call-seq:
2000 * lzy.eager -> enum
2001 *
2002 * Returns a non-lazy Enumerator converted from the lazy enumerator.
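 *
 * For example:
 *
 *   lazy = (1..Float::INFINITY).lazy.select(&:odd?)
 *   lazy.class       #=> Enumerator::Lazy
 *   lazy.eager.class #=> Enumerator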
2003 */
2004
2005static VALUE
2006lazy_eager(VALUE self)
2007{
2008 return enumerator_init(enumerator_allocate(rb_cEnumerator),
2009 self, sym_each, 0, 0, lazy_eager_size, Qnil, 0);
2010}
2011
2012static VALUE
2013lazyenum_yield(VALUE proc_entry, struct MEMO *result)
2014{
2015 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2016 return rb_proc_call_with_block(entry->proc, 1, &result->memo_value, Qnil);
2017}
2018
2019static VALUE
2020lazyenum_yield_values(VALUE proc_entry, struct MEMO *result)
2021{
2022 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2023 int argc = 1;
2024 const VALUE *argv = &result->memo_value;
2025 if (LAZY_MEMO_PACKED_P(result)) {
2026 const VALUE args = *argv;
2027 argc = RARRAY_LENINT(args);
2028 argv = RARRAY_CONST_PTR(args);
2029 }
2030 return rb_proc_call_with_block(entry->proc, argc, argv, Qnil);
2031}
2032
2033static struct MEMO *
2034lazy_map_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2035{
2036 VALUE value = lazyenum_yield_values(proc_entry, result);
2037 LAZY_MEMO_SET_VALUE(result, value);
2038 LAZY_MEMO_RESET_PACKED(result);
2039 return result;
2040}
2041
2042static VALUE
2043lazy_map_size(VALUE entry, VALUE receiver)
2044{
2045 return receiver;
2046}
2047
2048static const lazyenum_funcs lazy_map_funcs = {
2049 lazy_map_proc, lazy_map_size,
2050};
2051
2052/*
2053 * call-seq:
2054 * lazy.collect { |obj| block } -> lazy_enumerator
2055 * lazy.map { |obj| block } -> lazy_enumerator
2056 *
2057 * Like Enumerable#map, but chains operation to be lazy-evaluated.
2058 *
2059 * (1..Float::INFINITY).lazy.map {|i| i**2 }
2060 * #=> #<Enumerator::Lazy: #<Enumerator::Lazy: 1..Infinity>:map>
2061 * (1..Float::INFINITY).lazy.map {|i| i**2 }.first(3)
2062 * #=> [1, 4, 9]
2063 */
2064
2065static VALUE
2066lazy_map(VALUE obj)
2067{
2068 if (!rb_block_given_p()) {
2069 rb_raise(rb_eArgError, "tried to call lazy map without a block");
2070 }
2071
2072 return lazy_add_method(obj, 0, 0, Qnil, Qnil, &lazy_map_funcs);
2073}
2074
2075struct flat_map_i_arg {
2076 struct MEMO *result;
2077 long index;
2078};
2079
2080static VALUE
2081lazy_flat_map_i(RB_BLOCK_CALL_FUNC_ARGLIST(i, y))
2082{
2083 struct flat_map_i_arg *arg = (struct flat_map_i_arg *)y;
2084
2085 return lazy_yielder_yield(arg->result, arg->index, argc, argv);
2086}
2087
2088static struct MEMO *
2089lazy_flat_map_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2090{
2091 VALUE value = lazyenum_yield_values(proc_entry, result);
2092 VALUE ary = 0;
2093 const long proc_index = memo_index + 1;
2094 int break_p = LAZY_MEMO_BREAK_P(result);
2095
2096 if (RB_TYPE_P(value, T_ARRAY)) {
2097 ary = value;
2098 }
2099 else if (rb_respond_to(value, id_force) && rb_respond_to(value, id_each)) {
2100 struct flat_map_i_arg arg = {.result = result, .index = proc_index};
2101 LAZY_MEMO_RESET_BREAK(result);
2102 rb_block_call(value, id_each, 0, 0, lazy_flat_map_i, (VALUE)&arg);
2103 if (break_p) LAZY_MEMO_SET_BREAK(result);
2104 return 0;
2105 }
2106
2107 if (ary || !NIL_P(ary = rb_check_array_type(value))) {
2108 long i;
2109 LAZY_MEMO_RESET_BREAK(result);
2110 for (i = 0; i + 1 < RARRAY_LEN(ary); i++) {
2111 const VALUE argv = RARRAY_AREF(ary, i);
2112 lazy_yielder_yield(result, proc_index, 1, &argv);
2113 }
2114 if (break_p) LAZY_MEMO_SET_BREAK(result);
2115 if (i >= RARRAY_LEN(ary)) return 0;
2116 value = RARRAY_AREF(ary, i);
2117 }
2118 LAZY_MEMO_SET_VALUE(result, value);
2119 LAZY_MEMO_RESET_PACKED(result);
2120 return result;
2121}
2122
2123static const lazyenum_funcs lazy_flat_map_funcs = {
2124 lazy_flat_map_proc, 0,
2125};
2126
2127/*
2128 * call-seq:
2129 * lazy.collect_concat { |obj| block } -> a_lazy_enumerator
2130 * lazy.flat_map { |obj| block } -> a_lazy_enumerator
2131 *
2132 * Returns a new lazy enumerator with the concatenated results of running
2133 * +block+ once for every element in the lazy enumerator.
2134 *
2135 * ["foo", "bar"].lazy.flat_map {|i| i.each_char.lazy}.force
2136 * #=> ["f", "o", "o", "b", "a", "r"]
2137 *
2138 * A value +x+ returned by +block+ is decomposed if either of
2139 * the following conditions is true:
2140 *
2141 * * +x+ responds to both each and force, which means that
2142 * +x+ is a lazy enumerator.
2143 * * +x+ is an array or responds to to_ary.
2144 *
2145 * Otherwise, +x+ is contained as-is in the return value.
2146 *
2147 * [{a:1}, {b:2}].lazy.flat_map {|i| i}.force
2148 * #=> [{:a=>1}, {:b=>2}]
2149 */
2150static VALUE
2151lazy_flat_map(VALUE obj)
2152{
2153 if (!rb_block_given_p()) {
2154 rb_raise(rb_eArgError, "tried to call lazy flat_map without a block");
2155 }
2156
2157 return lazy_add_method(obj, 0, 0, Qnil, Qnil, &lazy_flat_map_funcs);
2158}
2159
2160static struct MEMO *
2161lazy_select_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2162{
2163 VALUE chain = lazyenum_yield(proc_entry, result);
2164 if (!RTEST(chain)) return 0;
2165 return result;
2166}
2167
2168static const lazyenum_funcs lazy_select_funcs = {
2169 lazy_select_proc, 0,
2170};
2171
2172/*
2173 * call-seq:
2174 * lazy.find_all { |obj| block } -> lazy_enumerator
2175 * lazy.select { |obj| block } -> lazy_enumerator
2176 * lazy.filter { |obj| block } -> lazy_enumerator
2177 *
2178 * Like Enumerable#select, but chains operation to be lazy-evaluated.
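 *
 * For illustration (an added example, not from the original docs):
 *
 *     (1..Float::INFINITY).lazy.select(&:even?).first(3)
 *     #=> [2, 4, 6]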
2179 */
2180static VALUE
2181lazy_select(VALUE obj)
2182{
2183 if (!rb_block_given_p()) {
2184 rb_raise(rb_eArgError, "tried to call lazy select without a block");
2185 }
2186
2187 return lazy_add_method(obj, 0, 0, Qnil, Qnil, &lazy_select_funcs);
2188}
2189
2190static struct MEMO *
2191lazy_filter_map_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2192{
2193 VALUE value = lazyenum_yield_values(proc_entry, result);
2194 if (!RTEST(value)) return 0;
2195 LAZY_MEMO_SET_VALUE(result, value);
2196 LAZY_MEMO_RESET_PACKED(result);
2197 return result;
2198}
2199
2200static const lazyenum_funcs lazy_filter_map_funcs = {
2201 lazy_filter_map_proc, 0,
2202};
2203
2204/*
2205 * call-seq:
2206 * lazy.filter_map { |obj| block } -> lazy_enumerator
2207 *
2208 * Like Enumerable#filter_map, but chains operation to be lazy-evaluated.
2209 *
2210 * (1..).lazy.filter_map { |i| i * 2 if i.even? }.first(5)
2211 * #=> [4, 8, 12, 16, 20]
2212 */
2213
2214static VALUE
2215lazy_filter_map(VALUE obj)
2216{
2217 if (!rb_block_given_p()) {
2218 rb_raise(rb_eArgError, "tried to call lazy filter_map without a block");
2219 }
2220
2221 return lazy_add_method(obj, 0, 0, Qnil, Qnil, &lazy_filter_map_funcs);
2222}
2223
2224static struct MEMO *
2225lazy_reject_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2226{
2227 VALUE chain = lazyenum_yield(proc_entry, result);
2228 if (RTEST(chain)) return 0;
2229 return result;
2230}
2231
2232static const lazyenum_funcs lazy_reject_funcs = {
2233 lazy_reject_proc, 0,
2234};
2235
2236/*
2237 * call-seq:
2238 * lazy.reject { |obj| block } -> lazy_enumerator
2239 *
2240 * Like Enumerable#reject, but chains operation to be lazy-evaluated.
2241 */
2242
2243static VALUE
2244lazy_reject(VALUE obj)
2245{
2246 if (!rb_block_given_p()) {
2247 rb_raise(rb_eArgError, "tried to call lazy reject without a block");
2248 }
2249
2250 return lazy_add_method(obj, 0, 0, Qnil, Qnil, &lazy_reject_funcs);
2251}
2252
2253static struct MEMO *
2254lazy_grep_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2255{
2256 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2257 VALUE chain = rb_funcall(entry->memo, id_eqq, 1, result->memo_value);
2258 if (!RTEST(chain)) return 0;
2259 return result;
2260}
2261
2262static struct MEMO *
2263lazy_grep_iter_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2264{
2265 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2266 VALUE value, chain = rb_funcall(entry->memo, id_eqq, 1, result->memo_value);
2267
2268 if (!RTEST(chain)) return 0;
2269 value = rb_proc_call_with_block(entry->proc, 1, &(result->memo_value), Qnil);
2270 LAZY_MEMO_SET_VALUE(result, value);
2271 LAZY_MEMO_RESET_PACKED(result);
2272
2273 return result;
2274}
2275
2276static const lazyenum_funcs lazy_grep_iter_funcs = {
2277 lazy_grep_iter_proc, 0,
2278};
2279
2280static const lazyenum_funcs lazy_grep_funcs = {
2281 lazy_grep_proc, 0,
2282};
2283
2284/*
2285 * call-seq:
2286 * lazy.grep(pattern) -> lazy_enumerator
2287 * lazy.grep(pattern) { |obj| block } -> lazy_enumerator
2288 *
2289 * Like Enumerable#grep, but chains operation to be lazy-evaluated.
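 *
 * Added illustrative examples (not from the original docs):
 *
 *     (1..Float::INFINITY).lazy.grep(3..5).first(2)                 #=> [3, 4]
 *     (1..Float::INFINITY).lazy.grep(3..5) { |i| i * 10 }.first(2)  #=> [30, 40]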
2290 */
2291
2292static VALUE
2293lazy_grep(VALUE obj, VALUE pattern)
2294{
2295 const lazyenum_funcs *const funcs = rb_block_given_p() ?
2296 &lazy_grep_iter_funcs : &lazy_grep_funcs;
2297 return lazy_add_method(obj, 0, 0, pattern, rb_ary_new3(1, pattern), funcs);
2298}
2299
2300static struct MEMO *
2301lazy_grep_v_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2302{
2303 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2304 VALUE chain = rb_funcall(entry->memo, id_eqq, 1, result->memo_value);
2305 if (RTEST(chain)) return 0;
2306 return result;
2307}
2308
2309static struct MEMO *
2310lazy_grep_v_iter_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2311{
2312 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2313 VALUE value, chain = rb_funcall(entry->memo, id_eqq, 1, result->memo_value);
2314
2315 if (RTEST(chain)) return 0;
2316 value = rb_proc_call_with_block(entry->proc, 1, &(result->memo_value), Qnil);
2317 LAZY_MEMO_SET_VALUE(result, value);
2318 LAZY_MEMO_RESET_PACKED(result);
2319
2320 return result;
2321}
2322
2323static const lazyenum_funcs lazy_grep_v_iter_funcs = {
2324 lazy_grep_v_iter_proc, 0,
2325};
2326
2327static const lazyenum_funcs lazy_grep_v_funcs = {
2328 lazy_grep_v_proc, 0,
2329};
2330
2331/*
2332 * call-seq:
2333 * lazy.grep_v(pattern) -> lazy_enumerator
2334 * lazy.grep_v(pattern) { |obj| block } -> lazy_enumerator
2335 *
2336 * Like Enumerable#grep_v, but chains operation to be lazy-evaluated.
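 *
 * An added example for illustration:
 *
 *     (1..Float::INFINITY).lazy.grep_v(2..4).first(3)
 *     #=> [1, 5, 6]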
2337 */
2338
2339static VALUE
2340lazy_grep_v(VALUE obj, VALUE pattern)
2341{
2342 const lazyenum_funcs *const funcs = rb_block_given_p() ?
2343 &lazy_grep_v_iter_funcs : &lazy_grep_v_funcs;
2344 return lazy_add_method(obj, 0, 0, pattern, rb_ary_new3(1, pattern), funcs);
2345}
2346
2347static VALUE
2348call_next(VALUE obj)
2349{
2350 return rb_funcall(obj, id_next, 0);
2351}
2352
2353static VALUE
2354next_stopped(VALUE obj, VALUE _)
2355{
2356 return Qnil;
2357}
2358
2359static struct MEMO *
2360lazy_zip_arrays_func(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2361{
2362 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2363 VALUE ary, arrays = entry->memo;
2364 VALUE memo = rb_ary_entry(memos, memo_index);
2365 long i, count = NIL_P(memo) ? 0 : NUM2LONG(memo);
2366
2367 ary = rb_ary_new2(RARRAY_LEN(arrays) + 1);
2368 rb_ary_push(ary, result->memo_value);
2369 for (i = 0; i < RARRAY_LEN(arrays); i++) {
2370 rb_ary_push(ary, rb_ary_entry(RARRAY_AREF(arrays, i), count));
2371 }
2372 LAZY_MEMO_SET_VALUE(result, ary);
2373 rb_ary_store(memos, memo_index, LONG2NUM(++count));
2374 return result;
2375}
2376
2377static struct MEMO *
2378lazy_zip_func(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2379{
2380 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2381 VALUE arg = rb_ary_entry(memos, memo_index);
2382 VALUE zip_args = entry->memo;
2383 VALUE ary, v;
2384 long i;
2385
2386 if (NIL_P(arg)) {
2387 arg = rb_ary_new2(RARRAY_LEN(zip_args));
2388 for (i = 0; i < RARRAY_LEN(zip_args); i++) {
2389 rb_ary_push(arg, rb_funcall(RARRAY_AREF(zip_args, i), id_to_enum, 0));
2390 }
2391 rb_ary_store(memos, memo_index, arg);
2392 }
2393
2394 ary = rb_ary_new2(RARRAY_LEN(arg) + 1);
2395 rb_ary_push(ary, result->memo_value);
2396 for (i = 0; i < RARRAY_LEN(arg); i++) {
2397 v = rb_rescue2(call_next, RARRAY_AREF(arg, i), next_stopped, 0,
2398                rb_eStopIteration, (VALUE)0);
2399 rb_ary_push(ary, v);
2400 }
2401 LAZY_MEMO_SET_VALUE(result, ary);
2402 return result;
2403}
2404
2405static const lazyenum_funcs lazy_zip_funcs[] = {
2406 {lazy_zip_func, lazy_receiver_size,},
2407 {lazy_zip_arrays_func, lazy_receiver_size,},
2408};
2409
2410/*
2411 * call-seq:
2412 * lazy.zip(arg, ...) -> lazy_enumerator
2413 * lazy.zip(arg, ...) { |arr| block } -> nil
2414 *
2415 * Like Enumerable#zip, but chains operation to be lazy-evaluated.
2416 * However, if a block is given to zip, values are enumerated immediately.
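 *
 * An added illustrative example:
 *
 *     (1..Float::INFINITY).lazy.zip("a".."z").first(3)
 *     #=> [[1, "a"], [2, "b"], [3, "c"]]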
2417 */
2418static VALUE
2419lazy_zip(int argc, VALUE *argv, VALUE obj)
2420{
2421 VALUE ary, v;
2422 long i;
2423 const lazyenum_funcs *funcs = &lazy_zip_funcs[1];
2424
2425 if (rb_block_given_p()) {
2426 return rb_call_super(argc, argv);
2427 }
2428
2429 ary = rb_ary_new2(argc);
2430 for (i = 0; i < argc; i++) {
2431 v = rb_check_array_type(argv[i]);
2432 if (NIL_P(v)) {
2433 for (; i < argc; i++) {
2434 if (!rb_respond_to(argv[i], id_each)) {
2435 rb_raise(rb_eTypeError, "wrong argument type %"PRIsVALUE" (must respond to :each)",
2436 rb_obj_class(argv[i]));
2437 }
2438 }
2439 ary = rb_ary_new4(argc, argv);
2440 funcs = &lazy_zip_funcs[0];
2441 break;
2442 }
2443 rb_ary_push(ary, v);
2444 }
2445
2446 return lazy_add_method(obj, 0, 0, ary, ary, funcs);
2447}
2448
2449static struct MEMO *
2450lazy_take_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2451{
2452 long remain;
2453 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2454 VALUE memo = rb_ary_entry(memos, memo_index);
2455
2456 if (NIL_P(memo)) {
2457 memo = entry->memo;
2458 }
2459
2460 remain = NUM2LONG(memo);
2461 if (--remain == 0) LAZY_MEMO_SET_BREAK(result);
2462 rb_ary_store(memos, memo_index, LONG2NUM(remain));
2463 return result;
2464}
2465
2466static VALUE
2467lazy_take_size(VALUE entry, VALUE receiver)
2468{
2469 long len = NUM2LONG(RARRAY_AREF(rb_ivar_get(entry, id_arguments), 0));
2470 if (NIL_P(receiver) || (FIXNUM_P(receiver) && FIX2LONG(receiver) < len))
2471 return receiver;
2472 return LONG2NUM(len);
2473}
2474
2475static int
2476lazy_take_precheck(VALUE proc_entry)
2477{
2478 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2479 return entry->memo != INT2FIX(0);
2480}
2481
2482static const lazyenum_funcs lazy_take_funcs = {
2483 lazy_take_proc, lazy_take_size, lazy_take_precheck,
2484};
2485
2486/*
2487 * call-seq:
2488 * lazy.take(n) -> lazy_enumerator
2489 *
2490 * Like Enumerable#take, but chains operation to be lazy-evaluated.
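 *
 * An added example for illustration:
 *
 *     (1..Float::INFINITY).lazy.take(3).force
 *     #=> [1, 2, 3]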
2491 */
2492
2493static VALUE
2494lazy_take(VALUE obj, VALUE n)
2495{
2496 long len = NUM2LONG(n);
2497
2498 if (len < 0) {
2499 rb_raise(rb_eArgError, "attempt to take negative size");
2500 }
2501
2502 n = LONG2NUM(len); /* no more conversion */
2503
2504 return lazy_add_method(obj, 0, 0, n, rb_ary_new3(1, n), &lazy_take_funcs);
2505}
2506
2507static struct MEMO *
2508lazy_take_while_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2509{
2510 VALUE take = lazyenum_yield_values(proc_entry, result);
2511 if (!RTEST(take)) {
2512 LAZY_MEMO_SET_BREAK(result);
2513 return 0;
2514 }
2515 return result;
2516}
2517
2518static const lazyenum_funcs lazy_take_while_funcs = {
2519 lazy_take_while_proc, 0,
2520};
2521
2522/*
2523 * call-seq:
2524 * lazy.take_while { |obj| block } -> lazy_enumerator
2525 *
2526 * Like Enumerable#take_while, but chains operation to be lazy-evaluated.
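 *
 * An added example for illustration:
 *
 *     (1..Float::INFINITY).lazy.take_while { |i| i < 4 }.force
 *     #=> [1, 2, 3]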
2527 */
2528
2529static VALUE
2530lazy_take_while(VALUE obj)
2531{
2532 if (!rb_block_given_p()) {
2533 rb_raise(rb_eArgError, "tried to call lazy take_while without a block");
2534 }
2535
2536 return lazy_add_method(obj, 0, 0, Qnil, Qnil, &lazy_take_while_funcs);
2537}
2538
2539static VALUE
2540lazy_drop_size(VALUE proc_entry, VALUE receiver)
2541{
2542 long len = NUM2LONG(RARRAY_AREF(rb_ivar_get(proc_entry, id_arguments), 0));
2543 if (NIL_P(receiver))
2544 return receiver;
2545 if (FIXNUM_P(receiver)) {
2546 len = FIX2LONG(receiver) - len;
2547 return LONG2FIX(len < 0 ? 0 : len);
2548 }
2549 return rb_funcall(receiver, '-', 1, LONG2NUM(len));
2550}
2551
2552static struct MEMO *
2553lazy_drop_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2554{
2555 long remain;
2556 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2557 VALUE memo = rb_ary_entry(memos, memo_index);
2558
2559 if (NIL_P(memo)) {
2560 memo = entry->memo;
2561 }
2562 remain = NUM2LONG(memo);
2563 if (remain > 0) {
2564 --remain;
2565 rb_ary_store(memos, memo_index, LONG2NUM(remain));
2566 return 0;
2567 }
2568
2569 return result;
2570}
2571
2572static const lazyenum_funcs lazy_drop_funcs = {
2573 lazy_drop_proc, lazy_drop_size,
2574};
2575
2576/*
2577 * call-seq:
2578 * lazy.drop(n) -> lazy_enumerator
2579 *
2580 * Like Enumerable#drop, but chains operation to be lazy-evaluated.
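 *
 * An added example for illustration:
 *
 *     (1..Float::INFINITY).lazy.drop(3).first(3)
 *     #=> [4, 5, 6]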
2581 */
2582
2583static VALUE
2584lazy_drop(VALUE obj, VALUE n)
2585{
2586 long len = NUM2LONG(n);
2587 VALUE argv[2];
2588 argv[0] = sym_each;
2589 argv[1] = n;
2590
2591 if (len < 0) {
2592 rb_raise(rb_eArgError, "attempt to drop negative size");
2593 }
2594
2595 return lazy_add_method(obj, 2, argv, n, rb_ary_new3(1, n), &lazy_drop_funcs);
2596}
2597
2598static struct MEMO *
2599lazy_drop_while_proc(VALUE proc_entry, struct MEMO* result, VALUE memos, long memo_index)
2600{
2601 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2602 VALUE memo = rb_ary_entry(memos, memo_index);
2603
2604 if (NIL_P(memo)) {
2605 memo = entry->memo;
2606 }
2607
2608 if (!RTEST(memo)) {
2609 VALUE drop = lazyenum_yield_values(proc_entry, result);
2610 if (RTEST(drop)) return 0;
2611 rb_ary_store(memos, memo_index, Qtrue);
2612 }
2613 return result;
2614}
2615
2616static const lazyenum_funcs lazy_drop_while_funcs = {
2617 lazy_drop_while_proc, 0,
2618};
2619
2620/*
2621 * call-seq:
2622 * lazy.drop_while { |obj| block } -> lazy_enumerator
2623 *
2624 * Like Enumerable#drop_while, but chains operation to be lazy-evaluated.
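 *
 * An added example for illustration:
 *
 *     (1..Float::INFINITY).lazy.drop_while { |i| i < 4 }.first(3)
 *     #=> [4, 5, 6]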
2625 */
2626
2627static VALUE
2628lazy_drop_while(VALUE obj)
2629{
2630 if (!rb_block_given_p()) {
2631 rb_raise(rb_eArgError, "tried to call lazy drop_while without a block");
2632 }
2633
2634 return lazy_add_method(obj, 0, 0, Qfalse, Qnil, &lazy_drop_while_funcs);
2635}
2636
2637static int
2638lazy_uniq_check(VALUE chain, VALUE memos, long memo_index)
2639{
2640 VALUE hash = rb_ary_entry(memos, memo_index);
2641
2642 if (NIL_P(hash)) {
2643 hash = rb_obj_hide(rb_hash_new());
2644 rb_ary_store(memos, memo_index, hash);
2645 }
2646
2647 return rb_hash_add_new_element(hash, chain, Qfalse);
2648}
2649
2650static struct MEMO *
2651lazy_uniq_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2652{
2653 if (lazy_uniq_check(result->memo_value, memos, memo_index)) return 0;
2654 return result;
2655}
2656
2657static struct MEMO *
2658lazy_uniq_iter_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2659{
2660 VALUE chain = lazyenum_yield(proc_entry, result);
2661
2662 if (lazy_uniq_check(chain, memos, memo_index)) return 0;
2663 return result;
2664}
2665
2666static const lazyenum_funcs lazy_uniq_iter_funcs = {
2667 lazy_uniq_iter_proc, 0,
2668};
2669
2670static const lazyenum_funcs lazy_uniq_funcs = {
2671 lazy_uniq_proc, 0,
2672};
2673
2674/*
2675 * call-seq:
2676 * lazy.uniq -> lazy_enumerator
2677 * lazy.uniq { |item| block } -> lazy_enumerator
2678 *
2679 * Like Enumerable#uniq, but chains operation to be lazy-evaluated.
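 *
 * Added illustrative examples:
 *
 *     [1, 1, 2, 2, 3].lazy.uniq.force                        #=> [1, 2, 3]
 *     (1..Float::INFINITY).lazy.uniq { |i| i % 3 }.first(3)  #=> [1, 2, 3]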
2680 */
2681
2682static VALUE
2683lazy_uniq(VALUE obj)
2684{
2685 const lazyenum_funcs *const funcs =
2686 rb_block_given_p() ? &lazy_uniq_iter_funcs : &lazy_uniq_funcs;
2687 return lazy_add_method(obj, 0, 0, Qnil, Qnil, funcs);
2688}
2689
2690static struct MEMO *
2691lazy_compact_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2692{
2693 if (NIL_P(result->memo_value)) return 0;
2694 return result;
2695}
2696
2697static const lazyenum_funcs lazy_compact_funcs = {
2698 lazy_compact_proc, 0,
2699};
2700
2701/*
2702 * call-seq:
2703 * lazy.compact -> lazy_enumerator
2704 *
2705 * Like Enumerable#compact, but chains operation to be lazy-evaluated.
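 *
 * An added example for illustration:
 *
 *     [1, nil, 2, nil, 3].lazy.compact.force
 *     #=> [1, 2, 3]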
2706 */
2707
2708static VALUE
2709lazy_compact(VALUE obj)
2710{
2711 return lazy_add_method(obj, 0, 0, Qnil, Qnil, &lazy_compact_funcs);
2712}
2713
2714static struct MEMO *
2715lazy_with_index_proc(VALUE proc_entry, struct MEMO* result, VALUE memos, long memo_index)
2716{
2717 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2718 VALUE memo = rb_ary_entry(memos, memo_index);
2719 VALUE argv[2];
2720
2721 if (NIL_P(memo)) {
2722 memo = entry->memo;
2723 }
2724
2725 argv[0] = result->memo_value;
2726 argv[1] = memo;
2727 if (entry->proc) {
2728 rb_proc_call_with_block(entry->proc, 2, argv, Qnil);
2729 LAZY_MEMO_RESET_PACKED(result);
2730 }
2731 else {
2732 LAZY_MEMO_SET_VALUE(result, rb_ary_new_from_values(2, argv));
2733 LAZY_MEMO_SET_PACKED(result);
2734 }
2735 rb_ary_store(memos, memo_index, LONG2NUM(NUM2LONG(memo) + 1));
2736 return result;
2737}
2738
2739static VALUE
2740lazy_with_index_size(VALUE proc, VALUE receiver)
2741{
2742 return receiver;
2743}
2744
2745static const lazyenum_funcs lazy_with_index_funcs = {
2746 lazy_with_index_proc, lazy_with_index_size,
2747};
2748
2749/*
2750 * call-seq:
2751 * lazy.with_index(offset = 0) {|(*args), idx| block }
2752 * lazy.with_index(offset = 0)
2753 *
2754 * If a block is given, returns a lazy enumerator that calls the
2755 * block with each element and its index, which starts from
2756 * +offset+, and that yields the same values as the receiver
2757 * (without the index).
2758 *
2759 * If a block is not given, returns a new lazy enumerator that
2760 * includes the index, starting from +offset+.
2761 *
2762 * +offset+:: the starting index to use
2763 *
2764 * See Enumerator#with_index.
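 *
 * An added example for illustration (not from the original docs):
 *
 *     (1..).lazy.with_index(10).first(3)
 *     #=> [[1, 10], [2, 11], [3, 12]]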
2765 */
2766static VALUE
2767lazy_with_index(int argc, VALUE *argv, VALUE obj)
2768{
2769 VALUE memo;
2770
2771 rb_scan_args(argc, argv, "01", &memo);
2772 if (NIL_P(memo))
2773 memo = LONG2NUM(0);
2774
2775 return lazy_add_method(obj, 0, 0, memo, rb_ary_new_from_values(1, &memo), &lazy_with_index_funcs);
2776}
2777
2778#if 0 /* for RDoc */
2779
2780/*
2781 * call-seq:
2782 * lazy.chunk { |elt| ... } -> lazy_enumerator
2783 *
2784 * Like Enumerable#chunk, but chains operation to be lazy-evaluated.
2785 */
2786static VALUE
2787lazy_chunk(VALUE self)
2788{
2789}
2790
2791/*
2792 * call-seq:
2793 * lazy.chunk_while {|elt_before, elt_after| bool } -> lazy_enumerator
2794 *
2795 * Like Enumerable#chunk_while, but chains operation to be lazy-evaluated.
2796 */
2797static VALUE
2798lazy_chunk_while(VALUE self)
2799{
2800}
2801
2802/*
2803 * call-seq:
2804 * lazy.slice_after(pattern) -> lazy_enumerator
2805 * lazy.slice_after { |elt| bool } -> lazy_enumerator
2806 *
2807 * Like Enumerable#slice_after, but chains operation to be lazy-evaluated.
2808 */
2809static VALUE
2810lazy_slice_after(VALUE self)
2811{
2812}
2813
2814/*
2815 * call-seq:
2816 * lazy.slice_before(pattern) -> lazy_enumerator
2817 * lazy.slice_before { |elt| bool } -> lazy_enumerator
2818 *
2819 * Like Enumerable#slice_before, but chains operation to be lazy-evaluated.
2820 */
2821static VALUE
2822lazy_slice_before(VALUE self)
2823{
2824}
2825
2826/*
2827 * call-seq:
2828 * lazy.slice_when {|elt_before, elt_after| bool } -> lazy_enumerator
2829 *
2830 * Like Enumerable#slice_when, but chains operation to be lazy-evaluated.
2831 */
2832static VALUE
2833lazy_slice_when(VALUE self)
2834{
2835}
2836# endif
2837
2838static VALUE
2839lazy_super(int argc, VALUE *argv, VALUE lazy)
2840{
2841 return enumerable_lazy(rb_call_super(argc, argv));
2842}
2843
2844/*
2845 * call-seq:
2846 * enum.lazy -> lazy_enumerator
2847 *
2848 * Returns self.
2849 */
2850
2851static VALUE
2852lazy_lazy(VALUE obj)
2853{
2854 return obj;
2855}
2856
2857/*
2858 * Document-class: StopIteration
2859 *
2860 * Raised to stop the iteration, in particular by Enumerator#next. It is
2861 * rescued by Kernel#loop.
2862 *
2863 * loop do
2864 * puts "Hello"
2865 * raise StopIteration
2866 * puts "World"
2867 * end
2868 * puts "Done!"
2869 *
2870 * <em>produces:</em>
2871 *
2872 * Hello
2873 * Done!
2874 */
2875
2876/*
2877 * call-seq:
2878 * result -> value
2879 *
2880 * Returns the return value of the iterator.
2881 *
2882 * o = Object.new
2883 * def o.each
2884 * yield 1
2885 * yield 2
2886 * yield 3
2887 * 100
2888 * end
2889 *
2890 * e = o.to_enum
2891 *
2892 * puts e.next #=> 1
2893 * puts e.next #=> 2
2894 * puts e.next #=> 3
2895 *
2896 * begin
2897 * e.next
2898 * rescue StopIteration => ex
2899 * puts ex.result #=> 100
2900 * end
2901 *
2902 */
2903
2904static VALUE
2905stop_result(VALUE self)
2906{
2907 return rb_attr_get(self, id_result);
2908}
2909
2910/*
2911 * Producer
2912 */
2913
2914static void
2915producer_mark(void *p)
2916{
2917 struct producer *ptr = p;
2918 rb_gc_mark_movable(ptr->init);
2919 rb_gc_mark_movable(ptr->proc);
2920}
2921
2922static void
2923producer_compact(void *p)
2924{
2925 struct producer *ptr = p;
2926 ptr->init = rb_gc_location(ptr->init);
2927 ptr->proc = rb_gc_location(ptr->proc);
2928}
2929
2930#define producer_free RUBY_TYPED_DEFAULT_FREE
2931
2932static size_t
2933producer_memsize(const void *p)
2934{
2935 return sizeof(struct producer);
2936}
2937
2938static const rb_data_type_t producer_data_type = {
2939 "producer",
2940 {
2941 producer_mark,
2942 producer_free,
2943 producer_memsize,
2944 producer_compact,
2945 },
2946 0, 0, RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED | RUBY_TYPED_EMBEDDABLE
2947};
2948
2949static struct producer *
2950producer_ptr(VALUE obj)
2951{
2952 struct producer *ptr;
2953
2954 TypedData_Get_Struct(obj, struct producer, &producer_data_type, ptr);
2955 if (!ptr || UNDEF_P(ptr->proc)) {
2956 rb_raise(rb_eArgError, "uninitialized producer");
2957 }
2958 return ptr;
2959}
2960
2961/* :nodoc: */
2962static VALUE
2963producer_allocate(VALUE klass)
2964{
2965 struct producer *ptr;
2966 VALUE obj;
2967
2968 obj = TypedData_Make_Struct(klass, struct producer, &producer_data_type, ptr);
2969 ptr->init = Qundef;
2970 ptr->proc = Qundef;
2971
2972 return obj;
2973}
2974
2975static VALUE
2976producer_init(VALUE obj, VALUE init, VALUE proc)
2977{
2978 struct producer *ptr;
2979
2980 TypedData_Get_Struct(obj, struct producer, &producer_data_type, ptr);
2981
2982 if (!ptr) {
2983 rb_raise(rb_eArgError, "unallocated producer");
2984 }
2985
2986 RB_OBJ_WRITE(obj, &ptr->init, init);
2987 RB_OBJ_WRITE(obj, &ptr->proc, proc);
2988
2989 return obj;
2990}
2991
2992static VALUE
2993producer_each_stop(VALUE dummy, VALUE exc)
2994{
2995 return rb_attr_get(exc, id_result);
2996}
2997
2998NORETURN(static VALUE producer_each_i(VALUE obj));
2999
3000static VALUE
3001producer_each_i(VALUE obj)
3002{
3003 struct producer *ptr;
3004 VALUE init, proc, curr;
3005
3006 ptr = producer_ptr(obj);
3007 init = ptr->init;
3008 proc = ptr->proc;
3009
3010 if (UNDEF_P(init)) {
3011 curr = Qnil;
3012 }
3013 else {
3014 rb_yield(init);
3015 curr = init;
3016 }
3017
3018 for (;;) {
3019 curr = rb_funcall(proc, id_call, 1, curr);
3020 rb_yield(curr);
3021 }
3022
3023 UNREACHABLE_RETURN(Qnil);
3024}
3025
3026/* :nodoc: */
3027static VALUE
3028producer_each(VALUE obj)
3029{
3030 rb_need_block();
3031
3032 return rb_rescue2(producer_each_i, obj, producer_each_stop, (VALUE)0, rb_eStopIteration, (VALUE)0);
3033}
3034
3035static VALUE
3036producer_size(VALUE obj, VALUE args, VALUE eobj)
3037{
3038 return DBL2NUM(HUGE_VAL);
3039}
3040
3041/*
3042 * call-seq:
3043 * Enumerator.produce(initial = nil) { |prev| block } -> enumerator
3044 *
3045 * Creates an infinite enumerator from any block, just called over and
3046 * over. The result of the previous iteration is passed to the next one.
3047 * If +initial+ is provided, it is passed to the first iteration, and
3048 * becomes the first element of the enumerator; if it is not provided,
3049 * the first iteration receives +nil+, and its result becomes the first
3050 * element of the iterator.
3051 *
3052 * Raising StopIteration from the block stops an iteration.
3053 *
3054 * Enumerator.produce(1, &:succ) # => enumerator of 1, 2, 3, 4, ....
3055 *
3056 * Enumerator.produce { rand(10) } # => infinite random number sequence
3057 *
3058 * ancestors = Enumerator.produce(node) { |prev| node = prev.parent or raise StopIteration }
3059 * enclosing_section = ancestors.find { |n| n.type == :section }
3060 *
3061 * Using ::produce together with Enumerable methods like Enumerable#detect,
3062 * Enumerable#slice_after, Enumerable#take_while can provide Enumerator-based alternatives
3063 * for +while+ and +until+ cycles:
3064 *
3065 * # Find next Tuesday
3066 * require "date"
3067 * Enumerator.produce(Date.today, &:succ).detect(&:tuesday?)
3068 *
3069 * # Simple lexer:
3070 * require "strscan"
3071 * scanner = StringScanner.new("7+38/6")
3072 * PATTERN = %r{\d+|[-/+*]}
3073 * Enumerator.produce { scanner.scan(PATTERN) }.slice_after { scanner.eos? }.first
3074 * # => ["7", "+", "38", "/", "6"]
3075 */
3076static VALUE
3077enumerator_s_produce(int argc, VALUE *argv, VALUE klass)
3078{
3079 VALUE init, producer;
3080
3081 if (!rb_block_given_p()) rb_raise(rb_eArgError, "no block given");
3082
3083 if (rb_scan_args(argc, argv, "01", &init) == 0) {
3084 init = Qundef;
3085 }
3086
3087 producer = producer_init(producer_allocate(rb_cEnumProducer), init, rb_block_proc());
3088
3089 return rb_enumeratorize_with_size_kw(producer, sym_each, 0, 0, producer_size, RB_NO_KEYWORDS);
3090}
3091
3092/*
3093 * Document-class: Enumerator::Chain
3094 *
3095 * Enumerator::Chain is a subclass of Enumerator, which represents a
3096 * chain of enumerables that works as a single enumerator.
3097 *
3098 * This type of objects can be created by Enumerable#chain and
3099 * Enumerator#+.
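 *
 * An added illustrative example (not part of the original documentation):
 *
 *     e = (1..3).chain([4, 5])
 *     e.class  #=> Enumerator::Chain
 *     e.to_a   #=> [1, 2, 3, 4, 5]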
3100 */
3101
3102static void
3103enum_chain_mark(void *p)
3104{
3105 struct enum_chain *ptr = p;
3106 rb_gc_mark_movable(ptr->enums);
3107}
3108
3109static void
3110enum_chain_compact(void *p)
3111{
3112 struct enum_chain *ptr = p;
3113 ptr->enums = rb_gc_location(ptr->enums);
3114}
3115
3116#define enum_chain_free RUBY_TYPED_DEFAULT_FREE
3117
3118static size_t
3119enum_chain_memsize(const void *p)
3120{
3121 return sizeof(struct enum_chain);
3122}
3123
3124static const rb_data_type_t enum_chain_data_type = {
3125 "chain",
3126 {
3127 enum_chain_mark,
3128 enum_chain_free,
3129 enum_chain_memsize,
3130 enum_chain_compact,
3131 },
3132 0, 0, RUBY_TYPED_FREE_IMMEDIATELY
3133};
3134
3135static struct enum_chain *
3136enum_chain_ptr(VALUE obj)
3137{
3138 struct enum_chain *ptr;
3139
3140 TypedData_Get_Struct(obj, struct enum_chain, &enum_chain_data_type, ptr);
3141 if (!ptr || UNDEF_P(ptr->enums)) {
3142 rb_raise(rb_eArgError, "uninitialized chain");
3143 }
3144 return ptr;
3145}
3146
3147/* :nodoc: */
3148static VALUE
3149enum_chain_allocate(VALUE klass)
3150{
3151 struct enum_chain *ptr;
3152 VALUE obj;
3153
3154 obj = TypedData_Make_Struct(klass, struct enum_chain, &enum_chain_data_type, ptr);
3155 ptr->enums = Qundef;
3156 ptr->pos = -1;
3157
3158 return obj;
3159}
3160
3161/*
3162 * call-seq:
3163 * Enumerator::Chain.new(*enums) -> enum
3164 *
3165 * Generates a new enumerator object that iterates over the elements
3166 * of given enumerable objects in sequence.
3167 *
3168 * e = Enumerator::Chain.new(1..3, [4, 5])
3169 * e.to_a #=> [1, 2, 3, 4, 5]
3170 * e.size #=> 5
3171 */
3172static VALUE
3173enum_chain_initialize(VALUE obj, VALUE enums)
3174{
3175 struct enum_chain *ptr;
3176
3177 rb_check_frozen(obj);
3178 TypedData_Get_Struct(obj, struct enum_chain, &enum_chain_data_type, ptr);
3179
3180 if (!ptr) rb_raise(rb_eArgError, "unallocated chain");
3181
3182 ptr->enums = rb_ary_freeze(enums);
3183 ptr->pos = -1;
3184
3185 return obj;
3186}
3187
3188static VALUE
3189new_enum_chain(VALUE enums)
3190{
3191 long i;
3192 VALUE obj = enum_chain_initialize(enum_chain_allocate(rb_cEnumChain), enums);
3193
3194 for (i = 0; i < RARRAY_LEN(enums); i++) {
3195 if (RTEST(rb_obj_is_kind_of(RARRAY_AREF(enums, i), rb_cLazy))) {
3196 return enumerable_lazy(obj);
3197 }
3198 }
3199
3200 return obj;
3201}
3202
3203/* :nodoc: */
3204static VALUE
3205enum_chain_init_copy(VALUE obj, VALUE orig)
3206{
3207 struct enum_chain *ptr0, *ptr1;
3208
3209 if (!OBJ_INIT_COPY(obj, orig)) return obj;
3210 ptr0 = enum_chain_ptr(orig);
3211
3212 TypedData_Get_Struct(obj, struct enum_chain, &enum_chain_data_type, ptr1);
3213
3214 if (!ptr1) rb_raise(rb_eArgError, "unallocated chain");
3215
3216 ptr1->enums = ptr0->enums;
3217 ptr1->pos = ptr0->pos;
3218
3219 return obj;
3220}
3221
3222static VALUE
3223enum_chain_total_size(VALUE enums)
3224{
3225 VALUE total = INT2FIX(0);
3226 long i;
3227
3228 for (i = 0; i < RARRAY_LEN(enums); i++) {
3229 VALUE size = enum_size(RARRAY_AREF(enums, i));
3230
3231 if (NIL_P(size) || (RB_FLOAT_TYPE_P(size) && isinf(NUM2DBL(size)))) {
3232 return size;
3233 }
3234 if (!RB_INTEGER_TYPE_P(size)) {
3235 return Qnil;
3236 }
3237
3238 total = rb_funcall(total, '+', 1, size);
3239 }
3240
3241 return total;
3242}
3243
3244/*
3245 * call-seq:
3246 * obj.size -> int, Float::INFINITY or nil
3247 *
3248 * Returns the total size of the enumerator chain calculated by
3249 * summing up the size of each enumerable in the chain. If any of the
3250 * enumerables reports its size as nil or Float::INFINITY, that value
3251 * is returned as the total size.
3252 */
3253static VALUE
3254enum_chain_size(VALUE obj)
3255{
3256 return enum_chain_total_size(enum_chain_ptr(obj)->enums);
3257}
3258
3259static VALUE
3260enum_chain_enum_size(VALUE obj, VALUE args, VALUE eobj)
3261{
3262 return enum_chain_size(obj);
3263}
3264
3265static VALUE
3266enum_chain_enum_no_size(VALUE obj, VALUE args, VALUE eobj)
3267{
3268 return Qnil;
3269}
3270
3271/*
3272 * call-seq:
3273 * obj.each(*args) { |...| ... } -> obj
3274 * obj.each(*args) -> enumerator
3275 *
3276 * Iterates over the elements of the first enumerable by calling the
3277 * "each" method on it with the given arguments, then proceeds to the
3278 * following enumerables in sequence until all of the enumerables are
3279 * exhausted.
3280 *
3281 * If no block is given, returns an enumerator.
3282 */
3283static VALUE
3284enum_chain_each(int argc, VALUE *argv, VALUE obj)
3285{
3286 VALUE enums, block;
3287 struct enum_chain *objptr;
3288 long i;
3289
3290 RETURN_SIZED_ENUMERATOR(obj, argc, argv, argc > 0 ? enum_chain_enum_no_size : enum_chain_enum_size);
3291
3292 objptr = enum_chain_ptr(obj);
3293 enums = objptr->enums;
3294 block = rb_block_proc();
3295
3296 for (i = 0; i < RARRAY_LEN(enums); i++) {
3297 objptr->pos = i;
3298 rb_funcall_with_block(RARRAY_AREF(enums, i), id_each, argc, argv, block);
3299 }
3300
3301 return obj;
3302}
3303
3304/*
3305 * call-seq:
3306 * obj.rewind -> obj
3307 *
3308 * Rewinds the enumerator chain by calling the "rewind" method on each
3309 * enumerable in reverse order. Each call is performed only if the
3310 * enumerable responds to the method.
3311 */
3312static VALUE
3313enum_chain_rewind(VALUE obj)
3314{
3315 struct enum_chain *objptr = enum_chain_ptr(obj);
3316 VALUE enums = objptr->enums;
3317 long i;
3318
3319 for (i = objptr->pos; 0 <= i && i < RARRAY_LEN(enums); objptr->pos = --i) {
3320 rb_check_funcall(RARRAY_AREF(enums, i), id_rewind, 0, 0);
3321 }
3322
3323 return obj;
3324}
3325
3326static VALUE
3327inspect_enum_chain(VALUE obj, VALUE dummy, int recur)
3328{
3329 VALUE klass = rb_obj_class(obj);
3330 struct enum_chain *ptr;
3331
3332 TypedData_Get_Struct(obj, struct enum_chain, &enum_chain_data_type, ptr);
3333
3334 if (!ptr || UNDEF_P(ptr->enums)) {
3335 return rb_sprintf("#<%"PRIsVALUE": uninitialized>", rb_class_path(klass));
3336 }
3337
3338 if (recur) {
3339 return rb_sprintf("#<%"PRIsVALUE": ...>", rb_class_path(klass));
3340 }
3341
3342 return rb_sprintf("#<%"PRIsVALUE": %+"PRIsVALUE">", rb_class_path(klass), ptr->enums);
3343}
3344
3345/*
3346 * call-seq:
3347 * obj.inspect -> string
3348 *
3349 * Returns a printable version of the enumerator chain.
3350 */
3351static VALUE
3352enum_chain_inspect(VALUE obj)
3353{
3354 return rb_exec_recursive(inspect_enum_chain, obj, 0);
3355}
3356
3357/*
3358 * call-seq:
3359 * e.chain(*enums) -> enumerator
3360 *
3361 * Returns an enumerator object generated from this enumerator and
3362 * given enumerables.
3363 *
3364 * e = (1..3).chain([4, 5])
3365 * e.to_a #=> [1, 2, 3, 4, 5]
3366 */
3367static VALUE
3368enum_chain(int argc, VALUE *argv, VALUE obj)
3369{
3370 VALUE enums = rb_ary_new_from_values(1, &obj);
3371 rb_ary_cat(enums, argv, argc);
3372 return new_enum_chain(enums);
3373}
3374
3375/*
3376 * call-seq:
3377 * e + enum -> enumerator
3378 *
3379 * Returns an enumerator object generated from this enumerator and a
3380 * given enumerable.
3381 *
3382 * e = (1..3).each + [4, 5]
3383 * e.to_a #=> [1, 2, 3, 4, 5]
3384 */
3385static VALUE
3386enumerator_plus(VALUE obj, VALUE eobj)
3387{
3388 return new_enum_chain(rb_ary_new_from_args(2, obj, eobj));
3389}
3390
3391/*
3392 * Document-class: Enumerator::Product
3393 *
3394 * Enumerator::Product generates a Cartesian product of any number of
3395 * enumerable objects. Iterating over the product of enumerable
3396 * objects is roughly equivalent to nested each_entry loops where the
3397 * loop for the rightmost object is put innermost.
3398 *
3399 * innings = Enumerator::Product.new(1..9, ['top', 'bottom'])
3400 *
3401 * innings.each do |i, h|
3402 * p [i, h]
3403 * end
3404 * # [1, "top"]
3405 * # [1, "bottom"]
3406 * # [2, "top"]
3407 * # [2, "bottom"]
3408 * # [3, "top"]
3409 * # [3, "bottom"]
3410 * # ...
3411 * # [9, "top"]
3412 * # [9, "bottom"]
3413 *
3414 * The method used against each enumerable object is `each_entry`
3415 * instead of `each` so that the product of N enumerable objects
3416 * yields an array of exactly N elements in each iteration.
3417 *
3418 * When no enumerator is given, it calls a given block once yielding
3419 * an empty argument list.
3420 *
3421 * This type of objects can be created by Enumerator.product.
3422 */
3423
3424static void
3425enum_product_mark(void *p)
3426{
3427 struct enum_product *ptr = p;
3428 rb_gc_mark_movable(ptr->enums);
3429}
3430
3431static void
3432enum_product_compact(void *p)
3433{
3434 struct enum_product *ptr = p;
3435 ptr->enums = rb_gc_location(ptr->enums);
3436}
3437
3438#define enum_product_free RUBY_TYPED_DEFAULT_FREE
3439
3440static size_t
3441enum_product_memsize(const void *p)
3442{
3443 return sizeof(struct enum_product);
3444}
3445
3446static const rb_data_type_t enum_product_data_type = {
3447 "product",
3448 {
3449 enum_product_mark,
3450 enum_product_free,
3451 enum_product_memsize,
3452 enum_product_compact,
3453 },
3454 0, 0, RUBY_TYPED_FREE_IMMEDIATELY
3455};
3456
3457static struct enum_product *
3458enum_product_ptr(VALUE obj)
3459{
3460 struct enum_product *ptr;
3461
3462 TypedData_Get_Struct(obj, struct enum_product, &enum_product_data_type, ptr);
3463 if (!ptr || UNDEF_P(ptr->enums)) {
3464 rb_raise(rb_eArgError, "uninitialized product");
3465 }
3466 return ptr;
3467}
3468
3469/* :nodoc: */
3470static VALUE
3471enum_product_allocate(VALUE klass)
3472{
3473 struct enum_product *ptr;
3474 VALUE obj;
3475
3476 obj = TypedData_Make_Struct(klass, struct enum_product, &enum_product_data_type, ptr);
3477 ptr->enums = Qundef;
3478
3479 return obj;
3480}
3481
3482/*
3483 * call-seq:
3484 * Enumerator::Product.new(*enums) -> enum
3485 *
3486 * Generates a new enumerator object that generates a Cartesian
3487 * product of given enumerable objects.
3488 *
3489 * e = Enumerator::Product.new(1..3, [4, 5])
3490 * e.to_a #=> [[1, 4], [1, 5], [2, 4], [2, 5], [3, 4], [3, 5]]
3491 * e.size #=> 6
3492 */
3493static VALUE
3494enum_product_initialize(int argc, VALUE *argv, VALUE obj)
3495{
3496 struct enum_product *ptr;
3497 VALUE enums = Qnil, options = Qnil;
3498
3499 rb_scan_args(argc, argv, "*:", &enums, &options);
3500
3501 if (!NIL_P(options) && !RHASH_EMPTY_P(options)) {
3502 rb_exc_raise(rb_keyword_error_new("unknown", rb_hash_keys(options)));
3503 }
3504
3505 rb_check_frozen(obj);
3506 TypedData_Get_Struct(obj, struct enum_product, &enum_product_data_type, ptr);
3507
3508 if (!ptr) rb_raise(rb_eArgError, "unallocated product");
3509
3510 ptr->enums = rb_ary_freeze(enums);
3511
3512 return obj;
3513}
3514
3515/* :nodoc: */
3516static VALUE
3517enum_product_init_copy(VALUE obj, VALUE orig)
3518{
3519 struct enum_product *ptr0, *ptr1;
3520
3521 if (!OBJ_INIT_COPY(obj, orig)) return obj;
3522 ptr0 = enum_product_ptr(orig);
3523
3524 TypedData_Get_Struct(obj, struct enum_product, &enum_product_data_type, ptr1);
3525
3526 if (!ptr1) rb_raise(rb_eArgError, "unallocated product");
3527
3528 ptr1->enums = ptr0->enums;
3529
3530 return obj;
3531}
3532
3533static VALUE
3534enum_product_total_size(VALUE enums)
3535{
3536 VALUE total = INT2FIX(1);
3537 VALUE sizes = rb_ary_hidden_new(RARRAY_LEN(enums));
3538 long i;
3539
3540 for (i = 0; i < RARRAY_LEN(enums); i++) {
3541 VALUE size = enum_size(RARRAY_AREF(enums, i));
3542 if (size == INT2FIX(0)) {
3543 rb_ary_resize(sizes, 0);
3544 return size;
3545 }
3546 rb_ary_push(sizes, size);
3547 }
3548 for (i = 0; i < RARRAY_LEN(sizes); i++) {
3549 VALUE size = RARRAY_AREF(sizes, i);
3550
3551 if (NIL_P(size) || (RB_TYPE_P(size, T_FLOAT) && isinf(NUM2DBL(size)))) {
3552 return size;
3553 }
3554 if (!RB_INTEGER_TYPE_P(size)) {
3555 return Qnil;
3556 }
3557
3558 total = rb_funcall(total, '*', 1, size);
3559 }
3560
3561 return total;
3562}
3563
3564/*
3565 * call-seq:
3566 * obj.size -> int, Float::INFINITY or nil
3567 *
3568 * Returns the total size of the enumerator product calculated by
3569 * multiplying the sizes of enumerables in the product. If any of the
3570 * enumerables reports its size as nil or Float::INFINITY, that value
3571 * is returned as the size.
3572 */
3573static VALUE
3574enum_product_size(VALUE obj)
3575{
3576 return enum_product_total_size(enum_product_ptr(obj)->enums);
3577}
3578
3579static VALUE
3580enum_product_enum_size(VALUE obj, VALUE args, VALUE eobj)
3581{
3582 return enum_product_size(obj);
3583}
3584
3585struct product_state {
3586 VALUE obj;
3587 VALUE block;
3588 int argc;
3589 VALUE *argv;
3590 int index;
3591};
3592
3593static VALUE product_each(VALUE, struct product_state *);
3594
3595static VALUE
3596product_each_i(RB_BLOCK_CALL_FUNC_ARGLIST(value, state))
3597{
3598 struct product_state *pstate = (struct product_state *)state;
3599 pstate->argv[pstate->index++] = value;
3600
3601 VALUE val = product_each(pstate->obj, pstate);
3602 pstate->index--;
3603 return val;
3604}
3605
3606static VALUE
3607product_each(VALUE obj, struct product_state *pstate)
3608{
3609 struct enum_product *ptr = enum_product_ptr(obj);
3610 VALUE enums = ptr->enums;
3611
3612 if (pstate->index < pstate->argc) {
3613 VALUE eobj = RARRAY_AREF(enums, pstate->index);
3614
3615 rb_block_call(eobj, id_each_entry, 0, NULL, product_each_i, (VALUE)pstate);
3616 }
3617 else {
3618 rb_funcall(pstate->block, id_call, 1, rb_ary_new_from_values(pstate->argc, pstate->argv));
3619 }
3620
3621 return obj;
3622}
3623
3624static VALUE
3625enum_product_run(VALUE obj, VALUE block)
3626{
3627 struct enum_product *ptr = enum_product_ptr(obj);
3628 int argc = RARRAY_LENINT(ptr->enums);
3629 struct product_state state = {
3630 .obj = obj,
3631 .block = block,
3632 .index = 0,
3633 .argc = argc,
3634 .argv = ALLOCA_N(VALUE, argc),
3635 };
3636
3637 return product_each(obj, &state);
3638}
3639
3640/*
3641 * call-seq:
3642 * obj.each { |...| ... } -> obj
3643 * obj.each -> enumerator
3644 *
3645 * Iterates over the Cartesian product of the given enumerables by
3646 * calling the "each_entry" method on each of them, yielding one
3647 * array per combination; the rightmost enumerable varies fastest
3648 * (its loop is the innermost).
3649 *
3650 * If no block is given, returns an enumerator. Otherwise, returns self.
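 *
 * An added illustrative example:
 *
 *     Enumerator::Product.new(1..2, [:a, :b]).each { |x, y| p [x, y] }
 *     # [1, :a]
 *     # [1, :b]
 *     # [2, :a]
 *     # [2, :b]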
3651 */
3652static VALUE
3653enum_product_each(VALUE obj)
3654{
3655 RETURN_SIZED_ENUMERATOR(obj, 0, 0, enum_product_enum_size);
3656
3657 return enum_product_run(obj, rb_block_proc());
3658}
3659
3660/*
3661 * call-seq:
3662 * obj.rewind -> obj
3663 *
3664 * Rewinds the product enumerator by calling the "rewind" method on
3665 * each enumerable in reverse order. Each call is performed only if
3666 * the enumerable responds to the method.
3667 */
3668static VALUE
3669enum_product_rewind(VALUE obj)
3670{
3671 struct enum_product *ptr = enum_product_ptr(obj);
3672 VALUE enums = ptr->enums;
3673 long i;
3674
3675 for (i = 0; i < RARRAY_LEN(enums); i++) {
3676 rb_check_funcall(RARRAY_AREF(enums, i), id_rewind, 0, 0);
3677 }
3678
3679 return obj;
3680}
3681
3682static VALUE
3683inspect_enum_product(VALUE obj, VALUE dummy, int recur)
3684{
3685 VALUE klass = rb_obj_class(obj);
3686 struct enum_product *ptr;
3687
3688 TypedData_Get_Struct(obj, struct enum_product, &enum_product_data_type, ptr);
3689
3690 if (!ptr || UNDEF_P(ptr->enums)) {
3691 return rb_sprintf("#<%"PRIsVALUE": uninitialized>", rb_class_path(klass));
3692 }
3693
3694 if (recur) {
3695 return rb_sprintf("#<%"PRIsVALUE": ...>", rb_class_path(klass));
3696 }
3697
3698 return rb_sprintf("#<%"PRIsVALUE": %+"PRIsVALUE">", rb_class_path(klass), ptr->enums);
3699}
3700
3701/*
3702 * call-seq:
3703 * obj.inspect -> string
3704 *
3705 * Returns a printable version of the product enumerator.
3706 */
3707static VALUE
3708enum_product_inspect(VALUE obj)
3709{
3710 return rb_exec_recursive(inspect_enum_product, obj, 0);
3711}
3712
3713/*
3714 * call-seq:
3715 * Enumerator.product(*enums) -> enumerator
3716 * Enumerator.product(*enums) { |elts| ... } -> enumerator
3717 *
3718 * Generates a new enumerator object that generates a Cartesian
3719 * product of given enumerable objects. This is equivalent to
3720 * Enumerator::Product.new.
3721 *
3722 * e = Enumerator.product(1..3, [4, 5])
3723 * e.to_a #=> [[1, 4], [1, 5], [2, 4], [2, 5], [3, 4], [3, 5]]
3724 * e.size #=> 6
3725 *
3726 * When a block is given, calls the block with each N-element array
3727 * generated and returns +nil+.
3728 */
3729static VALUE
3730enumerator_s_product(int argc, VALUE *argv, VALUE klass)
3731{
3732 VALUE enums = Qnil, options = Qnil, block = Qnil;
3733
3734 rb_scan_args(argc, argv, "*:&", &enums, &options, &block);
3735
3736 if (!NIL_P(options) && !RHASH_EMPTY_P(options)) {
3737 rb_exc_raise(rb_keyword_error_new("unknown", rb_hash_keys(options)));
3738 }
3739
3740 VALUE obj = enum_product_initialize(argc, argv, enum_product_allocate(rb_cEnumProduct));
3741
3742 if (!NIL_P(block)) {
3743 enum_product_run(obj, block);
3744 return Qnil;
3745 }
3746
3747 return obj;
3748}
3749
3750struct arith_seq {
3751 struct enumerator enumerator;
3752 VALUE begin;
3753 VALUE end;
3754 VALUE step;
3755 bool exclude_end;
3756};
3757
3758RUBY_REFERENCES(arith_seq_refs) = {
3759 RUBY_REF_EDGE(struct enumerator, obj),
3760 RUBY_REF_EDGE(struct enumerator, args),
3761 RUBY_REF_EDGE(struct enumerator, fib),
3762 RUBY_REF_EDGE(struct enumerator, dst),
3763 RUBY_REF_EDGE(struct enumerator, lookahead),
3764 RUBY_REF_EDGE(struct enumerator, feedvalue),
3765 RUBY_REF_EDGE(struct enumerator, stop_exc),
3766 RUBY_REF_EDGE(struct enumerator, size),
3767 RUBY_REF_EDGE(struct enumerator, procs),
3768
3769 RUBY_REF_EDGE(struct arith_seq, begin),
3770 RUBY_REF_EDGE(struct arith_seq, end),
3771 RUBY_REF_EDGE(struct arith_seq, step),
3772 RUBY_REF_END
3773};
3774
3775static const rb_data_type_t arith_seq_data_type = {
3776 "arithmetic_sequence",
3777 {
3778 RUBY_REFS_LIST_PTR(arith_seq_refs),
3779 RUBY_TYPED_DEFAULT_FREE,
3780 NULL, // Nothing allocated externally, so don't need a memsize function
3781 NULL,
3782 },
3783 .parent = &enumerator_data_type,
3784 .flags = RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED | RUBY_TYPED_DECL_MARKING | RUBY_TYPED_EMBEDDABLE
3785};
3786
3787static VALUE
3788arith_seq_allocate(VALUE klass)
3789{
3790 struct arith_seq *ptr;
3791 VALUE enum_obj;
3792
3793 enum_obj = TypedData_Make_Struct(klass, struct arith_seq, &arith_seq_data_type, ptr);
3794 ptr->enumerator.obj = Qundef;
3795
3796 return enum_obj;
3797}
3798
3799/*
3800 * Document-class: Enumerator::ArithmeticSequence
3801 *
3802 * Enumerator::ArithmeticSequence is a subclass of Enumerator,
3803 * that represents a sequence of numbers with a common difference.
3804 * Instances of this class can be generated by the Range#step and Numeric#step
3805 * methods.
3806 *
3807 * The class can be used for slicing Array (see Array#slice) or custom
3808 * collections.
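 *
 * An added illustrative example (not part of the original documentation):
 *
 *     seq = (1..10).step(3)
 *     seq.class  #=> Enumerator::ArithmeticSequence
 *     seq.to_a   #=> [1, 4, 7, 10]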
3809 */
3810
3811VALUE
3812rb_arith_seq_new(VALUE obj, VALUE meth, int argc, VALUE const *argv,
3813 rb_enumerator_size_func *size_fn,
3814 VALUE beg, VALUE end, VALUE step, int excl)
3815{
3816 VALUE aseq = enumerator_init(arith_seq_allocate(rb_cArithSeq),
3817 obj, meth, argc, argv, size_fn, Qnil, rb_keyword_given_p());
3818 struct arith_seq *ptr;
3819 TypedData_Get_Struct(aseq, struct arith_seq, &enumerator_data_type, ptr);
3820
3821 RB_OBJ_WRITE(aseq, &ptr->begin, beg);
3822 RB_OBJ_WRITE(aseq, &ptr->end, end);
3823 RB_OBJ_WRITE(aseq, &ptr->step, step);
3824 ptr->exclude_end = excl;
3825
3826 return aseq;
3827}
3828
3829/*
3830 * call-seq: aseq.begin -> num or nil
3831 *
3832 * Returns the number that defines the first element of this arithmetic
3833 * sequence.
3834 */
3835static inline VALUE
3836arith_seq_begin(VALUE self)
3837{
3838 struct arith_seq *ptr;
3839 TypedData_Get_Struct(self, struct arith_seq, &enumerator_data_type, ptr);
3840 return ptr->begin;
3841}
3842
3843/*
3844 * call-seq: aseq.end -> num or nil
3845 *
3846 * Returns the number that defines the end of this arithmetic sequence.
3847 */
3848static inline VALUE
3849arith_seq_end(VALUE self)
3850{
3851 struct arith_seq *ptr;
3852 TypedData_Get_Struct(self, struct arith_seq, &enumerator_data_type, ptr);
3853 return ptr->end;
3854}
3855
3856/*
3857 * call-seq: aseq.step -> num
3858 *
3859 * Returns the number that defines the common difference between
3860 * two adjacent elements in this arithmetic sequence.
3861 */
3862static inline VALUE
3863arith_seq_step(VALUE self)
3864{
3865 struct arith_seq *ptr;
3866 TypedData_Get_Struct(self, struct arith_seq, &enumerator_data_type, ptr);
3867 return ptr->step;
3868}
3869
3870/*
3871 * call-seq: aseq.exclude_end? -> true or false
3872 *
3873 * Returns <code>true</code> if this arithmetic sequence excludes its end value.
3874 */
3875static inline VALUE
3876arith_seq_exclude_end(VALUE self)
3877{
3878 struct arith_seq *ptr;
3879 TypedData_Get_Struct(self, struct arith_seq, &enumerator_data_type, ptr);
3880 return RBOOL(ptr->exclude_end);
3881}
3882
3883static inline int
3884arith_seq_exclude_end_p(VALUE self)
3885{
3886 struct arith_seq *ptr;
3887 TypedData_Get_Struct(self, struct arith_seq, &enumerator_data_type, ptr);
3888 return ptr->exclude_end;
3889}
3890
3891int
3892rb_arithmetic_sequence_extract(VALUE obj, rb_arithmetic_sequence_components_t *component)
3893{
3894 if (rb_obj_is_kind_of(obj, rb_cArithSeq)) {
3895 component->begin = arith_seq_begin(obj);
3896 component->end = arith_seq_end(obj);
3897 component->step = arith_seq_step(obj);
3898 component->exclude_end = arith_seq_exclude_end_p(obj);
3899 return 1;
3900 }
3901 else if (rb_range_values(obj, &component->begin, &component->end, &component->exclude_end)) {
3902 component->step = INT2FIX(1);
3903 return 1;
3904 }
3905
3906 return 0;
3907}
3908
3909VALUE
3910rb_arithmetic_sequence_beg_len_step(VALUE obj, long *begp, long *lenp, long *stepp, long len, int err)
3911{
3912 RBIMPL_NONNULL_ARG(begp);
3913 RBIMPL_NONNULL_ARG(lenp);
3914 RBIMPL_NONNULL_ARG(stepp);
3915
3916 rb_arithmetic_sequence_components_t aseq;
3917 if (!rb_arithmetic_sequence_extract(obj, &aseq)) {
3918 return Qfalse;
3919 }
3920
3921 long step = NIL_P(aseq.step) ? 1 : NUM2LONG(aseq.step);
3922 *stepp = step;
3923
3924 if (step < 0) {
3925 if (aseq.exclude_end && !NIL_P(aseq.end)) {
3926 /* Handle exclusion before range reversal */
3927 aseq.end = LONG2NUM(NUM2LONG(aseq.end) + 1);
3928
3929 /* Don't exclude the previous beginning */
3930 aseq.exclude_end = 0;
3931 }
3932 VALUE tmp = aseq.begin;
3933 aseq.begin = aseq.end;
3934 aseq.end = tmp;
3935 }
3936
3937 if (err == 0 && (step < -1 || step > 1)) {
3938 if (rb_range_component_beg_len(aseq.begin, aseq.end, aseq.exclude_end, begp, lenp, len, 1) == Qtrue) {
3939 if (*begp > len)
3940 goto out_of_range;
3941 if (*lenp > len)
3942 goto out_of_range;
3943 return Qtrue;
3944 }
3945 }
3946 else {
3947 return rb_range_component_beg_len(aseq.begin, aseq.end, aseq.exclude_end, begp, lenp, len, err);
3948 }
3949
3950 out_of_range:
3951 rb_raise(rb_eRangeError, "%+"PRIsVALUE" out of range", obj);
3952 return Qnil;
3953}
3954
3955/*
3956 * call-seq:
3957 * aseq.first -> num or nil
3958 * aseq.first(n) -> an_array
3959 *
3960 * Returns the first number in this arithmetic sequence,
3961 * or an array of the first +n+ elements.
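 *
 * Added examples for illustration:
 *
 *     (1..10).step(2).first     #=> 1
 *     (1..10).step(2).first(3)  #=> [1, 3, 5]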
3962 */
3963static VALUE
3964arith_seq_first(int argc, VALUE *argv, VALUE self)
3965{
3966 VALUE b, e, s, ary;
3967 long n;
3968 int x;
3969
3970 rb_check_arity(argc, 0, 1);
3971
3972 b = arith_seq_begin(self);
3973 e = arith_seq_end(self);
3974 s = arith_seq_step(self);
3975 if (argc == 0) {
3976 if (NIL_P(b)) {
3977 return Qnil;
3978 }
3979 if (!NIL_P(e)) {
3980 VALUE zero = INT2FIX(0);
3981 int r = rb_cmpint(rb_num_coerce_cmp(s, zero, idCmp), s, zero);
3982 if (r > 0 && RTEST(rb_funcall(b, '>', 1, e))) {
3983 return Qnil;
3984 }
3985 if (r < 0 && RTEST(rb_funcall(b, '<', 1, e))) {
3986 return Qnil;
3987 }
3988 }
3989 return b;
3990 }
3991
3992 // TODO: the following code should be extracted as arith_seq_take
3993
3994 n = NUM2LONG(argv[0]);
3995 if (n < 0) {
3996 rb_raise(rb_eArgError, "attempt to take negative size");
3997 }
3998 if (n == 0) {
3999 return rb_ary_new_capa(0);
4000 }
4001
4002 x = arith_seq_exclude_end_p(self);
4003
4004 if (FIXNUM_P(b) && NIL_P(e) && FIXNUM_P(s)) {
4005 long i = FIX2LONG(b), unit = FIX2LONG(s);
4006 ary = rb_ary_new_capa(n);
4007 while (n > 0 && FIXABLE(i)) {
4008 rb_ary_push(ary, LONG2FIX(i));
4009 i += unit; // FIXABLE + FIXABLE never overflow;
4010 --n;
4011 }
4012 if (n > 0) {
4013 b = LONG2NUM(i);
4014 while (n > 0) {
4015 rb_ary_push(ary, b);
4016 b = rb_big_plus(b, s);
4017 --n;
4018 }
4019 }
4020 return ary;
4021 }
4022 else if (FIXNUM_P(b) && FIXNUM_P(e) && FIXNUM_P(s)) {
4023 long i = FIX2LONG(b);
4024 long end = FIX2LONG(e);
4025 long unit = FIX2LONG(s);
4026 long len;
4027
4028 if (unit >= 0) {
4029 if (!x) end += 1;
4030
4031 len = end - i;
4032 if (len < 0) len = 0;
4033 ary = rb_ary_new_capa((n < len) ? n : len);
4034 while (n > 0 && i < end) {
4035 rb_ary_push(ary, LONG2FIX(i));
4036 if (i + unit < i) break;
4037 i += unit;
4038 --n;
4039 }
4040 }
4041 else {
4042 if (!x) end -= 1;
4043
4044 len = i - end;
4045 if (len < 0) len = 0;
4046 ary = rb_ary_new_capa((n < len) ? n : len);
4047 while (n > 0 && i > end) {
4048 rb_ary_push(ary, LONG2FIX(i));
4049 if (i + unit > i) break;
4050 i += unit;
4051 --n;
4052 }
4053 }
4054 return ary;
4055 }
4056 else if (RB_FLOAT_TYPE_P(b) || RB_FLOAT_TYPE_P(e) || RB_FLOAT_TYPE_P(s)) {
4057 /* generate values like ruby_float_step */
4058
4059 double unit = NUM2DBL(s);
4060 double beg = NUM2DBL(b);
4061 double end = NIL_P(e) ? (unit < 0 ? -1 : 1)*HUGE_VAL : NUM2DBL(e);
4062 double len = ruby_float_step_size(beg, end, unit, x);
4063 long i;
4064
4065 if (n > len)
4066 n = (long)len;
4067
4068 if (isinf(unit)) {
4069 if (len > 0) {
4070 ary = rb_ary_new_capa(1);
4071 rb_ary_push(ary, DBL2NUM(beg));
4072 }
4073 else {
4074 ary = rb_ary_new_capa(0);
4075 }
4076 }
4077 else if (unit == 0) {
4078 VALUE val = DBL2NUM(beg);
4079 ary = rb_ary_new_capa(n);
4080 for (i = 0; i < len; ++i) {
4081 rb_ary_push(ary, val);
4082 }
4083 }
4084 else {
4085 ary = rb_ary_new_capa(n);
4086 for (i = 0; i < n; ++i) {
4087 double d = i*unit+beg;
4088 if (unit >= 0 ? end < d : d < end) d = end;
4089 rb_ary_push(ary, DBL2NUM(d));
4090 }
4091 }
4092
4093 return ary;
4094 }
4095
4096 return rb_call_super(argc, argv);
4097}
4098
4099static inline VALUE
4100num_plus(VALUE a, VALUE b)
4101{
4102 if (RB_INTEGER_TYPE_P(a)) {
4103 return rb_int_plus(a, b);
4104 }
4105 else if (RB_FLOAT_TYPE_P(a)) {
4106 return rb_float_plus(a, b);
4107 }
4108 else if (RB_TYPE_P(a, T_RATIONAL)) {
4109 return rb_rational_plus(a, b);
4110 }
4111 else {
4112 return rb_funcallv(a, '+', 1, &b);
4113 }
4114}
4115
4116static inline VALUE
4117num_minus(VALUE a, VALUE b)
4118{
4119 if (RB_INTEGER_TYPE_P(a)) {
4120 return rb_int_minus(a, b);
4121 }
4122 else if (RB_FLOAT_TYPE_P(a)) {
4123 return rb_float_minus(a, b);
4124 }
4125 else if (RB_TYPE_P(a, T_RATIONAL)) {
4126 return rb_rational_minus(a, b);
4127 }
4128 else {
4129 return rb_funcallv(a, '-', 1, &b);
4130 }
4131}
4132
4133static inline VALUE
4134num_mul(VALUE a, VALUE b)
4135{
4136 if (RB_INTEGER_TYPE_P(a)) {
4137 return rb_int_mul(a, b);
4138 }
4139 else if (RB_FLOAT_TYPE_P(a)) {
4140 return rb_float_mul(a, b);
4141 }
4142 else if (RB_TYPE_P(a, T_RATIONAL)) {
4143 return rb_rational_mul(a, b);
4144 }
4145 else {
4146 return rb_funcallv(a, '*', 1, &b);
4147 }
4148}
4149
4150static inline VALUE
4151num_idiv(VALUE a, VALUE b)
4152{
4153 VALUE q;
4154 if (RB_INTEGER_TYPE_P(a)) {
4155 q = rb_int_idiv(a, b);
4156 }
4157 else if (RB_FLOAT_TYPE_P(a)) {
4158 q = rb_float_div(a, b);
4159 }
4160 else if (RB_TYPE_P(a, T_RATIONAL)) {
4161 q = rb_rational_div(a, b);
4162 }
4163 else {
4164 q = rb_funcallv(a, idDiv, 1, &b);
4165 }
4166
4167 if (RB_INTEGER_TYPE_P(q)) {
4168 return q;
4169 }
4170 else if (RB_FLOAT_TYPE_P(q)) {
4171 return rb_float_floor(q, 0);
4172 }
4173 else if (RB_TYPE_P(q, T_RATIONAL)) {
4174 return rb_rational_floor(q, 0);
4175 }
4176 else {
4177 return rb_funcall(q, rb_intern("floor"), 0);
4178 }
4179}
4180
4181/*
4182 * call-seq:
4183 * aseq.last -> num or nil
4184 * aseq.last(n) -> an_array
4185 *
4186 * Returns the last number in this arithmetic sequence,
4187 * or an array of the last +n+ elements.
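 *
 * Added examples for illustration:
 *
 *     (1..10).step(2).last     #=> 9
 *     (1..10).step(2).last(3)  #=> [5, 7, 9]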
4188 */
4189static VALUE
4190arith_seq_last(int argc, VALUE *argv, VALUE self)
4191{
4192 VALUE b, e, s, len_1, len, last, nv, ary;
4193 int last_is_adjusted;
4194 long n;
4195
4196 e = arith_seq_end(self);
4197 if (NIL_P(e)) {
4198 rb_raise(rb_eRangeError,
4199 "cannot get the last element of endless arithmetic sequence");
4200 }
4201
4202 b = arith_seq_begin(self);
4203 s = arith_seq_step(self);
4204
4205 len_1 = num_idiv(num_minus(e, b), s);
4206 if (rb_num_negative_int_p(len_1)) {
4207 if (argc == 0) {
4208 return Qnil;
4209 }
4210 return rb_ary_new_capa(0);
4211 }
4212
4213 last = num_plus(b, num_mul(s, len_1));
4214 if ((last_is_adjusted = arith_seq_exclude_end_p(self) && rb_equal(last, e))) {
4215 last = num_minus(last, s);
4216 }
4217
4218 if (argc == 0) {
4219 return last;
4220 }
4221
4222 if (last_is_adjusted) {
4223 len = len_1;
4224 }
4225 else {
4226 len = rb_int_plus(len_1, INT2FIX(1));
4227 }
4228
4229 rb_scan_args(argc, argv, "1", &nv);
4230 if (!RB_INTEGER_TYPE_P(nv)) {
4231 nv = rb_to_int(nv);
4232 }
4233 if (RTEST(rb_int_gt(nv, len))) {
4234 nv = len;
4235 }
4236 n = NUM2LONG(nv);
4237 if (n < 0) {
4238 rb_raise(rb_eArgError, "negative array size");
4239 }
4240
4241 ary = rb_ary_new_capa(n);
4242 b = rb_int_minus(last, rb_int_mul(s, nv));
4243 while (n) {
4244 b = rb_int_plus(b, s);
4245 rb_ary_push(ary, b);
4246 --n;
4247 }
4248
4249 return ary;
4250}
4251
4252/*
4253 * call-seq:
4254 * aseq.inspect -> string
4255 *
4256 * Convert this arithmetic sequence to a printable form.
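 *
 * An added example for illustration:
 *
 *     (1..10).step(2).inspect  #=> "((1..10).step(2))"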
4257 */
4258static VALUE
4259arith_seq_inspect(VALUE self)
4260{
4261 struct enumerator *e;
4262 VALUE eobj, str, eargs;
4263 int range_p;
4264
4265 TypedData_Get_Struct(self, struct enumerator, &enumerator_data_type, e);
4266
4267 eobj = rb_attr_get(self, id_receiver);
4268 if (NIL_P(eobj)) {
4269 eobj = e->obj;
4270 }
4271
4272 range_p = RTEST(rb_obj_is_kind_of(eobj, rb_cRange));
4273 str = rb_sprintf("(%s%"PRIsVALUE"%s.", range_p ? "(" : "", eobj, range_p ? ")" : "");
4274
4275 rb_str_buf_append(str, rb_id2str(e->meth));
4276
4277 eargs = rb_attr_get(eobj, id_arguments);
4278 if (NIL_P(eargs)) {
4279 eargs = e->args;
4280 }
4281 if (eargs != Qfalse) {
4282 long argc = RARRAY_LEN(eargs);
4283 const VALUE *argv = RARRAY_CONST_PTR(eargs); /* WB: no new reference */
4284
4285 if (argc > 0) {
4286 VALUE kwds = Qnil;
4287
4288 rb_str_buf_cat2(str, "(");
4289
4290 if (RB_TYPE_P(argv[argc-1], T_HASH)) {
4291 int all_key = TRUE;
4292 rb_hash_foreach(argv[argc-1], key_symbol_p, (VALUE)&all_key);
4293 if (all_key) kwds = argv[--argc];
4294 }
4295
4296 while (argc--) {
4297 VALUE arg = *argv++;
4298
4299 rb_str_append(str, rb_inspect(arg));
4300 rb_str_buf_cat2(str, ", ");
4301 }
4302 if (!NIL_P(kwds)) {
4303 rb_hash_foreach(kwds, kwd_append, str);
4304 }
4305 rb_str_set_len(str, RSTRING_LEN(str)-2); /* drop the last ", " */
4306 rb_str_buf_cat2(str, ")");
4307 }
4308 }
4309
4310 rb_str_buf_cat2(str, ")");
4311
4312 return str;
4313}
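/* Illustrative sketch, not part of the upstream file: the receiver is wrapped
 * in parentheses when it is a Range, followed by the generating method and
 * its arguments:
 *
 *   (1..10).step(3).inspect   # => "((1..10).step(3))"
 */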
4314
4315/*
4316 * call-seq:
4317 * aseq == obj -> true or false
4318 *
4319  * Returns <code>true</code> only if +obj+ is an Enumerator::ArithmeticSequence
4320  * with equivalent begin, end, step, and exclude_end? settings.
4321 */
4322static VALUE
4323arith_seq_eq(VALUE self, VALUE other)
4324{
4325 if (!RTEST(rb_obj_is_kind_of(other, rb_cArithSeq))) {
4326 return Qfalse;
4327 }
4328
4329 if (!rb_equal(arith_seq_begin(self), arith_seq_begin(other))) {
4330 return Qfalse;
4331 }
4332
4333 if (!rb_equal(arith_seq_end(self), arith_seq_end(other))) {
4334 return Qfalse;
4335 }
4336
4337 if (!rb_equal(arith_seq_step(self), arith_seq_step(other))) {
4338 return Qfalse;
4339 }
4340
4341 if (arith_seq_exclude_end_p(self) != arith_seq_exclude_end_p(other)) {
4342 return Qfalse;
4343 }
4344
4345 return Qtrue;
4346}
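/* Illustrative sketch, not part of the upstream file: equality is structural,
 * comparing begin, end, step and exclude_end? rather than the generated
 * elements:
 *
 *   (1..10).step(3) == (1..10).step(3)   # => true
 *   (1..10).step(3) == (1..12).step(3)   # => false, although both yield
 *                                        #    1, 4, 7, 10 (the ends differ)
 */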
4347
4348/*
4349 * call-seq:
4350 * aseq.hash -> integer
4351 *
4352 * Compute a hash-value for this arithmetic sequence.
4353  * Two arithmetic sequences with the same begin, end, step, and exclude_end?
4354 * values will generate the same hash-value.
4355 *
4356 * See also Object#hash.
4357 */
4358static VALUE
4359arith_seq_hash(VALUE self)
4360{
4361 st_index_t hash;
4362 VALUE v;
4363
4364 hash = rb_hash_start(arith_seq_exclude_end_p(self));
4365 v = rb_hash(arith_seq_begin(self));
4366 hash = rb_hash_uint(hash, NUM2LONG(v));
4367 v = rb_hash(arith_seq_end(self));
4368 hash = rb_hash_uint(hash, NUM2LONG(v));
4369 v = rb_hash(arith_seq_step(self));
4370 hash = rb_hash_uint(hash, NUM2LONG(v));
4371 hash = rb_hash_end(hash);
4372
4373 return ST2FIX(hash);
4374}
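/* Illustrative sketch, not part of the upstream file: the hash mixes
 * exclude_end?, begin, end and step, so structurally equal sequences hash
 * alike:
 *
 *   (1..10).step(3).hash == (1..10).step(3).hash   # => true
 */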
4375
4376 #define NUM_GE(x, y) RTEST(rb_num_coerce_relop((x), (y), idGE))
4377 
4378 struct arith_seq_gen {
4379 VALUE current;
4380 VALUE end;
4381 VALUE step;
4382 int excl;
4383};
4384
4385/*
4386 * call-seq:
4387 * aseq.each {|i| block } -> aseq
4388 * aseq.each -> aseq
4389 */
4390static VALUE
4391arith_seq_each(VALUE self)
4392{
4393 VALUE c, e, s, len_1, last;
4394 int x;
4395
4396 if (!rb_block_given_p()) return self;
4397
4398 c = arith_seq_begin(self);
4399 e = arith_seq_end(self);
4400 s = arith_seq_step(self);
4401 x = arith_seq_exclude_end_p(self);
4402
4403 if (!RB_TYPE_P(s, T_COMPLEX) && ruby_float_step(c, e, s, x, TRUE)) {
4404 return self;
4405 }
4406
4407 if (NIL_P(e)) {
4408 while (1) {
4409 rb_yield(c);
4410 c = rb_int_plus(c, s);
4411 }
4412
4413 return self;
4414 }
4415
4416 if (rb_equal(s, INT2FIX(0))) {
4417 while (1) {
4418 rb_yield(c);
4419 }
4420
4421 return self;
4422 }
4423
4424 len_1 = num_idiv(num_minus(e, c), s);
4425 last = num_plus(c, num_mul(s, len_1));
4426 if (x && rb_equal(last, e)) {
4427 last = num_minus(last, s);
4428 }
4429
4430 if (rb_num_negative_int_p(s)) {
4431 while (NUM_GE(c, last)) {
4432 rb_yield(c);
4433 c = num_plus(c, s);
4434 }
4435 }
4436 else {
4437 while (NUM_GE(last, c)) {
4438 rb_yield(c);
4439 c = num_plus(c, s);
4440 }
4441 }
4442
4443 return self;
4444}
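/* Illustrative sketch, not part of the upstream file: floating-point
 * sequences are handed to ruby_float_step(); otherwise the loops above yield
 * begin, begin + step, ... up to the (possibly adjusted) last value:
 *
 *   ((1..10) % 3).each { |i| print i, " " }   # prints "1 4 7 10 "
 *   ((1..) % 3).take(4)                       # => [1, 4, 7, 10]; the endless
 *                                             #    branch loops until the
 *                                             #    caller breaks out
 */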
4445
4446/*
4447 * call-seq:
4448 * aseq.size -> num or nil
4449 *
4450 * Returns the number of elements in this arithmetic sequence if it is a finite
4451  * sequence. Otherwise, returns <code>Float::INFINITY</code>.
4452 */
4453static VALUE
4454arith_seq_size(VALUE self)
4455{
4456 VALUE b, e, s, len_1, len, last;
4457 int x;
4458
4459 b = arith_seq_begin(self);
4460 e = arith_seq_end(self);
4461 s = arith_seq_step(self);
4462 x = arith_seq_exclude_end_p(self);
4463
4464 if (RB_FLOAT_TYPE_P(b) || RB_FLOAT_TYPE_P(e) || RB_FLOAT_TYPE_P(s)) {
4465 double ee, n;
4466
4467 if (NIL_P(e)) {
4468 if (rb_num_negative_int_p(s)) {
4469 ee = -HUGE_VAL;
4470 }
4471 else {
4472 ee = HUGE_VAL;
4473 }
4474 }
4475 else {
4476 ee = NUM2DBL(e);
4477 }
4478
4479 n = ruby_float_step_size(NUM2DBL(b), ee, NUM2DBL(s), x);
4480 if (isinf(n)) return DBL2NUM(n);
4481 if (POSFIXABLE(n)) return LONG2FIX((long)n);
4482 return rb_dbl2big(n);
4483 }
4484
4485 if (NIL_P(e)) {
4486 return DBL2NUM(HUGE_VAL);
4487 }
4488
4489 if (!rb_obj_is_kind_of(s, rb_cNumeric)) {
4490 s = rb_to_int(s);
4491 }
4492
4493 if (rb_equal(s, INT2FIX(0))) {
4494 return DBL2NUM(HUGE_VAL);
4495 }
4496
4497 len_1 = rb_int_idiv(rb_int_minus(e, b), s);
4498 if (rb_num_negative_int_p(len_1)) {
4499 return INT2FIX(0);
4500 }
4501
4502 last = rb_int_plus(b, rb_int_mul(s, len_1));
4503 if (x && rb_equal(last, e)) {
4504 len = len_1;
4505 }
4506 else {
4507 len = rb_int_plus(len_1, INT2FIX(1));
4508 }
4509
4510 return len;
4511}
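/* Illustrative sketch, not part of the upstream file: the count is derived
 * from (end - begin) / step, with Float::INFINITY for an endless sequence
 * (the nil-end branches above):
 *
 *   ((1..10) % 3).size    # => 4
 *   ((1...10) % 3).size   # => 3
 *   ((1..) % 3).size      # => Float::INFINITY
 */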
4512
4513#define sym(name) ID2SYM(rb_intern_const(name))
4514void
4515InitVM_Enumerator(void)
4516{
4517 ID id_private = rb_intern_const("private");
4518
4519 rb_define_method(rb_mKernel, "to_enum", obj_to_enum, -1);
4520 rb_define_method(rb_mKernel, "enum_for", obj_to_enum, -1);
4521
4522     rb_cEnumerator = rb_define_class("Enumerator", rb_cObject);
4523     rb_include_module(rb_cEnumerator, rb_mEnumerable);
4524
4525 rb_define_alloc_func(rb_cEnumerator, enumerator_allocate);
4526 rb_define_method(rb_cEnumerator, "initialize", enumerator_initialize, -1);
4527 rb_define_method(rb_cEnumerator, "initialize_copy", enumerator_init_copy, 1);
4528 rb_define_method(rb_cEnumerator, "each", enumerator_each, -1);
4529 rb_define_method(rb_cEnumerator, "each_with_index", enumerator_each_with_index, 0);
4530 rb_define_method(rb_cEnumerator, "each_with_object", enumerator_with_object, 1);
4531 rb_define_method(rb_cEnumerator, "with_index", enumerator_with_index, -1);
4532 rb_define_method(rb_cEnumerator, "with_object", enumerator_with_object, 1);
4533 rb_define_method(rb_cEnumerator, "next_values", enumerator_next_values, 0);
4534 rb_define_method(rb_cEnumerator, "peek_values", enumerator_peek_values_m, 0);
4535 rb_define_method(rb_cEnumerator, "next", enumerator_next, 0);
4536 rb_define_method(rb_cEnumerator, "peek", enumerator_peek, 0);
4537 rb_define_method(rb_cEnumerator, "feed", enumerator_feed, 1);
4538 rb_define_method(rb_cEnumerator, "rewind", enumerator_rewind, 0);
4539 rb_define_method(rb_cEnumerator, "inspect", enumerator_inspect, 0);
4540 rb_define_method(rb_cEnumerator, "size", enumerator_size, 0);
4541     rb_define_method(rb_cEnumerator, "+", enumerator_plus, 1);
4542     rb_define_method(rb_mEnumerable, "chain", enum_chain, -1);
4543
4544     /* Lazy */
4545     rb_cLazy = rb_define_class_under(rb_cEnumerator, "Lazy", rb_cEnumerator);
4546 rb_define_method(rb_mEnumerable, "lazy", enumerable_lazy, 0);
4547
4548 rb_define_alias(rb_cLazy, "_enumerable_map", "map");
4549 rb_define_alias(rb_cLazy, "_enumerable_collect", "collect");
4550 rb_define_alias(rb_cLazy, "_enumerable_flat_map", "flat_map");
4551 rb_define_alias(rb_cLazy, "_enumerable_collect_concat", "collect_concat");
4552 rb_define_alias(rb_cLazy, "_enumerable_select", "select");
4553 rb_define_alias(rb_cLazy, "_enumerable_find_all", "find_all");
4554 rb_define_alias(rb_cLazy, "_enumerable_filter", "filter");
4555 rb_define_alias(rb_cLazy, "_enumerable_filter_map", "filter_map");
4556 rb_define_alias(rb_cLazy, "_enumerable_reject", "reject");
4557 rb_define_alias(rb_cLazy, "_enumerable_grep", "grep");
4558 rb_define_alias(rb_cLazy, "_enumerable_grep_v", "grep_v");
4559 rb_define_alias(rb_cLazy, "_enumerable_zip", "zip");
4560 rb_define_alias(rb_cLazy, "_enumerable_take", "take");
4561 rb_define_alias(rb_cLazy, "_enumerable_take_while", "take_while");
4562 rb_define_alias(rb_cLazy, "_enumerable_drop", "drop");
4563 rb_define_alias(rb_cLazy, "_enumerable_drop_while", "drop_while");
4564 rb_define_alias(rb_cLazy, "_enumerable_uniq", "uniq");
4565 rb_define_private_method(rb_cLazy, "_enumerable_with_index", enumerator_with_index, -1);
4566
4567 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_map"));
4568 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_collect"));
4569 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_flat_map"));
4570 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_collect_concat"));
4571 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_select"));
4572 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_find_all"));
4573 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_filter"));
4574 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_filter_map"));
4575 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_reject"));
4576 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_grep"));
4577 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_grep_v"));
4578 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_zip"));
4579 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_take"));
4580 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_take_while"));
4581 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_drop"));
4582 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_drop_while"));
4583 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_uniq"));
4584
4585 rb_define_method(rb_cLazy, "initialize", lazy_initialize, -1);
4586 rb_define_method(rb_cLazy, "to_enum", lazy_to_enum, -1);
4587 rb_define_method(rb_cLazy, "enum_for", lazy_to_enum, -1);
4588 rb_define_method(rb_cLazy, "eager", lazy_eager, 0);
4589 rb_define_method(rb_cLazy, "map", lazy_map, 0);
4590 rb_define_method(rb_cLazy, "collect", lazy_map, 0);
4591 rb_define_method(rb_cLazy, "flat_map", lazy_flat_map, 0);
4592 rb_define_method(rb_cLazy, "collect_concat", lazy_flat_map, 0);
4593 rb_define_method(rb_cLazy, "select", lazy_select, 0);
4594 rb_define_method(rb_cLazy, "find_all", lazy_select, 0);
4595 rb_define_method(rb_cLazy, "filter", lazy_select, 0);
4596 rb_define_method(rb_cLazy, "filter_map", lazy_filter_map, 0);
4597 rb_define_method(rb_cLazy, "reject", lazy_reject, 0);
4598 rb_define_method(rb_cLazy, "grep", lazy_grep, 1);
4599 rb_define_method(rb_cLazy, "grep_v", lazy_grep_v, 1);
4600 rb_define_method(rb_cLazy, "zip", lazy_zip, -1);
4601 rb_define_method(rb_cLazy, "take", lazy_take, 1);
4602 rb_define_method(rb_cLazy, "take_while", lazy_take_while, 0);
4603 rb_define_method(rb_cLazy, "drop", lazy_drop, 1);
4604 rb_define_method(rb_cLazy, "drop_while", lazy_drop_while, 0);
4605 rb_define_method(rb_cLazy, "lazy", lazy_lazy, 0);
4606 rb_define_method(rb_cLazy, "chunk", lazy_super, -1);
4607 rb_define_method(rb_cLazy, "slice_before", lazy_super, -1);
4608 rb_define_method(rb_cLazy, "slice_after", lazy_super, -1);
4609 rb_define_method(rb_cLazy, "slice_when", lazy_super, -1);
4610 rb_define_method(rb_cLazy, "chunk_while", lazy_super, -1);
4611 rb_define_method(rb_cLazy, "uniq", lazy_uniq, 0);
4612 rb_define_method(rb_cLazy, "compact", lazy_compact, 0);
4613 rb_define_method(rb_cLazy, "with_index", lazy_with_index, -1);
4614
4615 lazy_use_super_method = rb_hash_new_with_size(18);
4616 rb_hash_aset(lazy_use_super_method, sym("map"), sym("_enumerable_map"));
4617 rb_hash_aset(lazy_use_super_method, sym("collect"), sym("_enumerable_collect"));
4618 rb_hash_aset(lazy_use_super_method, sym("flat_map"), sym("_enumerable_flat_map"));
4619 rb_hash_aset(lazy_use_super_method, sym("collect_concat"), sym("_enumerable_collect_concat"));
4620 rb_hash_aset(lazy_use_super_method, sym("select"), sym("_enumerable_select"));
4621 rb_hash_aset(lazy_use_super_method, sym("find_all"), sym("_enumerable_find_all"));
4622 rb_hash_aset(lazy_use_super_method, sym("filter"), sym("_enumerable_filter"));
4623 rb_hash_aset(lazy_use_super_method, sym("filter_map"), sym("_enumerable_filter_map"));
4624 rb_hash_aset(lazy_use_super_method, sym("reject"), sym("_enumerable_reject"));
4625 rb_hash_aset(lazy_use_super_method, sym("grep"), sym("_enumerable_grep"));
4626 rb_hash_aset(lazy_use_super_method, sym("grep_v"), sym("_enumerable_grep_v"));
4627 rb_hash_aset(lazy_use_super_method, sym("zip"), sym("_enumerable_zip"));
4628 rb_hash_aset(lazy_use_super_method, sym("take"), sym("_enumerable_take"));
4629 rb_hash_aset(lazy_use_super_method, sym("take_while"), sym("_enumerable_take_while"));
4630 rb_hash_aset(lazy_use_super_method, sym("drop"), sym("_enumerable_drop"));
4631 rb_hash_aset(lazy_use_super_method, sym("drop_while"), sym("_enumerable_drop_while"));
4632 rb_hash_aset(lazy_use_super_method, sym("uniq"), sym("_enumerable_uniq"));
4633 rb_hash_aset(lazy_use_super_method, sym("with_index"), sym("_enumerable_with_index"));
4634 rb_obj_freeze(lazy_use_super_method);
4635 rb_vm_register_global_object(lazy_use_super_method);
4636
4637#if 0 /* for RDoc */
4638 rb_define_method(rb_cLazy, "to_a", lazy_to_a, 0);
4639 rb_define_method(rb_cLazy, "chunk", lazy_chunk, 0);
4640 rb_define_method(rb_cLazy, "chunk_while", lazy_chunk_while, 0);
4641 rb_define_method(rb_cLazy, "slice_after", lazy_slice_after, 0);
4642 rb_define_method(rb_cLazy, "slice_before", lazy_slice_before, 0);
4643 rb_define_method(rb_cLazy, "slice_when", lazy_slice_when, 0);
4644#endif
4645 rb_define_alias(rb_cLazy, "force", "to_a");
4646
4647     rb_eStopIteration = rb_define_class("StopIteration", rb_eIndexError);
4648     rb_define_method(rb_eStopIteration, "result", stop_result, 0);
4649
4650 /* Generator */
4651 rb_cGenerator = rb_define_class_under(rb_cEnumerator, "Generator", rb_cObject);
4652 rb_include_module(rb_cGenerator, rb_mEnumerable);
4653 rb_define_alloc_func(rb_cGenerator, generator_allocate);
4654 rb_define_method(rb_cGenerator, "initialize", generator_initialize, -1);
4655 rb_define_method(rb_cGenerator, "initialize_copy", generator_init_copy, 1);
4656 rb_define_method(rb_cGenerator, "each", generator_each, -1);
4657
4658 /* Yielder */
4659 rb_cYielder = rb_define_class_under(rb_cEnumerator, "Yielder", rb_cObject);
4660 rb_define_alloc_func(rb_cYielder, yielder_allocate);
4661 rb_define_method(rb_cYielder, "initialize", yielder_initialize, 0);
4662 rb_define_method(rb_cYielder, "yield", yielder_yield, -2);
4663 rb_define_method(rb_cYielder, "<<", yielder_yield_push, 1);
4664 rb_define_method(rb_cYielder, "to_proc", yielder_to_proc, 0);
4665
4666 /* Producer */
4667 rb_cEnumProducer = rb_define_class_under(rb_cEnumerator, "Producer", rb_cObject);
4668 rb_define_alloc_func(rb_cEnumProducer, producer_allocate);
4669 rb_define_method(rb_cEnumProducer, "each", producer_each, 0);
4670 rb_define_singleton_method(rb_cEnumerator, "produce", enumerator_s_produce, -1);
4671
4672 /* Chain */
4673 rb_cEnumChain = rb_define_class_under(rb_cEnumerator, "Chain", rb_cEnumerator);
4674 rb_define_alloc_func(rb_cEnumChain, enum_chain_allocate);
4675 rb_define_method(rb_cEnumChain, "initialize", enum_chain_initialize, -2);
4676 rb_define_method(rb_cEnumChain, "initialize_copy", enum_chain_init_copy, 1);
4677 rb_define_method(rb_cEnumChain, "each", enum_chain_each, -1);
4678 rb_define_method(rb_cEnumChain, "size", enum_chain_size, 0);
4679 rb_define_method(rb_cEnumChain, "rewind", enum_chain_rewind, 0);
4680 rb_define_method(rb_cEnumChain, "inspect", enum_chain_inspect, 0);
4681 rb_undef_method(rb_cEnumChain, "feed");
4682 rb_undef_method(rb_cEnumChain, "next");
4683 rb_undef_method(rb_cEnumChain, "next_values");
4684 rb_undef_method(rb_cEnumChain, "peek");
4685 rb_undef_method(rb_cEnumChain, "peek_values");
4686
4687 /* Product */
4688 rb_cEnumProduct = rb_define_class_under(rb_cEnumerator, "Product", rb_cEnumerator);
4689 rb_define_alloc_func(rb_cEnumProduct, enum_product_allocate);
4690 rb_define_method(rb_cEnumProduct, "initialize", enum_product_initialize, -1);
4691 rb_define_method(rb_cEnumProduct, "initialize_copy", enum_product_init_copy, 1);
4692 rb_define_method(rb_cEnumProduct, "each", enum_product_each, 0);
4693 rb_define_method(rb_cEnumProduct, "size", enum_product_size, 0);
4694 rb_define_method(rb_cEnumProduct, "rewind", enum_product_rewind, 0);
4695 rb_define_method(rb_cEnumProduct, "inspect", enum_product_inspect, 0);
4696 rb_undef_method(rb_cEnumProduct, "feed");
4697 rb_undef_method(rb_cEnumProduct, "next");
4698 rb_undef_method(rb_cEnumProduct, "next_values");
4699 rb_undef_method(rb_cEnumProduct, "peek");
4700 rb_undef_method(rb_cEnumProduct, "peek_values");
4701 rb_define_singleton_method(rb_cEnumerator, "product", enumerator_s_product, -1);
4702
4703 /* ArithmeticSequence */
4704 rb_cArithSeq = rb_define_class_under(rb_cEnumerator, "ArithmeticSequence", rb_cEnumerator);
4705 rb_undef_alloc_func(rb_cArithSeq);
4706 rb_undef_method(CLASS_OF(rb_cArithSeq), "new");
4707 rb_define_method(rb_cArithSeq, "begin", arith_seq_begin, 0);
4708 rb_define_method(rb_cArithSeq, "end", arith_seq_end, 0);
4709 rb_define_method(rb_cArithSeq, "exclude_end?", arith_seq_exclude_end, 0);
4710 rb_define_method(rb_cArithSeq, "step", arith_seq_step, 0);
4711 rb_define_method(rb_cArithSeq, "first", arith_seq_first, -1);
4712 rb_define_method(rb_cArithSeq, "last", arith_seq_last, -1);
4713 rb_define_method(rb_cArithSeq, "inspect", arith_seq_inspect, 0);
4714 rb_define_method(rb_cArithSeq, "==", arith_seq_eq, 1);
4715 rb_define_method(rb_cArithSeq, "===", arith_seq_eq, 1);
4716 rb_define_method(rb_cArithSeq, "eql?", arith_seq_eq, 1);
4717 rb_define_method(rb_cArithSeq, "hash", arith_seq_hash, 0);
4718 rb_define_method(rb_cArithSeq, "each", arith_seq_each, 0);
4719 rb_define_method(rb_cArithSeq, "size", arith_seq_size, 0);
4720
4721 rb_provide("enumerator.so"); /* for backward compatibility */
4722}
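/* Illustrative sketch, not part of the upstream file: the registrations above
 * give the following Ruby-visible layout; ArithmeticSequence instances are
 * obtained via Range#step, Range#% or Numeric#step, since ::new is undefined
 * here:
 *
 *   Enumerator::ArithmeticSequence.ancestors.take(3)
 *   # => [Enumerator::ArithmeticSequence, Enumerator, Enumerable]
 *   Enumerator::ArithmeticSequence.respond_to?(:new)   # => false
 */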
4723#undef sym
4724
4725void
4726Init_Enumerator(void)
4727{
4728 id_rewind = rb_intern_const("rewind");
4729 id_next = rb_intern_const("next");
4730 id_result = rb_intern_const("result");
4731 id_receiver = rb_intern_const("receiver");
4732 id_arguments = rb_intern_const("arguments");
4733 id_memo = rb_intern_const("memo");
4734 id_method = rb_intern_const("method");
4735 id_force = rb_intern_const("force");
4736 id_to_enum = rb_intern_const("to_enum");
4737 id_each_entry = rb_intern_const("each_entry");
4738 sym_each = ID2SYM(id_each);
4739 sym_yield = ID2SYM(rb_intern_const("yield"));
4740
4741 InitVM(Enumerator);
4742}