Ruby 4.1.0dev (2026-01-06 revision 5d26a2aeea1368c5e37cb75ca511e62c5e21960f)
enumerator.c (5d26a2aeea1368c5e37cb75ca511e62c5e21960f)
1/************************************************
2
3 enumerator.c - provides Enumerator class
4
5 $Author$
6
7 Copyright (C) 2001-2003 Akinori MUSHA
8
9 $Idaemons: /home/cvs/rb/enumerator/enumerator.c,v 1.1.1.1 2001/07/15 10:12:48 knu Exp $
10 $RoughId: enumerator.c,v 1.6 2003/07/27 11:03:24 nobu Exp $
11 $Id$
12
13************************************************/
14
15#include "ruby/internal/config.h"
16
17#ifdef HAVE_FLOAT_H
18#include <float.h>
19#endif
20
21#include "id.h"
22#include "internal.h"
23#include "internal/class.h"
24#include "internal/enumerator.h"
25#include "internal/error.h"
26#include "internal/hash.h"
27#include "internal/imemo.h"
28#include "internal/numeric.h"
29#include "internal/range.h"
30#include "internal/rational.h"
31#include "ruby/ruby.h"
32
33/*
34 * Document-class: Enumerator
35 *
36 * A class which allows both internal and external iteration.
37 *
38 * An Enumerator can be created by the following methods.
39 * - Object#to_enum
40 * - Object#enum_for
41 * - Enumerator.new
42 *
43 * Most methods have two forms: a block form where the contents
44 * are evaluated for each item in the enumeration, and a non-block form
45 * which returns a new Enumerator wrapping the iteration.
46 *
47 * enumerator = %w(one two three).each
48 * puts enumerator.class # => Enumerator
49 *
50 * enumerator.each_with_object("foo") do |item, obj|
51 * puts "#{obj}: #{item}"
52 * end
53 *
54 * # foo: one
55 * # foo: two
56 * # foo: three
57 *
58 * enum_with_obj = enumerator.each_with_object("foo")
59 * puts enum_with_obj.class # => Enumerator
60 *
61 * enum_with_obj.each do |item, obj|
62 * puts "#{obj}: #{item}"
63 * end
64 *
65 * # foo: one
66 * # foo: two
67 * # foo: three
68 *
69 * This allows you to chain Enumerators together. For example, you
70 * can map a list's elements to strings containing the index
71 * and the element as a string via:
72 *
73 * puts %w[foo bar baz].map.with_index { |w, i| "#{i}:#{w}" }
74 * # => ["0:foo", "1:bar", "2:baz"]
75 *
76 * == External Iteration
77 *
78 * An Enumerator can also be used as an external iterator.
79 * For example, Enumerator#next returns the next value of the iterator
80 * or raises StopIteration if the Enumerator is at the end.
81 *
82 * e = [1,2,3].each # returns an enumerator object.
83 * puts e.next # => 1
84 * puts e.next # => 2
85 * puts e.next # => 3
86 * puts e.next # raises StopIteration
87 *
88 * +next+, +next_values+, +peek+, and +peek_values+ are the only methods
89 * which use external iteration (Array#zip also uses +next+ internally when zipping with a non-Array Enumerable).
90 *
91 * These methods do not affect other internal enumeration methods,
92 * unless the underlying iteration method itself has side effects, e.g. IO#each_line.
93 *
94 * FrozenError will be raised if these methods are called against a frozen enumerator.
95 * Since +rewind+ and +feed+ also change state for external iteration,
96 * these methods may raise FrozenError too.
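 *
 * For example, calling +next+ on a frozen enumerator raises FrozenError:
 *
 * e = [1, 2, 3].each
 * e.freeze
 * e.next # raises FrozenError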
97 *
98 * External iteration differs *significantly* from internal iteration
99 * due to using a Fiber:
100 * - The Fiber adds some overhead compared to internal enumeration.
101 * - The stacktrace will only include the stack from the Enumerator, not above.
102 * - Fiber-local variables are *not* inherited inside the Enumerator Fiber,
103 * which instead starts with no Fiber-local variables.
104 * - Fiber storage variables *are* inherited and are designed
105 * to handle Enumerator Fibers. Assigning to a Fiber storage variable
106 * only affects the current Fiber, so if you want to change state
107 * in the caller Fiber of the Enumerator Fiber, you need to use an
108 * extra indirection (e.g., use some object in the Fiber storage
109 * variable and mutate some ivar of it).
110 *
111 * Concretely:
112 *
113 * Thread.current[:fiber_local] = 1
114 * Fiber[:storage_var] = 1
115 * e = Enumerator.new do |y|
116 * p Thread.current[:fiber_local] # for external iteration: nil, for internal iteration: 1
117 * p Fiber[:storage_var] # => 1, inherited
118 * Fiber[:storage_var] += 1
119 * y << 42
120 * end
121 *
122 * p e.next # => 42
123 * p Fiber[:storage_var] # => 1 (it ran in a different Fiber)
124 *
125 * e.each { p _1 }
126 * p Fiber[:storage_var] # => 2 (it ran in the same Fiber/"stack" as the current Fiber)
127 *
128 * == Convert External Iteration to Internal Iteration
129 *
130 * You can use an external iterator to implement an internal iterator as follows:
131 *
132 * def ext_each(e)
133 * while true
134 * begin
135 * vs = e.next_values
136 * rescue StopIteration
137 * return $!.result
138 * end
139 * y = yield(*vs)
140 * e.feed y
141 * end
142 * end
143 *
144 * o = Object.new
145 *
146 * def o.each
147 * puts yield
148 * puts yield(1)
149 * puts yield(1, 2)
150 * 3
151 * end
152 *
153 * # use o.each as an internal iterator directly.
154 * puts o.each {|*x| puts x; [:b, *x] }
155 * # => [], [:b], [1], [:b, 1], [1, 2], [:b, 1, 2], 3
156 *
157 * # convert o.each to an external iterator for
158 * # implementing an internal iterator.
159 * puts ext_each(o.to_enum) {|*x| puts x; [:b, *x] }
160 * # => [], [:b], [1], [:b, 1], [1, 2], [:b, 1, 2], 3
161 *
162 */
163VALUE rb_cEnumerator;
164static VALUE rb_cLazy;
165static ID id_rewind, id_to_enum, id_each_entry;
166static ID id_next, id_result, id_receiver, id_arguments, id_memo, id_method, id_force;
167static VALUE sym_each, sym_yield;
168
169static VALUE lazy_use_super_method;
170
171extern ID ruby_static_id_cause;
172
173#define id_call idCall
174#define id_cause ruby_static_id_cause
175#define id_each idEach
176#define id_eqq idEqq
177#define id_initialize idInitialize
178#define id_size idSize
179
181
182struct enumerator {
183 VALUE obj;
184 ID meth;
185 VALUE args;
186 VALUE fib;
187 VALUE dst;
188 VALUE lookahead;
189 VALUE feedvalue;
190 VALUE stop_exc;
191 VALUE size;
192 VALUE procs;
193 rb_enumerator_size_func *size_fn;
194 int kw_splat;
195};
196
197RUBY_REFERENCES(enumerator_refs) = {
198 RUBY_REF_EDGE(struct enumerator, obj),
199 RUBY_REF_EDGE(struct enumerator, args),
200 RUBY_REF_EDGE(struct enumerator, fib),
201 RUBY_REF_EDGE(struct enumerator, dst),
202 RUBY_REF_EDGE(struct enumerator, lookahead),
203 RUBY_REF_EDGE(struct enumerator, feedvalue),
204 RUBY_REF_EDGE(struct enumerator, stop_exc),
205 RUBY_REF_EDGE(struct enumerator, size),
206 RUBY_REF_EDGE(struct enumerator, procs),
207 RUBY_REF_END
208};
209
210static VALUE rb_cGenerator, rb_cYielder, rb_cEnumProducer;
211
212struct generator {
213 VALUE proc;
214 VALUE obj;
215};
216
217struct yielder {
218 VALUE proc;
219};
220
221struct producer {
222 VALUE init;
223 VALUE proc;
224 VALUE size;
225};
226
227typedef struct MEMO *lazyenum_proc_func(VALUE, struct MEMO *, VALUE, long);
228typedef VALUE lazyenum_size_func(VALUE, VALUE);
229typedef int lazyenum_precheck_func(VALUE proc_entry);
230typedef struct {
231 lazyenum_proc_func *proc;
232 lazyenum_size_func *size;
233 lazyenum_precheck_func *precheck;
234} lazyenum_funcs;
235
236struct proc_entry {
237 VALUE proc;
238 VALUE memo;
239 const lazyenum_funcs *fn;
240};
241
242static VALUE generator_allocate(VALUE klass);
243static VALUE generator_init(VALUE obj, VALUE proc);
244
245static VALUE rb_cEnumChain;
246
247struct enum_chain {
248 VALUE enums;
249 long pos;
250};
251
252static VALUE rb_cEnumProduct;
253
254struct enum_product {
255 VALUE enums;
256};
257
258VALUE rb_cArithSeq;
259
260static const rb_data_type_t enumerator_data_type = {
261 "enumerator",
262 {
263 RUBY_REFS_LIST_PTR(enumerator_refs),
264 RUBY_TYPED_DEFAULT_FREE,
265 NULL, // Nothing allocated externally, so don't need a memsize function
266 NULL,
267 },
268 0, NULL, RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED | RUBY_TYPED_DECL_MARKING | RUBY_TYPED_EMBEDDABLE
269};
270
271static struct enumerator *
272enumerator_ptr(VALUE obj)
273{
274 struct enumerator *ptr;
275
276 TypedData_Get_Struct(obj, struct enumerator, &enumerator_data_type, ptr);
277 if (!ptr || UNDEF_P(ptr->obj)) {
278 rb_raise(rb_eArgError, "uninitialized enumerator");
279 }
280 return ptr;
281}
282
283static void
284proc_entry_mark_and_move(void *p)
285{
286 struct proc_entry *ptr = p;
287 rb_gc_mark_and_move(&ptr->proc);
288 rb_gc_mark_and_move(&ptr->memo);
289}
290
291static const rb_data_type_t proc_entry_data_type = {
292 "proc_entry",
293 {
294 proc_entry_mark_and_move,
295 RUBY_TYPED_DEFAULT_FREE,
296 NULL, // Nothing allocated externally, so don't need a memsize function
297 proc_entry_mark_and_move,
298 },
299 0, 0, RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED | RUBY_TYPED_EMBEDDABLE
300};
301
302static struct proc_entry *
303proc_entry_ptr(VALUE proc_entry)
304{
305 struct proc_entry *ptr;
306
307 TypedData_Get_Struct(proc_entry, struct proc_entry, &proc_entry_data_type, ptr);
308
309 return ptr;
310}
311
312/*
313 * call-seq:
314 * obj.to_enum(method = :each, *args) -> enum
315 * obj.enum_for(method = :each, *args) -> enum
316 * obj.to_enum(method = :each, *args) {|*args| block} -> enum
317 * obj.enum_for(method = :each, *args){|*args| block} -> enum
318 *
319 * Creates a new Enumerator which will enumerate by calling +method+ on
320 * +obj+, passing +args+ if any. Whatever the method _yields_ becomes
321 * the values of the enumerator.
322 *
323 * If a block is given, it will be used to calculate the size of
324 * the enumerator without the need to iterate it (see Enumerator#size).
325 *
326 * === Examples
327 *
328 * str = "xyz"
329 *
330 * enum = str.enum_for(:each_byte)
331 * enum.each { |b| puts b }
332 * # => 120
333 * # => 121
334 * # => 122
335 *
336 * # protect an array from being modified by some_method
337 * a = [1, 2, 3]
338 * some_method(a.to_enum)
339 *
340 * # String#split in block form is more memory-efficient:
341 * very_large_string.split("|") { |chunk| return chunk if chunk.include?('DATE') }
342 * # This could be rewritten more idiomatically with to_enum:
343 * very_large_string.to_enum(:split, "|").lazy.grep(/DATE/).first
344 *
345 * It is typical to call to_enum when defining methods for
346 * a generic Enumerable, in case no block is passed.
347 *
348 * Here is such an example, with parameter passing and a sizing block:
349 *
350 * module Enumerable
351 * # a generic method to repeat the values of any enumerable
352 * def repeat(n)
353 * raise ArgumentError, "#{n} is negative!" if n < 0
354 * unless block_given?
355 * return to_enum(__method__, n) do # __method__ is :repeat here
356 * sz = size # Call size and multiply by n...
357 * sz * n if sz # but return nil if size itself is nil
358 * end
359 * end
360 * each do |*val|
361 * n.times { yield *val }
362 * end
363 * end
364 * end
365 *
366 * %i[hello world].repeat(2) { |w| puts w }
367 * # => Prints 'hello', 'hello', 'world', 'world'
368 * enum = (1..14).repeat(3)
369 * # => returns an Enumerator when called without a block
370 * enum.first(4) # => [1, 1, 1, 2]
371 * enum.size # => 42
372 */
373static VALUE
374obj_to_enum(int argc, VALUE *argv, VALUE obj)
375{
376 VALUE enumerator, meth = sym_each;
377
378 if (argc > 0) {
379 --argc;
380 meth = *argv++;
381 }
382 enumerator = rb_enumeratorize_with_size(obj, meth, argc, argv, 0);
383 if (rb_block_given_p()) {
384 RB_OBJ_WRITE(enumerator, &enumerator_ptr(enumerator)->size, rb_block_proc());
385 }
386 return enumerator;
387}
388
389static VALUE
390enumerator_allocate(VALUE klass)
391{
392 struct enumerator *ptr;
393 VALUE enum_obj;
394
395 enum_obj = TypedData_Make_Struct(klass, struct enumerator, &enumerator_data_type, ptr);
396 ptr->obj = Qundef;
397
398 return enum_obj;
399}
400
401static VALUE
402enumerator_init(VALUE enum_obj, VALUE obj, VALUE meth, int argc, const VALUE *argv, rb_enumerator_size_func *size_fn, VALUE size, int kw_splat)
403{
404 struct enumerator *ptr;
405
406 rb_check_frozen(enum_obj);
407 TypedData_Get_Struct(enum_obj, struct enumerator, &enumerator_data_type, ptr);
408
409 if (!ptr) {
410 rb_raise(rb_eArgError, "unallocated enumerator");
411 }
412
413 RB_OBJ_WRITE(enum_obj, &ptr->obj, obj);
414 ptr->meth = rb_to_id(meth);
415 if (argc) RB_OBJ_WRITE(enum_obj, &ptr->args, rb_ary_new4(argc, argv));
416 ptr->fib = 0;
417 ptr->dst = Qnil;
418 ptr->lookahead = Qundef;
419 ptr->feedvalue = Qundef;
420 ptr->stop_exc = Qfalse;
421 RB_OBJ_WRITE(enum_obj, &ptr->size, size);
422 ptr->size_fn = size_fn;
423 ptr->kw_splat = kw_splat;
424
425 return enum_obj;
426}
427
428static VALUE
429convert_to_feasible_size_value(VALUE obj)
430{
431 if (NIL_P(obj)) {
432 return obj;
433 }
434 else if (rb_respond_to(obj, id_call)) {
435 return obj;
436 }
437 else if (RB_FLOAT_TYPE_P(obj) && RFLOAT_VALUE(obj) == HUGE_VAL) {
438 return obj;
439 }
440 else {
441 return rb_to_int(obj);
442 }
443}
444
445/*
446 * call-seq:
447 * Enumerator.new(size = nil) { |yielder| ... }
448 *
449 * Creates a new Enumerator object, which can be used as an
450 * Enumerable.
451 *
452 * Iteration is defined by the given block, in
453 * which a "yielder" object, given as block parameter, can be used to
454 * yield a value by calling the +yield+ method (aliased as <code><<</code>):
455 *
456 * fib = Enumerator.new do |y|
457 * a = b = 1
458 * loop do
459 * y << a
460 * a, b = b, a + b
461 * end
462 * end
463 *
464 * fib.take(10) # => [1, 1, 2, 3, 5, 8, 13, 21, 34, 55]
465 *
466 * The optional parameter can be used to specify how to calculate the size
467 * in a lazy fashion (see Enumerator#size). It can either be a value or
468 * a callable object.
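 *
 * For example, with a callable size:
 *
 * squares = Enumerator.new(-> { 3 }) do |y|
 * 1.upto(3) { |i| y << i * i }
 * end
 * squares.size # => 3
 * squares.to_a # => [1, 4, 9]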
469 */
470static VALUE
471enumerator_initialize(int argc, VALUE *argv, VALUE obj)
472{
473 VALUE iter = rb_block_proc();
474 VALUE recv = generator_init(generator_allocate(rb_cGenerator), iter);
475 VALUE arg0 = rb_check_arity(argc, 0, 1) ? argv[0] : Qnil;
476 VALUE size = convert_to_feasible_size_value(arg0);
477
478 return enumerator_init(obj, recv, sym_each, 0, 0, 0, size, false);
479}
480
481/* :nodoc: */
482static VALUE
483enumerator_init_copy(VALUE obj, VALUE orig)
484{
485 struct enumerator *ptr0, *ptr1;
486
487 if (!OBJ_INIT_COPY(obj, orig)) return obj;
488 ptr0 = enumerator_ptr(orig);
489 if (ptr0->fib) {
490 /* Fibers cannot be copied */
491 rb_raise(rb_eTypeError, "can't copy execution context");
492 }
493
494 TypedData_Get_Struct(obj, struct enumerator, &enumerator_data_type, ptr1);
495
496 if (!ptr1) {
497 rb_raise(rb_eArgError, "unallocated enumerator");
498 }
499
500 RB_OBJ_WRITE(obj, &ptr1->obj, ptr0->obj);
501 ptr1->meth = ptr0->meth;
502 RB_OBJ_WRITE(obj, &ptr1->args, ptr0->args);
503 ptr1->fib = 0;
504 ptr1->lookahead = Qundef;
505 ptr1->feedvalue = Qundef;
506 RB_OBJ_WRITE(obj, &ptr1->size, ptr0->size);
507 ptr1->size_fn = ptr0->size_fn;
508
509 return obj;
510}
511
512/*
513 * For backwards compatibility; use rb_enumeratorize_with_size
514 */
515VALUE
516rb_enumeratorize(VALUE obj, VALUE meth, int argc, const VALUE *argv)
517{
518 return rb_enumeratorize_with_size(obj, meth, argc, argv, 0);
519}
520
521static VALUE lazy_to_enum_i(VALUE self, VALUE meth, int argc, const VALUE *argv, rb_enumerator_size_func *size_fn, int kw_splat);
522static int lazy_precheck(VALUE procs);
523
524VALUE
525rb_enumeratorize_with_size_kw(VALUE obj, VALUE meth, int argc, const VALUE *argv, rb_enumerator_size_func *size_fn, int kw_splat)
526{
527 VALUE base_class = rb_cEnumerator;
528
529 if (RTEST(rb_obj_is_kind_of(obj, rb_cLazy))) {
530 base_class = rb_cLazy;
531 }
532 else if (RTEST(rb_obj_is_kind_of(obj, rb_cEnumChain))) {
533 obj = enumerator_init(enumerator_allocate(rb_cEnumerator), obj, sym_each, 0, 0, 0, Qnil, false);
534 }
535
536 return enumerator_init(enumerator_allocate(base_class),
537 obj, meth, argc, argv, size_fn, Qnil, kw_splat);
538}
539
540VALUE
541rb_enumeratorize_with_size(VALUE obj, VALUE meth, int argc, const VALUE *argv, rb_enumerator_size_func *size_fn)
542{
543 return rb_enumeratorize_with_size_kw(obj, meth, argc, argv, size_fn, rb_keyword_given_p());
544}
545
546static VALUE
547enumerator_block_call(VALUE obj, rb_block_call_func *func, VALUE arg)
548{
549 int argc = 0;
550 const VALUE *argv = 0;
551 const struct enumerator *e = enumerator_ptr(obj);
552 ID meth = e->meth;
553
554 VALUE args = e->args;
555 if (args) {
556 argc = RARRAY_LENINT(args);
557 argv = RARRAY_CONST_PTR(args);
558 }
559
560 VALUE ret = rb_block_call_kw(e->obj, meth, argc, argv, func, arg, e->kw_splat);
561
562 RB_GC_GUARD(args);
563
564 return ret;
565}
566
567/*
568 * call-seq:
569 * enum.each { |elm| block } -> obj
570 * enum.each -> enum
571 * enum.each(*appending_args) { |elm| block } -> obj
572 * enum.each(*appending_args) -> an_enumerator
573 *
574 * Iterates according to how this Enumerator was constructed, yielding each element to the given block.
575 * If no block and no arguments are given, returns self.
576 *
577 * === Examples
578 *
579 * "Hello, world!".scan(/\w+/) #=> ["Hello", "world"]
580 * "Hello, world!".to_enum(:scan, /\w+/).to_a #=> ["Hello", "world"]
581 * "Hello, world!".to_enum(:scan).each(/\w+/).to_a #=> ["Hello", "world"]
582 *
583 * obj = Object.new
584 *
585 * def obj.each_arg(a, b=:b, *rest)
586 * yield a
587 * yield b
588 * yield rest
589 * :method_returned
590 * end
591 *
592 * enum = obj.to_enum :each_arg, :a, :x
593 *
594 * enum.each.to_a #=> [:a, :x, []]
595 * enum.each.equal?(enum) #=> true
596 * enum.each { |elm| elm } #=> :method_returned
597 *
598 * enum.each(:y, :z).to_a #=> [:a, :x, [:y, :z]]
599 * enum.each(:y, :z).equal?(enum) #=> false
600 * enum.each(:y, :z) { |elm| elm } #=> :method_returned
601 *
602 */
603static VALUE
604enumerator_each(int argc, VALUE *argv, VALUE obj)
605{
606 struct enumerator *e = enumerator_ptr(obj);
607
608 if (argc > 0) {
609 VALUE args = (e = enumerator_ptr(obj = rb_obj_dup(obj)))->args;
610 if (args) {
611#if SIZEOF_INT < SIZEOF_LONG
612 /* check int range overflow */
613 rb_long2int(RARRAY_LEN(args) + argc);
614#endif
615 args = rb_ary_dup(args);
616 rb_ary_cat(args, argv, argc);
617 }
618 else {
619 args = rb_ary_new4(argc, argv);
620 }
621 RB_OBJ_WRITE(obj, &e->args, args);
622 e->size = Qnil;
623 e->size_fn = 0;
624 }
625 if (!rb_block_given_p()) return obj;
626
627 if (!lazy_precheck(e->procs)) return Qnil;
628
629 return enumerator_block_call(obj, 0, obj);
630}
631
632static VALUE
633enumerator_with_index_i(RB_BLOCK_CALL_FUNC_ARGLIST(val, m))
634{
635 struct MEMO *memo = (struct MEMO *)m;
636 VALUE idx = memo->v1;
637 MEMO_V1_SET(memo, rb_int_succ(idx));
638
639 if (argc <= 1)
640 return rb_yield_values(2, val, idx);
641
642 return rb_yield_values(2, rb_ary_new4(argc, argv), idx);
643}
644
645static VALUE
646enumerator_size(VALUE obj);
647
648static VALUE
649enumerator_enum_size(VALUE obj, VALUE args, VALUE eobj)
650{
651 return enumerator_size(obj);
652}
653
654/*
655 * call-seq:
656 * e.with_index(offset = 0) {|(*args), idx| ... }
657 * e.with_index(offset = 0)
658 *
659 * Iterates the given block for each element with an index, which
660 * starts from +offset+. If no block is given, returns a new Enumerator
661 * that includes the index, starting from +offset+.
662 *
663 * +offset+:: the starting index to use
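 *
 * === Example
 *
 * %w[foo bar baz].to_enum.with_index(1).to_a
 * # => [["foo", 1], ["bar", 2], ["baz", 3]]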
664 *
665 */
666static VALUE
667enumerator_with_index(int argc, VALUE *argv, VALUE obj)
668{
669 VALUE memo;
670
671 rb_check_arity(argc, 0, 1);
672 RETURN_SIZED_ENUMERATOR(obj, argc, argv, enumerator_enum_size);
673 memo = (!argc || NIL_P(memo = argv[0])) ? INT2FIX(0) : rb_to_int(memo);
674 return enumerator_block_call(obj, enumerator_with_index_i, (VALUE)rb_imemo_memo_new(memo, 0, 0));
675}
676
677/*
678 * call-seq:
679 * e.each_with_index {|(*args), idx| ... }
680 * e.each_with_index
681 *
682 * Same as Enumerator#with_index(0), i.e. there is no starting offset.
683 *
684 * If no block is given, a new Enumerator is returned that includes the index.
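 *
 * === Example
 *
 * %w[a b c].to_enum.each_with_index.to_a
 * # => [["a", 0], ["b", 1], ["c", 2]]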
685 *
686 */
687static VALUE
688enumerator_each_with_index(VALUE obj)
689{
690 return enumerator_with_index(0, NULL, obj);
691}
692
693static VALUE
694enumerator_with_object_i(RB_BLOCK_CALL_FUNC_ARGLIST(val, memo))
695{
696 if (argc <= 1)
697 return rb_yield_values(2, val, memo);
698
699 return rb_yield_values(2, rb_ary_new4(argc, argv), memo);
700}
701
702/*
703 * call-seq:
704 * e.each_with_object(obj) {|(*args), obj| ... }
705 * e.each_with_object(obj)
706 * e.with_object(obj) {|(*args), obj| ... }
707 * e.with_object(obj)
708 *
709 * Iterates the given block for each element with an arbitrary object, +obj+,
710 * and returns +obj+.
711 *
712 * If no block is given, returns a new Enumerator.
713 *
714 * === Example
715 *
716 * to_three = Enumerator.new do |y|
717 * 3.times do |x|
718 * y << x
719 * end
720 * end
721 *
722 * to_three_with_string = to_three.with_object("foo")
723 * to_three_with_string.each do |x,string|
724 * puts "#{string}: #{x}"
725 * end
726 *
727 * # => foo: 0
728 * # => foo: 1
729 * # => foo: 2
730 */
731static VALUE
732enumerator_with_object(VALUE obj, VALUE memo)
733{
734 RETURN_SIZED_ENUMERATOR(obj, 1, &memo, enumerator_enum_size);
735 enumerator_block_call(obj, enumerator_with_object_i, memo);
736
737 return memo;
738}
739
740static VALUE
741next_ii(RB_BLOCK_CALL_FUNC_ARGLIST(i, obj))
742{
743 struct enumerator *e = enumerator_ptr(obj);
744 VALUE feedvalue = Qnil;
745 VALUE args = rb_ary_new4(argc, argv);
746 rb_fiber_yield(1, &args);
747 if (!UNDEF_P(e->feedvalue)) {
748 feedvalue = e->feedvalue;
749 e->feedvalue = Qundef;
750 }
751 return feedvalue;
752}
753
754static VALUE
755next_i(RB_BLOCK_CALL_FUNC_ARGLIST(_, obj))
756{
757 struct enumerator *e = enumerator_ptr(obj);
758 VALUE nil = Qnil;
759 VALUE result;
760
761 result = rb_block_call(obj, id_each, 0, 0, next_ii, obj);
762 RB_OBJ_WRITE(obj, &e->stop_exc, rb_exc_new2(rb_eStopIteration, "iteration reached an end"));
763 rb_ivar_set(e->stop_exc, id_result, result);
764 return rb_fiber_yield(1, &nil);
765}
766
767static void
768next_init(VALUE obj, struct enumerator *e)
769{
770 VALUE curr = rb_fiber_current();
771 RB_OBJ_WRITE(obj, &e->dst, curr);
772 RB_OBJ_WRITE(obj, &e->fib, rb_fiber_new(next_i, obj));
773 e->lookahead = Qundef;
774}
775
776static VALUE
777get_next_values(VALUE obj, struct enumerator *e)
778{
779 VALUE curr, vs;
780
781 if (e->stop_exc) {
782 VALUE exc = e->stop_exc;
783 VALUE result = rb_attr_get(exc, id_result);
784 VALUE mesg = rb_attr_get(exc, idMesg);
785 if (!NIL_P(mesg)) mesg = rb_str_dup(mesg);
786 VALUE stop_exc = rb_exc_new_str(rb_eStopIteration, mesg);
787 rb_ivar_set(stop_exc, id_cause, exc);
788 rb_ivar_set(stop_exc, id_result, result);
789 rb_exc_raise(stop_exc);
790 }
791
792 curr = rb_fiber_current();
793
794 if (!e->fib || !rb_fiber_alive_p(e->fib)) {
795 next_init(obj, e);
796 }
797
798 vs = rb_fiber_resume(e->fib, 1, &curr);
799 if (e->stop_exc) {
800 e->fib = 0;
801 e->dst = Qnil;
802 e->lookahead = Qundef;
803 e->feedvalue = Qundef;
804 rb_exc_raise(e->stop_exc);
805 }
806 return vs;
807}
808
809/*
810 * call-seq:
811 * e.next_values -> array
812 *
813 * Returns the next object in the enumerator as an array, and moves the
814 * internal position forward. When the position reaches the end,
815 * StopIteration is raised.
816 *
817 * See class-level notes about external iterators.
818 *
819 * This method can be used to distinguish <code>yield</code> and <code>yield
820 * nil</code>.
821 *
822 * === Example
823 *
824 * o = Object.new
825 * def o.each
826 * yield
827 * yield 1
828 * yield 1, 2
829 * yield nil
830 * yield [1, 2]
831 * end
832 * e = o.to_enum
833 * p e.next_values
834 * p e.next_values
835 * p e.next_values
836 * p e.next_values
837 * p e.next_values
838 * e = o.to_enum
839 * p e.next
840 * p e.next
841 * p e.next
842 * p e.next
843 * p e.next
844 *
845 * ## yield args next_values next
846 * # yield [] nil
847 * # yield 1 [1] 1
848 * # yield 1, 2 [1, 2] [1, 2]
849 * # yield nil [nil] nil
850 * # yield [1, 2] [[1, 2]] [1, 2]
851 *
852 */
853
854static VALUE
855enumerator_next_values(VALUE obj)
856{
857 struct enumerator *e = enumerator_ptr(obj);
858 VALUE vs;
859
860 rb_check_frozen(obj);
861
862 if (!UNDEF_P(e->lookahead)) {
863 vs = e->lookahead;
864 e->lookahead = Qundef;
865 return vs;
866 }
867
868 return get_next_values(obj, e);
869}
870
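/* Reduce a packed values array to a single value: [] -> nil, [x] -> x, longer arrays stay arrays (duplicated when dup is nonzero). */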
871static VALUE
872ary2sv(VALUE args, int dup)
873{
874 if (!RB_TYPE_P(args, T_ARRAY))
875 return args;
876
877 switch (RARRAY_LEN(args)) {
878 case 0:
879 return Qnil;
880
881 case 1:
882 return RARRAY_AREF(args, 0);
883
884 default:
885 if (dup)
886 return rb_ary_dup(args);
887 return args;
888 }
889}
890
891/*
892 * call-seq:
893 * e.next -> object
894 *
895 * Returns the next object in the enumerator, and moves the internal position
896 * forward. When the position reaches the end, StopIteration is raised.
897 *
898 * === Example
899 *
900 * a = [1,2,3]
901 * e = a.to_enum
902 * p e.next #=> 1
903 * p e.next #=> 2
904 * p e.next #=> 3
905 * p e.next #raises StopIteration
906 *
907 * See class-level notes about external iterators.
908 *
909 */
910
911static VALUE
912enumerator_next(VALUE obj)
913{
914 VALUE vs = enumerator_next_values(obj);
915 return ary2sv(vs, 0);
916}
917
918static VALUE
919enumerator_peek_values(VALUE obj)
920{
921 struct enumerator *e = enumerator_ptr(obj);
922
923 rb_check_frozen(obj);
924
925 if (UNDEF_P(e->lookahead)) {
926 RB_OBJ_WRITE(obj, &e->lookahead, get_next_values(obj, e));
927 }
928
929 return e->lookahead;
930}
931
932/*
933 * call-seq:
934 * e.peek_values -> array
935 *
936 * Returns the next object as an array, similar to Enumerator#next_values, but
937 * doesn't move the internal position forward. If the position is already at
938 * the end, StopIteration is raised.
939 *
940 * See class-level notes about external iterators.
941 *
942 * === Example
943 *
944 * o = Object.new
945 * def o.each
946 * yield
947 * yield 1
948 * yield 1, 2
949 * end
950 * e = o.to_enum
951 * p e.peek_values #=> []
952 * e.next
953 * p e.peek_values #=> [1]
954 * p e.peek_values #=> [1]
955 * e.next
956 * p e.peek_values #=> [1, 2]
957 * e.next
958 * p e.peek_values # raises StopIteration
959 *
960 */
961
962static VALUE
963enumerator_peek_values_m(VALUE obj)
964{
965 return rb_ary_dup(enumerator_peek_values(obj));
966}
967
968/*
969 * call-seq:
970 * e.peek -> object
971 *
972 * Returns the next object in the enumerator, but doesn't move the internal
973 * position forward. If the position is already at the end, StopIteration
974 * is raised.
975 *
976 * See class-level notes about external iterators.
977 *
978 * === Example
979 *
980 * a = [1,2,3]
981 * e = a.to_enum
982 * p e.next #=> 1
983 * p e.peek #=> 2
984 * p e.peek #=> 2
985 * p e.peek #=> 2
986 * p e.next #=> 2
987 * p e.next #=> 3
988 * p e.peek #raises StopIteration
989 *
990 */
991
992static VALUE
993enumerator_peek(VALUE obj)
994{
995 VALUE vs = enumerator_peek_values(obj);
996 return ary2sv(vs, 1);
997}
998
999/*
1000 * call-seq:
1001 * e.feed obj -> nil
1002 *
1003 * Sets the value to be returned by the next yield inside +e+.
1004 *
1005 * If the value is not set, the yield returns nil.
1006 *
1007 * This value is cleared after being yielded.
1008 *
1009 * # Array#map passes the array's elements to "yield" and collects the
1010 * # results of "yield" as an array.
1011 * # Following example shows that "next" returns the passed elements and
1012 * # values passed to "feed" are collected as an array which can be
1013 * # obtained by StopIteration#result.
1014 * e = [1,2,3].map
1015 * p e.next #=> 1
1016 * e.feed "a"
1017 * p e.next #=> 2
1018 * e.feed "b"
1019 * p e.next #=> 3
1020 * e.feed "c"
1021 * begin
1022 * e.next
1023 * rescue StopIteration
1024 * p $!.result #=> ["a", "b", "c"]
1025 * end
1026 *
1027 * o = Object.new
1028 * def o.each
1029 * x = yield # (2) blocks
1030 * p x # (5) => "foo"
1031 * x = yield # (6) blocks
1032 * p x # (8) => nil
1033 * x = yield # (9) blocks
1034 * p x # not reached w/o another e.next
1035 * end
1036 *
1037 * e = o.to_enum
1038 * e.next # (1)
1039 * e.feed "foo" # (3)
1040 * e.next # (4)
1041 * e.next # (7)
1042 * # (10)
1043 */
1044
1045static VALUE
1046enumerator_feed(VALUE obj, VALUE v)
1047{
1048 struct enumerator *e = enumerator_ptr(obj);
1049
1050 rb_check_frozen(obj);
1051
1052 if (!UNDEF_P(e->feedvalue)) {
1053 rb_raise(rb_eTypeError, "feed value already set");
1054 }
1055 RB_OBJ_WRITE(obj, &e->feedvalue, v);
1056
1057 return Qnil;
1058}
1059
1060/*
1061 * call-seq:
1062 * e.rewind -> e
1063 *
1064 * Rewinds the enumeration sequence to the beginning.
1065 *
1066 * If the enclosed object responds to a "rewind" method, it is called.
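 *
 * === Example
 *
 * e = [1, 2, 3].each
 * p e.next # => 1
 * p e.next # => 2
 * e.rewind
 * p e.next # => 1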
1067 */
1068
1069static VALUE
1070enumerator_rewind(VALUE obj)
1071{
1072 struct enumerator *e = enumerator_ptr(obj);
1073
1074 rb_check_frozen(obj);
1075
1076 rb_check_funcall(e->obj, id_rewind, 0, 0);
1077
1078 e->fib = 0;
1079 e->dst = Qnil;
1080 e->lookahead = Qundef;
1081 e->feedvalue = Qundef;
1082 e->stop_exc = Qfalse;
1083 return obj;
1084}
1085
1086static struct generator *generator_ptr(VALUE obj);
1087static VALUE append_method(VALUE obj, VALUE str, ID default_method, VALUE default_args);
1088
1089static VALUE
1090inspect_enumerator(VALUE obj, VALUE dummy, int recur)
1091{
1092 struct enumerator *e;
1093 VALUE eobj, str, cname;
1094
1095 TypedData_Get_Struct(obj, struct enumerator, &enumerator_data_type, e);
1096
1097 cname = rb_obj_class(obj);
1098
1099 if (!e || UNDEF_P(e->obj)) {
1100 return rb_sprintf("#<%"PRIsVALUE": uninitialized>", rb_class_path(cname));
1101 }
1102
1103 if (recur) {
1104 str = rb_sprintf("#<%"PRIsVALUE": ...>", rb_class_path(cname));
1105 return str;
1106 }
1107
1108 if (e->procs) {
1109 long i;
1110
1111 eobj = generator_ptr(e->obj)->obj;
1112 /* When procs are chained, traverse all proc entries manually to build the inspect string */
1113 if (rb_obj_class(eobj) == cname) {
1114 str = rb_inspect(eobj);
1115 }
1116 else {
1117 str = rb_sprintf("#<%"PRIsVALUE": %+"PRIsVALUE">", rb_class_path(cname), eobj);
1118 }
1119 for (i = 0; i < RARRAY_LEN(e->procs); i++) {
1120 str = rb_sprintf("#<%"PRIsVALUE": %"PRIsVALUE, cname, str);
1121 append_method(RARRAY_AREF(e->procs, i), str, e->meth, e->args);
1122 rb_str_buf_cat2(str, ">");
1123 }
1124 return str;
1125 }
1126
1127 eobj = rb_attr_get(obj, id_receiver);
1128 if (NIL_P(eobj)) {
1129 eobj = e->obj;
1130 }
1131
1132 /* (1..100).each_cons(2) => "#<Enumerator: 1..100:each_cons(2)>" */
1133 str = rb_sprintf("#<%"PRIsVALUE": %+"PRIsVALUE, rb_class_path(cname), eobj);
1134 append_method(obj, str, e->meth, e->args);
1135
1136 rb_str_buf_cat2(str, ">");
1137
1138 return str;
1139}
1140
1141static int
1142key_symbol_p(VALUE key, VALUE val, VALUE arg)
1143{
1144 if (SYMBOL_P(key)) return ST_CONTINUE;
1145 *(int *)arg = FALSE;
1146 return ST_STOP;
1147}
1148
1149static int
1150kwd_append(VALUE key, VALUE val, VALUE str)
1151{
1152 if (!SYMBOL_P(key)) rb_raise(rb_eRuntimeError, "non-symbol key inserted");
1153 rb_str_catf(str, "% "PRIsVALUE": %"PRIsVALUE", ", key, val);
1154 return ST_CONTINUE;
1155}
1156
1157static VALUE
1158append_method(VALUE obj, VALUE str, ID default_method, VALUE default_args)
1159{
1160 VALUE method, eargs;
1161
1162 method = rb_attr_get(obj, id_method);
1163 if (method != Qfalse) {
1164 if (!NIL_P(method)) {
1165 Check_Type(method, T_SYMBOL);
1166 method = rb_sym2str(method);
1167 }
1168 else {
1169 method = rb_id2str(default_method);
1170 }
1171 rb_str_buf_cat2(str, ":");
1172 rb_str_buf_append(str, method);
1173 }
1174
1175 eargs = rb_attr_get(obj, id_arguments);
1176 if (NIL_P(eargs)) {
1177 eargs = default_args;
1178 }
1179 if (eargs != Qfalse) {
1180 long argc = RARRAY_LEN(eargs);
1181 const VALUE *argv = RARRAY_CONST_PTR(eargs); /* WB: no new reference */
1182
1183 if (argc > 0) {
1184 VALUE kwds = Qnil;
1185
1186 rb_str_buf_cat2(str, "(");
1187
1188 if (RB_TYPE_P(argv[argc-1], T_HASH) && !RHASH_EMPTY_P(argv[argc-1])) {
1189 int all_key = TRUE;
1190 rb_hash_foreach(argv[argc-1], key_symbol_p, (VALUE)&all_key);
1191 if (all_key) kwds = argv[--argc];
1192 }
1193
1194 while (argc--) {
1195 VALUE arg = *argv++;
1196
1197 rb_str_append(str, rb_inspect(arg));
1198 rb_str_buf_cat2(str, ", ");
1199 }
1200 if (!NIL_P(kwds)) {
1201 rb_hash_foreach(kwds, kwd_append, str);
1202 }
1203 rb_str_set_len(str, RSTRING_LEN(str)-2);
1204 rb_str_buf_cat2(str, ")");
1205 }
1206 }
1207
1208 return str;
1209}
1210
1211/*
1212 * call-seq:
1213 * e.inspect -> string
1214 *
1215 * Creates a printable version of <i>e</i>.
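 *
 * (1..100).each_cons(2).inspect
 * # => "#<Enumerator: 1..100:each_cons(2)>"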
1216 */
1217
1218static VALUE
1219enumerator_inspect(VALUE obj)
1220{
1221 return rb_exec_recursive(inspect_enumerator, obj, 0);
1222}
1223
1224/*
1225 * call-seq:
1226 * e.size -> int, Float::INFINITY or nil
1227 *
1228 * Returns the size of the enumerator, or +nil+ if it can't be calculated lazily.
1229 *
1230 * (1..100).to_a.permutation(4).size # => 94109400
1231 * loop.size # => Float::INFINITY
1232 * (1..100).drop_while.size # => nil
1233 *
1234 * Note that the enumerator size might be inaccurate and should rather be treated as a hint.
1235 * For example, there is no check that the size provided to ::new is accurate:
1236 *
1237 * e = Enumerator.new(5) { |y| 2.times { y << it} }
1238 * e.size # => 5
1239 * e.to_a.size # => 2
1240 *
1241 * Another example is an enumerator created by ::produce without a +size+ argument.
1242 * Such enumerators return +Infinity+ for size, but this is inaccurate if the passed
1243 * block raises StopIteration:
1244 *
1245 * e = Enumerator.produce(1) { it + 1 }
1246 * e.size # => Infinity
1247 *
1248 * e = Enumerator.produce(1) { it > 3 ? raise(StopIteration) : it + 1 }
1249 * e.size # => Infinity
1250 * e.to_a.size # => 4
1251 */
1252
1253static VALUE
1254enumerator_size(VALUE obj)
1255{
1256 struct enumerator *e = enumerator_ptr(obj);
1257 int argc = 0;
1258 const VALUE *argv = NULL;
1259 VALUE size;
1260
1261 if (e->procs) {
1262 struct generator *g = generator_ptr(e->obj);
1263 VALUE receiver = rb_check_funcall(g->obj, id_size, 0, 0);
1264 long i = 0;
1265
1266 for (i = 0; i < RARRAY_LEN(e->procs); i++) {
1267 VALUE proc = RARRAY_AREF(e->procs, i);
1268 struct proc_entry *entry = proc_entry_ptr(proc);
1269 lazyenum_size_func *size_fn = entry->fn->size;
1270 if (!size_fn) {
1271 return Qnil;
1272 }
1273 receiver = (*size_fn)(proc, receiver);
1274 }
1275 return receiver;
1276 }
1277
1278 if (e->size_fn) {
1279 return (*e->size_fn)(e->obj, e->args, obj);
1280 }
1281 if (e->args) {
1282 argc = (int)RARRAY_LEN(e->args);
1283 argv = RARRAY_CONST_PTR(e->args);
1284 }
1285 size = rb_check_funcall_kw(e->size, id_call, argc, argv, e->kw_splat);
1286 if (!UNDEF_P(size)) return size;
1287 return e->size;
1288}
1289
1290/*
1291 * Yielder
1292 */
1293static void
1294yielder_mark_and_move(void *p)
1295{
1296 struct yielder *ptr = p;
1297 rb_gc_mark_and_move(&ptr->proc);
1298}
1299
1300static const rb_data_type_t yielder_data_type = {
1301 "yielder",
1302 {
1303 yielder_mark_and_move,
1304 RUBY_TYPED_DEFAULT_FREE,
1305 NULL,
1306 yielder_mark_and_move,
1307 },
1308 0, 0, RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED | RUBY_TYPED_EMBEDDABLE
1309};
1310
1311static struct yielder *
1312yielder_ptr(VALUE obj)
1313{
1314 struct yielder *ptr;
1315
1316 TypedData_Get_Struct(obj, struct yielder, &yielder_data_type, ptr);
1317 if (!ptr || UNDEF_P(ptr->proc)) {
1318 rb_raise(rb_eArgError, "uninitialized yielder");
1319 }
1320 return ptr;
1321}
1322
1323/* :nodoc: */
1324static VALUE
1325yielder_allocate(VALUE klass)
1326{
1327 struct yielder *ptr;
1328 VALUE obj;
1329
1330 obj = TypedData_Make_Struct(klass, struct yielder, &yielder_data_type, ptr);
1331 ptr->proc = Qundef;
1332
1333 return obj;
1334}
1335
1336static VALUE
1337yielder_init(VALUE obj, VALUE proc)
1338{
1339 struct yielder *ptr;
1340
1341 TypedData_Get_Struct(obj, struct yielder, &yielder_data_type, ptr);
1342
1343 if (!ptr) {
1344 rb_raise(rb_eArgError, "unallocated yielder");
1345 }
1346
1347 RB_OBJ_WRITE(obj, &ptr->proc, proc);
1348
1349 return obj;
1350}
1351
1352/* :nodoc: */
1353static VALUE
1354yielder_initialize(VALUE obj)
1355{
1356 rb_need_block();
1357
1358 return yielder_init(obj, rb_block_proc());
1359}
1360
1361/* :nodoc: */
1362static VALUE
1363yielder_yield(VALUE obj, VALUE args)
1364{
1365 struct yielder *ptr = yielder_ptr(obj);
1366
1367 return rb_proc_call_kw(ptr->proc, args, RB_PASS_CALLED_KEYWORDS);
1368}
1369
1370/* :nodoc: */
1371static VALUE
1372yielder_yield_push(VALUE obj, VALUE arg)
1373{
1374 struct yielder *ptr = yielder_ptr(obj);
1375
1376 rb_proc_call_with_block(ptr->proc, 1, &arg, Qnil);
1377
1378 return obj;
1379}
1380
1381/*
1382 * Returns a Proc object that takes arguments and yields them.
1383 *
1384 * This method is implemented so that a Yielder object can be directly
1385 * passed to another method as a block argument.
1386 *
1387 * enum = Enumerator.new { |y|
1388 * Dir.glob("*.rb") { |file|
1389 * File.open(file) { |f| f.each_line(&y) }
1390 * }
1391 * }
1392 */
1393static VALUE
1394yielder_to_proc(VALUE obj)
1395{
1396 VALUE method = rb_obj_method(obj, sym_yield);
1397
1398 return rb_funcall(method, idTo_proc, 0);
1399}
1400
1401static VALUE
1402yielder_yield_i(RB_BLOCK_CALL_FUNC_ARGLIST(obj, memo))
1403{
1404 return rb_yield_values_kw(argc, argv, RB_PASS_CALLED_KEYWORDS);
1405}
1406
1407static VALUE
1408yielder_new(void)
1409{
1410 return yielder_init(yielder_allocate(rb_cYielder), rb_proc_new(yielder_yield_i, 0));
1411}
1412
1413/*
1414 * Generator
1415 */
1416static void
1417generator_mark_and_move(void *p)
1418{
1419 struct generator *ptr = p;
1420 rb_gc_mark_and_move(&ptr->proc);
1421 rb_gc_mark_and_move(&ptr->obj);
1422}
1423
1424static const rb_data_type_t generator_data_type = {
1425 "generator",
1426 {
1427 generator_mark_and_move,
1428 RUBY_TYPED_DEFAULT_FREE,
1429 NULL,
1430 generator_mark_and_move,
1431 },
1432 0, 0, RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED | RUBY_TYPED_EMBEDDABLE
1433};
1434
1435static struct generator *
1436generator_ptr(VALUE obj)
1437{
1438 struct generator *ptr;
1439
1440 TypedData_Get_Struct(obj, struct generator, &generator_data_type, ptr);
1441 if (!ptr || UNDEF_P(ptr->proc)) {
1442 rb_raise(rb_eArgError, "uninitialized generator");
1443 }
1444 return ptr;
1445}
1446
1447/* :nodoc: */
1448static VALUE
1449generator_allocate(VALUE klass)
1450{
1451 struct generator *ptr;
1452 VALUE obj;
1453
1454 obj = TypedData_Make_Struct(klass, struct generator, &generator_data_type, ptr);
1455 ptr->proc = Qundef;
1456
1457 return obj;
1458}
1459
1460static VALUE
1461generator_init(VALUE obj, VALUE proc)
1462{
1463 struct generator *ptr;
1464
1465 rb_check_frozen(obj);
1466 TypedData_Get_Struct(obj, struct generator, &generator_data_type, ptr);
1467
1468 if (!ptr) {
1469 rb_raise(rb_eArgError, "unallocated generator");
1470 }
1471
1472 RB_OBJ_WRITE(obj, &ptr->proc, proc);
1473
1474 return obj;
1475}
1476
1477/* :nodoc: */
1478static VALUE
1479generator_initialize(int argc, VALUE *argv, VALUE obj)
1480{
1481 VALUE proc;
1482
1483 if (argc == 0) {
1484 rb_need_block();
1485
1486 proc = rb_block_proc();
1487 }
1488 else {
1489 rb_scan_args(argc, argv, "1", &proc);
1490
1491 if (!rb_obj_is_proc(proc))
1492 rb_raise(rb_eTypeError,
1493 "wrong argument type %"PRIsVALUE" (expected Proc)",
1494 rb_obj_class(proc));
1495
1496 if (rb_block_given_p()) {
1497 rb_warn("given block not used");
1498 }
1499 }
1500
1501 return generator_init(obj, proc);
1502}
1503
1504/* :nodoc: */
1505static VALUE
1506generator_init_copy(VALUE obj, VALUE orig)
1507{
1508 struct generator *ptr0, *ptr1;
1509
1510 if (!OBJ_INIT_COPY(obj, orig)) return obj;
1511
1512 ptr0 = generator_ptr(orig);
1513
1514 TypedData_Get_Struct(obj, struct generator, &generator_data_type, ptr1);
1515
1516 if (!ptr1) {
1517 rb_raise(rb_eArgError, "unallocated generator");
1518 }
1519
1520 RB_OBJ_WRITE(obj, &ptr1->proc, ptr0->proc);
1521
1522 return obj;
1523}
1524
1525/* :nodoc: */
1526static VALUE
1527generator_each(int argc, VALUE *argv, VALUE obj)
1528{
1529 struct generator *ptr = generator_ptr(obj);
1530 VALUE args = rb_ary_new2(argc + 1);
1531
1532 rb_ary_push(args, yielder_new());
1533 if (argc > 0) {
1534 rb_ary_cat(args, argv, argc);
1535 }
1536
1537 return rb_proc_call_kw(ptr->proc, args, RB_PASS_CALLED_KEYWORDS);
1538}
1539
1540/* Lazy Enumerator methods */
1541static VALUE
1542enum_size(VALUE self)
1543{
1544 VALUE r = rb_check_funcall(self, id_size, 0, 0);
1545 return UNDEF_P(r) ? Qnil : r;
1546}
1547
1548static VALUE
1549lazyenum_size(VALUE self, VALUE args, VALUE eobj)
1550{
1551 return enum_size(self);
1552}
1553
1554#define lazy_receiver_size lazy_map_size
1555
1556static VALUE
1557lazy_init_iterator(RB_BLOCK_CALL_FUNC_ARGLIST(val, m))
1558{
1559 VALUE result;
1560 if (argc == 1) {
1561 VALUE args[2];
1562 args[0] = m;
1563 args[1] = val;
1564 result = rb_yield_values2(2, args);
1565 }
1566 else {
1567 VALUE args;
1568 int len = rb_long2int((long)argc + 1);
1569 VALUE *nargv = ALLOCV_N(VALUE, args, len);
1570
1571 nargv[0] = m;
1572 if (argc > 0) {
1573 MEMCPY(nargv + 1, argv, VALUE, argc);
1574 }
1575 result = rb_yield_values2(len, nargv);
1576 ALLOCV_END(args);
1577 }
1578 if (UNDEF_P(result)) rb_iter_break();
1579 return Qnil;
1580}
1581
1582static VALUE
1583lazy_init_block_i(RB_BLOCK_CALL_FUNC_ARGLIST(val, m))
1584{
1585 rb_block_call(m, id_each, argc-1, argv+1, lazy_init_iterator, val);
1586 return Qnil;
1587}
1588
1589#define memo_value v2
1590#define memo_flags u3.state
1591#define LAZY_MEMO_BREAK 1
1592#define LAZY_MEMO_PACKED 2
1593#define LAZY_MEMO_BREAK_P(memo) ((memo)->memo_flags & LAZY_MEMO_BREAK)
1594#define LAZY_MEMO_PACKED_P(memo) ((memo)->memo_flags & LAZY_MEMO_PACKED)
1595#define LAZY_MEMO_SET_BREAK(memo) ((memo)->memo_flags |= LAZY_MEMO_BREAK)
1596#define LAZY_MEMO_RESET_BREAK(memo) ((memo)->memo_flags &= ~LAZY_MEMO_BREAK)
1597#define LAZY_MEMO_SET_VALUE(memo, value) MEMO_V2_SET(memo, value)
1598#define LAZY_MEMO_SET_PACKED(memo) ((memo)->memo_flags |= LAZY_MEMO_PACKED)
1599#define LAZY_MEMO_RESET_PACKED(memo) ((memo)->memo_flags &= ~LAZY_MEMO_PACKED)
1600
1601#define LAZY_NEED_BLOCK(func) \
1602 if (!rb_block_given_p()) { \
1603 rb_raise(rb_eArgError, "tried to call lazy " #func " without a block"); \
1604 }
1605
1606static VALUE lazy_yielder_result(struct MEMO *result, VALUE yielder, VALUE procs_array, VALUE memos, long i);
1607
1608static VALUE
1609lazy_init_yielder(RB_BLOCK_CALL_FUNC_ARGLIST(_, m))
1610{
1611 VALUE yielder = RARRAY_AREF(m, 0);
1612 VALUE procs_array = RARRAY_AREF(m, 1);
1613 VALUE memos = rb_attr_get(yielder, id_memo);
1614 struct MEMO *result;
1615
1616 result = rb_imemo_memo_new(m, rb_enum_values_pack(argc, argv),
1617 argc > 1 ? LAZY_MEMO_PACKED : 0);
1618 return lazy_yielder_result(result, yielder, procs_array, memos, 0);
1619}
1620
1621static VALUE
1622lazy_yielder_yield(struct MEMO *result, long memo_index, int argc, const VALUE *argv)
1623{
1624 VALUE m = result->v1;
1625 VALUE yielder = RARRAY_AREF(m, 0);
1626 VALUE procs_array = RARRAY_AREF(m, 1);
1627 VALUE memos = rb_attr_get(yielder, id_memo);
1628 LAZY_MEMO_SET_VALUE(result, rb_enum_values_pack(argc, argv));
1629 if (argc > 1)
1630 LAZY_MEMO_SET_PACKED(result);
1631 else
1632 LAZY_MEMO_RESET_PACKED(result);
1633 return lazy_yielder_result(result, yielder, procs_array, memos, memo_index);
1634}
1635
1636static VALUE
1637lazy_yielder_result(struct MEMO *result, VALUE yielder, VALUE procs_array, VALUE memos, long i)
1638{
1639 int cont = 1;
1640
1641 for (; i < RARRAY_LEN(procs_array); i++) {
1642 VALUE proc = RARRAY_AREF(procs_array, i);
1643 struct proc_entry *entry = proc_entry_ptr(proc);
1644 if (!(*entry->fn->proc)(proc, result, memos, i)) {
1645 cont = 0;
1646 break;
1647 }
1648 }
1649
1650 if (cont) {
1651 rb_funcall2(yielder, idLTLT, 1, &(result->memo_value));
1652 }
1653 if (LAZY_MEMO_BREAK_P(result)) {
1654 rb_iter_break();
1655 }
1656 return result->memo_value;
1657}
1658
1659static VALUE
1660lazy_init_block(RB_BLOCK_CALL_FUNC_ARGLIST(val, m))
1661{
1662 VALUE procs = RARRAY_AREF(m, 1);
1663
1664 rb_ivar_set(val, id_memo, rb_ary_new2(RARRAY_LEN(procs)));
1665 rb_block_call(RARRAY_AREF(m, 0), id_each, 0, 0,
1666 lazy_init_yielder, rb_ary_new3(2, val, procs));
1667 return Qnil;
1668}
1669
1670static VALUE
1671lazy_generator_init(VALUE enumerator, VALUE procs)
1672{
1673 VALUE generator;
1674 VALUE obj;
1675 struct generator *gen_ptr;
1676 struct enumerator *e = enumerator_ptr(enumerator);
1677
1678 if (RARRAY_LEN(procs) > 0) {
1679 struct generator *old_gen_ptr = generator_ptr(e->obj);
1680 obj = old_gen_ptr->obj;
1681 }
1682 else {
1683 obj = enumerator;
1684 }
1685
1686 generator = generator_allocate(rb_cGenerator);
1687
1688 rb_block_call(generator, id_initialize, 0, 0,
1689 lazy_init_block, rb_ary_new3(2, obj, procs));
1690
1691 gen_ptr = generator_ptr(generator);
1692 RB_OBJ_WRITE(generator, &gen_ptr->obj, obj);
1693
1694 return generator;
1695}
1696
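/* Run each chained proc entry's precheck, if any; returns FALSE when a precheck reports that the enumeration can be skipped entirely. */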
1697static int
1698lazy_precheck(VALUE procs)
1699{
1700 if (RTEST(procs)) {
1701 long num_procs = RARRAY_LEN(procs), i = num_procs;
1702 while (i-- > 0) {
1703 VALUE proc = RARRAY_AREF(procs, i);
1704 struct proc_entry *entry = proc_entry_ptr(proc);
1705 lazyenum_precheck_func *precheck = entry->fn->precheck;
1706 if (precheck && !precheck(proc)) return FALSE;
1707 }
1708 }
1709
1710 return TRUE;
1711}
1712
1713/*
1714 * Document-class: Enumerator::Lazy
1715 *
1716 * Enumerator::Lazy is a special type of Enumerator that allows constructing
1717 * chains of operations without evaluating them immediately, and evaluating
1718 * values on an as-needed basis. In order to do so, it redefines most of the
1719 * Enumerable methods so that they just construct another lazy enumerator.
1720 *
1721 * Enumerator::Lazy can be constructed from any Enumerable with the
1722 * Enumerable#lazy method.
1723 *
1724 * lazy = (1..Float::INFINITY).lazy.select(&:odd?).drop(10).take_while { |i| i < 30 }
1725 * # => #<Enumerator::Lazy: #<Enumerator::Lazy: #<Enumerator::Lazy: #<Enumerator::Lazy: 1..Infinity>:select>:drop(10)>:take_while>
1726 *
1727 * The real enumeration is performed when any non-redefined Enumerable method
1728 * is called, like Enumerable#first or Enumerable#to_a (the latter is aliased
1729 * as #force for more semantic code):
1730 *
1731 * lazy.first(2)
1732 * #=> [21, 23]
1733 *
1734 * lazy.force
1735 * #=> [21, 23, 25, 27, 29]
1736 *
1737 * Note that most Enumerable methods that can be called with or without
1738 * a block will always require a block on Enumerator::Lazy:
1739 *
1740 * [1, 2, 3].map #=> #<Enumerator: [1, 2, 3]:map>
1741 * [1, 2, 3].lazy.map # ArgumentError: tried to call lazy map without a block
1742 *
1743 * This class allows idiomatic calculations on long or infinite sequences, as well
1744 * as chaining of calculations without constructing intermediate arrays.
1745 *
1746 * Example for working with a slowly calculated sequence:
1747 *
1748 * require 'open-uri'
1749 *
1750 * # This will fetch all URLs before selecting
1751 * # necessary data
1752 * URLS.map { |u| JSON.parse(URI.open(u).read) }
1753 * .select { |data| data.key?('stats') }
1754 * .first(5)
1755 *
1756 * # This will fetch URLs one-by-one, only until
1757 * # there is enough data to satisfy the condition
1758 * URLS.lazy.map { |u| JSON.parse(URI.open(u).read) }
1759 * .select { |data| data.key?('stats') }
1760 * .first(5)
1761 *
1762 * Ending a chain with ".eager" generates a non-lazy enumerator, which
1763 * is suitable for returning or passing to another method that expects
1764 * a normal enumerator.
1765 *
1766 * def active_items
1767 * groups
1768 * .lazy
1769 * .flat_map(&:items)
1770 * .reject(&:disabled)
1771 * .eager
1772 * end
1773 *
1774 * # This works lazily; if a checked item is found, it stops
1775 * # iteration and does not look into remaining groups.
1776 * first_checked = active_items.find(&:checked)
1777 *
1778 * # This returns an array of items like a normal enumerator does.
1779 * all_checked = active_items.select(&:checked)
1780 *
1781 */
1782
1783/*
1784 * call-seq:
1785 * Lazy.new(obj, size=nil) { |yielder, *values| block }
1786 *
1787 * Creates a new Lazy enumerator. When the enumerator is actually enumerated
1788 * (e.g. by calling #force), +obj+ will be enumerated and each value passed
1789 * to the given block. The block can yield values back using +yielder+.
1790 * For example, to create a "filter+map" enumerator:
1791 *
1792 * def filter_map(sequence)
1793 * Lazy.new(sequence) do |yielder, *values|
1794 * result = yield *values
1795 * yielder << result if result
1796 * end
1797 * end
1798 *
1799 * filter_map(1..Float::INFINITY) {|i| i*i if i.even?}.first(5)
1800 * #=> [4, 16, 36, 64, 100]
1801 */
1802static VALUE
1803lazy_initialize(int argc, VALUE *argv, VALUE self)
1804{
1805 VALUE obj, size = Qnil;
1806 VALUE generator;
1807
1808 rb_check_arity(argc, 1, 2);
1809 LAZY_NEED_BLOCK(new);
1810 obj = argv[0];
1811 if (argc > 1) {
1812 size = argv[1];
1813 }
1814 generator = generator_allocate(rb_cGenerator);
1815 rb_block_call(generator, id_initialize, 0, 0, lazy_init_block_i, obj);
1816 enumerator_init(self, generator, sym_each, 0, 0, 0, size, 0);
1817 rb_ivar_set(self, id_receiver, obj);
1818
1819 return self;
1820}
1821
1822#if 0 /* for RDoc */
1823/*
1824 * call-seq:
1825 * lazy.to_a -> array
1826 * lazy.force -> array
1827 *
1828 * Expands +lazy+ enumerator to an array.
1829 * See Enumerable#to_a.
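 *
 * (1..5).lazy.map { |i| i * 10 }.force
 * # => [10, 20, 30, 40, 50]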
1830 */
1831static VALUE
1832lazy_to_a(VALUE self)
1833{
1834}
1835#endif
1836
1837static void
1838lazy_set_args(VALUE lazy, VALUE args)
1839{
1840 ID id = rb_frame_this_func();
1841 rb_ivar_set(lazy, id_method, ID2SYM(id));
1842 if (NIL_P(args)) {
1843 /* Qfalse indicates that the arguments are empty */
1844 rb_ivar_set(lazy, id_arguments, Qfalse);
1845 }
1846 else {
1847 rb_ivar_set(lazy, id_arguments, args);
1848 }
1849}
1850
1851#if 0
1852static VALUE
1853lazy_set_method(VALUE lazy, VALUE args, rb_enumerator_size_func *size_fn)
1854{
1855 struct enumerator *e = enumerator_ptr(lazy);
1856 lazy_set_args(lazy, args);
1857 e->size_fn = size_fn;
1858 return lazy;
1859}
1860#endif
1861
1862static VALUE
1863lazy_add_method(VALUE obj, int argc, VALUE *argv, VALUE args, VALUE memo,
1864 const lazyenum_funcs *fn)
1865{
1866 struct enumerator *new_e;
1867 VALUE new_obj;
1868 VALUE new_generator;
1869 VALUE new_procs;
1870 struct enumerator *e = enumerator_ptr(obj);
1871 struct proc_entry *entry;
1872 VALUE entry_obj = TypedData_Make_Struct(rb_cObject, struct proc_entry,
1873 &proc_entry_data_type, entry);
1874 if (rb_block_given_p()) {
1875 RB_OBJ_WRITE(entry_obj, &entry->proc, rb_block_proc());
1876 }
1877 entry->fn = fn;
1878 RB_OBJ_WRITE(entry_obj, &entry->memo, args);
1879
1880 lazy_set_args(entry_obj, memo);
1881
1882 new_procs = RTEST(e->procs) ? rb_ary_dup(e->procs) : rb_ary_new();
1883 new_generator = lazy_generator_init(obj, new_procs);
1884 rb_ary_push(new_procs, entry_obj);
1885
1886 new_obj = enumerator_init_copy(enumerator_allocate(rb_cLazy), obj);
1887 new_e = RTYPEDDATA_GET_DATA(new_obj);
1888 RB_OBJ_WRITE(new_obj, &new_e->obj, new_generator);
1889 RB_OBJ_WRITE(new_obj, &new_e->procs, new_procs);
1890
1891 if (argc > 0) {
1892 new_e->meth = rb_to_id(*argv++);
1893 --argc;
1894 }
1895 else {
1896 new_e->meth = id_each;
1897 }
1898
1899 RB_OBJ_WRITE(new_obj, &new_e->args, rb_ary_new4(argc, argv));
1900
1901 return new_obj;
1902}
1903
1904/*
1905 * call-seq:
1906 * e.lazy -> lazy_enumerator
1907 *
1908 * Returns an Enumerator::Lazy, which redefines most Enumerable
1909 * methods to postpone enumeration and enumerate values only on an
1910 * as-needed basis.
1911 *
1912 * === Example
1913 *
1914 * The following program finds pythagorean triples:
1915 *
1916 * def pythagorean_triples
1917 * (1..Float::INFINITY).lazy.flat_map {|z|
1918 * (1..z).flat_map {|x|
1919 * (x..z).select {|y|
1920 * x**2 + y**2 == z**2
1921 * }.map {|y|
1922 * [x, y, z]
1923 * }
1924 * }
1925 * }
1926 * end
1927 * # show first ten pythagorean triples
1928 * p pythagorean_triples.take(10).force # take is lazy, so force is needed
1929 * p pythagorean_triples.first(10) # first is eager
1930 * # show pythagorean triples less than 100
1931 * p pythagorean_triples.take_while { |*, z| z < 100 }.force
1932 */
1933static VALUE
1934enumerable_lazy(VALUE obj)
1935{
1936 VALUE result = lazy_to_enum_i(obj, sym_each, 0, 0, lazyenum_size, rb_keyword_given_p());
1937 /* Qfalse indicates that the Enumerator::Lazy has no method name */
1938 rb_ivar_set(result, id_method, Qfalse);
1939 return result;
1940}
1941
1942static VALUE
1943lazy_to_enum_i(VALUE obj, VALUE meth, int argc, const VALUE *argv, rb_enumerator_size_func *size_fn, int kw_splat)
1944{
1945 return enumerator_init(enumerator_allocate(rb_cLazy),
1946 obj, meth, argc, argv, size_fn, Qnil, kw_splat);
1947}
1948
1949/*
1950 * call-seq:
1951 * lzy.to_enum(method = :each, *args) -> lazy_enum
1952 * lzy.enum_for(method = :each, *args) -> lazy_enum
1953 * lzy.to_enum(method = :each, *args) {|*args| block } -> lazy_enum
1954 * lzy.enum_for(method = :each, *args) {|*args| block } -> lazy_enum
1955 *
1956 * Similar to Object#to_enum, except it returns a lazy enumerator.
1957 * This makes it easy to define Enumerable methods that will
1958 * naturally remain lazy if called from a lazy enumerator.
1959 *
1960 * For example, continuing from the example in Object#to_enum:
1961 *
1962 * # See Object#to_enum for the definition of repeat
1963 * r = 1..Float::INFINITY
1964 * r.repeat(2).first(5) # => [1, 1, 2, 2, 3]
1965 * r.repeat(2).class # => Enumerator
1966 * r.repeat(2).map{|n| n ** 2}.first(5) # => endless loop!
1967 * # works naturally on lazy enumerator:
1968 * r.lazy.repeat(2).class # => Enumerator::Lazy
1969 * r.lazy.repeat(2).map{|n| n ** 2}.first(5) # => [1, 1, 4, 4, 9]
1970 */
1971
1972static VALUE
1973lazy_to_enum(int argc, VALUE *argv, VALUE self)
1974{
1975 VALUE lazy, meth = sym_each, super_meth;
1976
1977 if (argc > 0) {
1978 --argc;
1979 meth = *argv++;
1980 }
1981 if (RTEST((super_meth = rb_hash_aref(lazy_use_super_method, meth)))) {
1982 meth = super_meth;
1983 }
1984 lazy = lazy_to_enum_i(self, meth, argc, argv, 0, rb_keyword_given_p());
1985 if (rb_block_given_p()) {
1986 RB_OBJ_WRITE(lazy, &enumerator_ptr(lazy)->size, rb_block_proc());
1987 }
1988 return lazy;
1989}
1990
1991static VALUE
1992lazy_eager_size(VALUE self, VALUE args, VALUE eobj)
1993{
1994 return enum_size(self);
1995}
1996
1997/*
1998 * call-seq:
1999 * lzy.eager -> enum
2000 *
2001 * Returns a non-lazy Enumerator converted from the lazy enumerator.
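 *
 * lazy = (1..Float::INFINITY).lazy.select(&:odd?)
 * lazy.class # => Enumerator::Lazy
 * lazy.eager.class # => Enumerator
 * lazy.eager.first(3) # => [1, 3, 5]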
2002 */
2003
2004static VALUE
2005lazy_eager(VALUE self)
2006{
2007 return enumerator_init(enumerator_allocate(rb_cEnumerator),
2008 self, sym_each, 0, 0, lazy_eager_size, Qnil, 0);
2009}
2010
2011static VALUE
2012lazyenum_yield(VALUE proc_entry, struct MEMO *result)
2013{
2014 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2015 return rb_proc_call_with_block(entry->proc, 1, &result->memo_value, Qnil);
2016}
2017
2018static VALUE
2019lazyenum_yield_values(VALUE proc_entry, struct MEMO *result)
2020{
2021 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2022 int argc = 1;
2023 const VALUE *argv = &result->memo_value;
2024 if (LAZY_MEMO_PACKED_P(result)) {
2025 const VALUE args = *argv;
2026 argc = RARRAY_LENINT(args);
2027 argv = RARRAY_CONST_PTR(args);
2028 }
2029 return rb_proc_call_with_block(entry->proc, argc, argv, Qnil);
2030}
2031
2032static struct MEMO *
2033lazy_map_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2034{
2035 VALUE value = lazyenum_yield_values(proc_entry, result);
2036 LAZY_MEMO_SET_VALUE(result, value);
2037 LAZY_MEMO_RESET_PACKED(result);
2038 return result;
2039}
2040
2041static VALUE
2042lazy_map_size(VALUE entry, VALUE receiver)
2043{
2044 return receiver;
2045}
2046
2047static const lazyenum_funcs lazy_map_funcs = {
2048 lazy_map_proc, lazy_map_size,
2049};
2050
2051/*
2052 * call-seq:
2053 * lazy.collect { |obj| block } -> lazy_enumerator
2054 * lazy.map { |obj| block } -> lazy_enumerator
2055 *
2056 * Like Enumerable#map, but chains operation to be lazy-evaluated.
2057 *
2058 * (1..Float::INFINITY).lazy.map {|i| i**2 }
2059 * #=> #<Enumerator::Lazy: #<Enumerator::Lazy: 1..Infinity>:map>
2060 * (1..Float::INFINITY).lazy.map {|i| i**2 }.first(3)
2061 * #=> [1, 4, 9]
2062 */
2063
2064static VALUE
2065lazy_map(VALUE obj)
2066{
2067 LAZY_NEED_BLOCK(map);
2068 return lazy_add_method(obj, 0, 0, Qnil, Qnil, &lazy_map_funcs);
2069}
2070
2071struct flat_map_i_arg {
2072 struct MEMO *result;
2073 long index;
2074};
2075
2076static VALUE
2077lazy_flat_map_i(RB_BLOCK_CALL_FUNC_ARGLIST(i, y))
2078{
2079 struct flat_map_i_arg *arg = (struct flat_map_i_arg *)y;
2080
2081 return lazy_yielder_yield(arg->result, arg->index, argc, argv);
2082}
2083
2084static struct MEMO *
2085lazy_flat_map_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2086{
2087 VALUE value = lazyenum_yield_values(proc_entry, result);
2088 VALUE ary = 0;
2089 const long proc_index = memo_index + 1;
2090 int break_p = LAZY_MEMO_BREAK_P(result);
2091
2092 if (RB_TYPE_P(value, T_ARRAY)) {
2093 ary = value;
2094 }
2095 else if (rb_respond_to(value, id_force) && rb_respond_to(value, id_each)) {
2096 struct flat_map_i_arg arg = {.result = result, .index = proc_index};
2097 LAZY_MEMO_RESET_BREAK(result);
2098 rb_block_call(value, id_each, 0, 0, lazy_flat_map_i, (VALUE)&arg);
2099 if (break_p) LAZY_MEMO_SET_BREAK(result);
2100 return 0;
2101 }
2102
2103 if (ary || !NIL_P(ary = rb_check_array_type(value))) {
2104 long i;
2105 LAZY_MEMO_RESET_BREAK(result);
2106 for (i = 0; i + 1 < RARRAY_LEN(ary); i++) {
2107 const VALUE argv = RARRAY_AREF(ary, i);
2108 lazy_yielder_yield(result, proc_index, 1, &argv);
2109 }
2110 if (break_p) LAZY_MEMO_SET_BREAK(result);
2111 if (i >= RARRAY_LEN(ary)) return 0;
2112 value = RARRAY_AREF(ary, i);
2113 }
2114 LAZY_MEMO_SET_VALUE(result, value);
2115 LAZY_MEMO_RESET_PACKED(result);
2116 return result;
2117}
2118
2119static const lazyenum_funcs lazy_flat_map_funcs = {
2120 lazy_flat_map_proc, 0,
2121};
2122
2123/*
2124 * call-seq:
2125 * lazy.collect_concat { |obj| block } -> a_lazy_enumerator
2126 * lazy.flat_map { |obj| block } -> a_lazy_enumerator
2127 *
2128 * Returns a new lazy enumerator with the concatenated results of running
2129 * +block+ once for every element in the lazy enumerator.
2130 *
2131 * ["foo", "bar"].lazy.flat_map {|i| i.each_char.lazy}.force
2132 * #=> ["f", "o", "o", "b", "a", "r"]
2133 *
2134 * A value +x+ returned by +block+ is decomposed if either of
2135 * the following conditions is true:
2136 *
2137 * * +x+ responds to both each and force, which means that
2138 * +x+ is a lazy enumerator.
2139 * * +x+ is an array or responds to to_ary.
2140 *
2141 * Otherwise, +x+ is contained as-is in the return value.
2142 *
2143 * [{a:1}, {b:2}].lazy.flat_map {|i| i}.force
2144 * #=> [{:a=>1}, {:b=>2}]
2145 */
2146static VALUE
2147lazy_flat_map(VALUE obj)
2148{
2149 LAZY_NEED_BLOCK(flat_map);
2150 return lazy_add_method(obj, 0, 0, Qnil, Qnil, &lazy_flat_map_funcs);
2151}
2152
2153static struct MEMO *
2154lazy_select_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2155{
2156 VALUE chain = lazyenum_yield(proc_entry, result);
2157 if (!RTEST(chain)) return 0;
2158 return result;
2159}
2160
2161static const lazyenum_funcs lazy_select_funcs = {
2162 lazy_select_proc, 0,
2163};
2164
2165/*
2166 * call-seq:
2167 * lazy.find_all { |obj| block } -> lazy_enumerator
2168 * lazy.select { |obj| block } -> lazy_enumerator
2169 * lazy.filter { |obj| block } -> lazy_enumerator
2170 *
2171 * Like Enumerable#select, but chains operation to be lazy-evaluated.
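 *
 * For example:
 *
 *   (1..Float::INFINITY).lazy.select { |i| i % 3 == 0 }.first(3)
 *   #=> [3, 6, 9]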
2172 */
2173static VALUE
2174lazy_select(VALUE obj)
2175{
2176 LAZY_NEED_BLOCK(select);
2177 return lazy_add_method(obj, 0, 0, Qnil, Qnil, &lazy_select_funcs);
2178}
2179
2180static struct MEMO *
2181lazy_filter_map_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2182{
2183 VALUE value = lazyenum_yield_values(proc_entry, result);
2184 if (!RTEST(value)) return 0;
2185 LAZY_MEMO_SET_VALUE(result, value);
2186 LAZY_MEMO_RESET_PACKED(result);
2187 return result;
2188}
2189
2190static const lazyenum_funcs lazy_filter_map_funcs = {
2191 lazy_filter_map_proc, 0,
2192};
2193
2194/*
2195 * call-seq:
2196 * lazy.filter_map { |obj| block } -> lazy_enumerator
2197 *
2198 * Like Enumerable#filter_map, but chains operation to be lazy-evaluated.
2199 *
2200 * (1..).lazy.filter_map { |i| i * 2 if i.even? }.first(5)
2201 * #=> [4, 8, 12, 16, 20]
2202 */
2203
2204static VALUE
2205lazy_filter_map(VALUE obj)
2206{
2207 LAZY_NEED_BLOCK(filter_map);
2208 return lazy_add_method(obj, 0, 0, Qnil, Qnil, &lazy_filter_map_funcs);
2209}
2210
2211static struct MEMO *
2212lazy_reject_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2213{
2214 VALUE chain = lazyenum_yield(proc_entry, result);
2215 if (RTEST(chain)) return 0;
2216 return result;
2217}
2218
2219static const lazyenum_funcs lazy_reject_funcs = {
2220 lazy_reject_proc, 0,
2221};
2222
2223/*
2224 * call-seq:
2225 * lazy.reject { |obj| block } -> lazy_enumerator
2226 *
2227 * Like Enumerable#reject, but chains operation to be lazy-evaluated.
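 *
 * For example:
 *
 *   (1..Float::INFINITY).lazy.reject(&:even?).first(3)
 *   #=> [1, 3, 5]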
2228 */
2229
2230static VALUE
2231lazy_reject(VALUE obj)
2232{
2233 LAZY_NEED_BLOCK(reject);
2234 return lazy_add_method(obj, 0, 0, Qnil, Qnil, &lazy_reject_funcs);
2235}
2236
2237static struct MEMO *
2238lazy_grep_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2239{
2240 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2241 VALUE chain = rb_funcall(entry->memo, id_eqq, 1, result->memo_value);
2242 if (!RTEST(chain)) return 0;
2243 return result;
2244}
2245
2246static struct MEMO *
2247lazy_grep_iter_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2248{
2249 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2250 VALUE value, chain = rb_funcall(entry->memo, id_eqq, 1, result->memo_value);
2251
2252 if (!RTEST(chain)) return 0;
2253 value = rb_proc_call_with_block(entry->proc, 1, &(result->memo_value), Qnil);
2254 LAZY_MEMO_SET_VALUE(result, value);
2255 LAZY_MEMO_RESET_PACKED(result);
2256
2257 return result;
2258}
2259
2260static const lazyenum_funcs lazy_grep_iter_funcs = {
2261 lazy_grep_iter_proc, 0,
2262};
2263
2264static const lazyenum_funcs lazy_grep_funcs = {
2265 lazy_grep_proc, 0,
2266};
2267
2268/*
2269 * call-seq:
2270 * lazy.grep(pattern) -> lazy_enumerator
2271 * lazy.grep(pattern) { |obj| block } -> lazy_enumerator
2272 *
2273 * Like Enumerable#grep, but chains operation to be lazy-evaluated.
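 *
 * For example:
 *
 *   (1..Float::INFINITY).lazy.grep(3..).first(3)
 *   #=> [3, 4, 5]
 *   (1..Float::INFINITY).lazy.grep(3..) { |i| i * 10 }.first(3)
 *   #=> [30, 40, 50]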
2274 */
2275
2276static VALUE
2277lazy_grep(VALUE obj, VALUE pattern)
2278{
2279 const lazyenum_funcs *const funcs = rb_block_given_p() ?
2280 &lazy_grep_iter_funcs : &lazy_grep_funcs;
2281 return lazy_add_method(obj, 0, 0, pattern, rb_ary_new3(1, pattern), funcs);
2282}
2283
2284static struct MEMO *
2285lazy_grep_v_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2286{
2287 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2288 VALUE chain = rb_funcall(entry->memo, id_eqq, 1, result->memo_value);
2289 if (RTEST(chain)) return 0;
2290 return result;
2291}
2292
2293static struct MEMO *
2294lazy_grep_v_iter_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2295{
2296 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2297 VALUE value, chain = rb_funcall(entry->memo, id_eqq, 1, result->memo_value);
2298
2299 if (RTEST(chain)) return 0;
2300 value = rb_proc_call_with_block(entry->proc, 1, &(result->memo_value), Qnil);
2301 LAZY_MEMO_SET_VALUE(result, value);
2302 LAZY_MEMO_RESET_PACKED(result);
2303
2304 return result;
2305}
2306
2307static const lazyenum_funcs lazy_grep_v_iter_funcs = {
2308 lazy_grep_v_iter_proc, 0,
2309};
2310
2311static const lazyenum_funcs lazy_grep_v_funcs = {
2312 lazy_grep_v_proc, 0,
2313};
2314
2315/*
2316 * call-seq:
2317 * lazy.grep_v(pattern) -> lazy_enumerator
2318 * lazy.grep_v(pattern) { |obj| block } -> lazy_enumerator
2319 *
2320 * Like Enumerable#grep_v, but chains operation to be lazy-evaluated.
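 *
 * For example:
 *
 *   (1..Float::INFINITY).lazy.grep_v(2..4).first(4)
 *   #=> [1, 5, 6, 7]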
2321 */
2322
2323static VALUE
2324lazy_grep_v(VALUE obj, VALUE pattern)
2325{
2326 const lazyenum_funcs *const funcs = rb_block_given_p() ?
2327 &lazy_grep_v_iter_funcs : &lazy_grep_v_funcs;
2328 return lazy_add_method(obj, 0, 0, pattern, rb_ary_new3(1, pattern), funcs);
2329}
2330
2331static VALUE
2332call_next(VALUE obj)
2333{
2334 return rb_funcall(obj, id_next, 0);
2335}
2336
2337static VALUE
2338next_stopped(VALUE obj, VALUE _)
2339{
2340 return Qnil;
2341}
2342
2343static struct MEMO *
2344lazy_zip_arrays_func(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2345{
2346 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2347 VALUE ary, arrays = entry->memo;
2348 VALUE memo = rb_ary_entry(memos, memo_index);
2349 long i, count = NIL_P(memo) ? 0 : NUM2LONG(memo);
2350
2351 ary = rb_ary_new2(RARRAY_LEN(arrays) + 1);
2352 rb_ary_push(ary, result->memo_value);
2353 for (i = 0; i < RARRAY_LEN(arrays); i++) {
2354 rb_ary_push(ary, rb_ary_entry(RARRAY_AREF(arrays, i), count));
2355 }
2356 LAZY_MEMO_SET_VALUE(result, ary);
2357 rb_ary_store(memos, memo_index, LONG2NUM(++count));
2358 return result;
2359}
2360
2361static struct MEMO *
2362lazy_zip_func(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2363{
2364 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2365 VALUE arg = rb_ary_entry(memos, memo_index);
2366 VALUE zip_args = entry->memo;
2367 VALUE ary, v;
2368 long i;
2369
2370 if (NIL_P(arg)) {
2371 arg = rb_ary_new2(RARRAY_LEN(zip_args));
2372 for (i = 0; i < RARRAY_LEN(zip_args); i++) {
2373 rb_ary_push(arg, rb_funcall(RARRAY_AREF(zip_args, i), id_to_enum, 0));
2374 }
2375 rb_ary_store(memos, memo_index, arg);
2376 }
2377
2378 ary = rb_ary_new2(RARRAY_LEN(arg) + 1);
2379 rb_ary_push(ary, result->memo_value);
2380 for (i = 0; i < RARRAY_LEN(arg); i++) {
2381 v = rb_rescue2(call_next, RARRAY_AREF(arg, i), next_stopped, 0,
2382                       rb_eStopIteration, (VALUE)0);
2383        rb_ary_push(ary, v);
2384 }
2385 LAZY_MEMO_SET_VALUE(result, ary);
2386 return result;
2387}
2388
2389static const lazyenum_funcs lazy_zip_funcs[] = {
2390 {lazy_zip_func, lazy_receiver_size,},
2391 {lazy_zip_arrays_func, lazy_receiver_size,},
2392};
2393
2394/*
2395 * call-seq:
2396 * lazy.zip(arg, ...) -> lazy_enumerator
2397 * lazy.zip(arg, ...) { |arr| block } -> nil
2398 *
2399 * Like Enumerable#zip, but chains operation to be lazy-evaluated.
2400 * However, if a block is given to zip, values are enumerated immediately.
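 *
 * For example:
 *
 *   (1..Float::INFINITY).lazy.zip(('a'..'z').to_a).first(3)
 *   #=> [[1, "a"], [2, "b"], [3, "c"]]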
2401 */
2402static VALUE
2403lazy_zip(int argc, VALUE *argv, VALUE obj)
2404{
2405 VALUE ary, v;
2406 long i;
2407 const lazyenum_funcs *funcs = &lazy_zip_funcs[1];
2408
2409 if (rb_block_given_p()) {
2410 return rb_call_super(argc, argv);
2411 }
2412
2413 ary = rb_ary_new2(argc);
2414 for (i = 0; i < argc; i++) {
2415 v = rb_check_array_type(argv[i]);
2416 if (NIL_P(v)) {
2417 for (; i < argc; i++) {
2418 if (!rb_respond_to(argv[i], id_each)) {
2419 rb_raise(rb_eTypeError, "wrong argument type %"PRIsVALUE" (must respond to :each)",
2420 rb_obj_class(argv[i]));
2421 }
2422 }
2423 ary = rb_ary_new4(argc, argv);
2424 funcs = &lazy_zip_funcs[0];
2425 break;
2426 }
2427 rb_ary_push(ary, v);
2428 }
2429
2430 return lazy_add_method(obj, 0, 0, ary, ary, funcs);
2431}
2432
2433static struct MEMO *
2434lazy_take_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2435{
2436 long remain;
2437 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2438 VALUE memo = rb_ary_entry(memos, memo_index);
2439
2440 if (NIL_P(memo)) {
2441 memo = entry->memo;
2442 }
2443
2444 remain = NUM2LONG(memo);
2445 if (--remain == 0) LAZY_MEMO_SET_BREAK(result);
2446 rb_ary_store(memos, memo_index, LONG2NUM(remain));
2447 return result;
2448}
2449
2450static VALUE
2451lazy_take_size(VALUE entry, VALUE receiver)
2452{
2453 long len = NUM2LONG(RARRAY_AREF(rb_ivar_get(entry, id_arguments), 0));
2454 if (NIL_P(receiver) || (FIXNUM_P(receiver) && FIX2LONG(receiver) < len))
2455 return receiver;
2456 return LONG2NUM(len);
2457}
2458
2459static int
2460lazy_take_precheck(VALUE proc_entry)
2461{
2462 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2463 return entry->memo != INT2FIX(0);
2464}
2465
2466static const lazyenum_funcs lazy_take_funcs = {
2467 lazy_take_proc, lazy_take_size, lazy_take_precheck,
2468};
2469
2470/*
2471 * call-seq:
2472 * lazy.take(n) -> lazy_enumerator
2473 *
2474 * Like Enumerable#take, but chains operation to be lazy-evaluated.
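 *
 * For example:
 *
 *   (1..Float::INFINITY).lazy.take(3).force
 *   #=> [1, 2, 3]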
2475 */
2476
2477static VALUE
2478lazy_take(VALUE obj, VALUE n)
2479{
2480 long len = NUM2LONG(n);
2481
2482 if (len < 0) {
2483 rb_raise(rb_eArgError, "attempt to take negative size");
2484 }
2485
2486 n = LONG2NUM(len); /* no more conversion */
2487
2488 return lazy_add_method(obj, 0, 0, n, rb_ary_new3(1, n), &lazy_take_funcs);
2489}
2490
2491static struct MEMO *
2492lazy_take_while_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2493{
2494 VALUE take = lazyenum_yield_values(proc_entry, result);
2495 if (!RTEST(take)) {
2496 LAZY_MEMO_SET_BREAK(result);
2497 return 0;
2498 }
2499 return result;
2500}
2501
2502static const lazyenum_funcs lazy_take_while_funcs = {
2503 lazy_take_while_proc, 0,
2504};
2505
2506/*
2507 * call-seq:
2508 * lazy.take_while { |obj| block } -> lazy_enumerator
2509 *
2510 * Like Enumerable#take_while, but chains operation to be lazy-evaluated.
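 *
 * For example:
 *
 *   (1..Float::INFINITY).lazy.take_while { |i| i < 4 }.force
 *   #=> [1, 2, 3]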
2511 */
2512
2513static VALUE
2514lazy_take_while(VALUE obj)
2515{
2516 LAZY_NEED_BLOCK(take_while);
2517 return lazy_add_method(obj, 0, 0, Qnil, Qnil, &lazy_take_while_funcs);
2518}
2519
2520static VALUE
2521lazy_drop_size(VALUE proc_entry, VALUE receiver)
2522{
2523 long len = NUM2LONG(RARRAY_AREF(rb_ivar_get(proc_entry, id_arguments), 0));
2524 if (NIL_P(receiver))
2525 return receiver;
2526 if (FIXNUM_P(receiver)) {
2527 len = FIX2LONG(receiver) - len;
2528 return LONG2FIX(len < 0 ? 0 : len);
2529 }
2530 return rb_funcall(receiver, '-', 1, LONG2NUM(len));
2531}
2532
2533static struct MEMO *
2534lazy_drop_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2535{
2536 long remain;
2537 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2538 VALUE memo = rb_ary_entry(memos, memo_index);
2539
2540 if (NIL_P(memo)) {
2541 memo = entry->memo;
2542 }
2543 remain = NUM2LONG(memo);
2544 if (remain > 0) {
2545 --remain;
2546 rb_ary_store(memos, memo_index, LONG2NUM(remain));
2547 return 0;
2548 }
2549
2550 return result;
2551}
2552
2553static const lazyenum_funcs lazy_drop_funcs = {
2554 lazy_drop_proc, lazy_drop_size,
2555};
2556
2557/*
2558 * call-seq:
2559 * lazy.drop(n) -> lazy_enumerator
2560 *
2561 * Like Enumerable#drop, but chains operation to be lazy-evaluated.
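 *
 * For example:
 *
 *   (1..Float::INFINITY).lazy.drop(3).first(3)
 *   #=> [4, 5, 6]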
2562 */
2563
2564static VALUE
2565lazy_drop(VALUE obj, VALUE n)
2566{
2567 long len = NUM2LONG(n);
2568 VALUE argv[2];
2569 argv[0] = sym_each;
2570 argv[1] = n;
2571
2572 if (len < 0) {
2573 rb_raise(rb_eArgError, "attempt to drop negative size");
2574 }
2575
2576 return lazy_add_method(obj, 2, argv, n, rb_ary_new3(1, n), &lazy_drop_funcs);
2577}
2578
2579static struct MEMO *
2580lazy_drop_while_proc(VALUE proc_entry, struct MEMO* result, VALUE memos, long memo_index)
2581{
2582 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2583 VALUE memo = rb_ary_entry(memos, memo_index);
2584
2585 if (NIL_P(memo)) {
2586 memo = entry->memo;
2587 }
2588
2589 if (!RTEST(memo)) {
2590 VALUE drop = lazyenum_yield_values(proc_entry, result);
2591 if (RTEST(drop)) return 0;
2592 rb_ary_store(memos, memo_index, Qtrue);
2593 }
2594 return result;
2595}
2596
2597static const lazyenum_funcs lazy_drop_while_funcs = {
2598 lazy_drop_while_proc, 0,
2599};
2600
2601/*
2602 * call-seq:
2603 * lazy.drop_while { |obj| block } -> lazy_enumerator
2604 *
2605 * Like Enumerable#drop_while, but chains operation to be lazy-evaluated.
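 *
 * For example:
 *
 *   (1..Float::INFINITY).lazy.drop_while { |i| i < 4 }.first(3)
 *   #=> [4, 5, 6]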
2606 */
2607
2608static VALUE
2609lazy_drop_while(VALUE obj)
2610{
2611 LAZY_NEED_BLOCK(drop_while);
2612 return lazy_add_method(obj, 0, 0, Qfalse, Qnil, &lazy_drop_while_funcs);
2613}
2614
2615static int
2616lazy_uniq_check(VALUE chain, VALUE memos, long memo_index)
2617{
2618 VALUE hash = rb_ary_entry(memos, memo_index);
2619
2620 if (NIL_P(hash)) {
2621 hash = rb_obj_hide(rb_hash_new());
2622 rb_ary_store(memos, memo_index, hash);
2623 }
2624
2625 return rb_hash_add_new_element(hash, chain, Qfalse);
2626}
2627
2628static struct MEMO *
2629lazy_uniq_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2630{
2631 if (lazy_uniq_check(result->memo_value, memos, memo_index)) return 0;
2632 return result;
2633}
2634
2635static struct MEMO *
2636lazy_uniq_iter_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2637{
2638 VALUE chain = lazyenum_yield(proc_entry, result);
2639
2640 if (lazy_uniq_check(chain, memos, memo_index)) return 0;
2641 return result;
2642}
2643
2644static const lazyenum_funcs lazy_uniq_iter_funcs = {
2645 lazy_uniq_iter_proc, 0,
2646};
2647
2648static const lazyenum_funcs lazy_uniq_funcs = {
2649 lazy_uniq_proc, 0,
2650};
2651
2652/*
2653 * call-seq:
2654 * lazy.uniq -> lazy_enumerator
2655 * lazy.uniq { |item| block } -> lazy_enumerator
2656 *
2657 * Like Enumerable#uniq, but chains operation to be lazy-evaluated.
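 *
 * For example:
 *
 *   (1..Float::INFINITY).lazy.map { |i| i % 3 }.uniq.first(3)
 *   #=> [1, 2, 0]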
2658 */
2659
2660static VALUE
2661lazy_uniq(VALUE obj)
2662{
2663 const lazyenum_funcs *const funcs =
2664 rb_block_given_p() ? &lazy_uniq_iter_funcs : &lazy_uniq_funcs;
2665 return lazy_add_method(obj, 0, 0, Qnil, Qnil, funcs);
2666}
2667
2668static struct MEMO *
2669lazy_compact_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2670{
2671 if (NIL_P(result->memo_value)) return 0;
2672 return result;
2673}
2674
2675static const lazyenum_funcs lazy_compact_funcs = {
2676 lazy_compact_proc, 0,
2677};
2678
2679/*
2680 * call-seq:
2681 * lazy.compact -> lazy_enumerator
2682 *
2683 * Like Enumerable#compact, but chains operation to be lazy-evaluated.
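 *
 * For example:
 *
 *   [1, nil, 2, nil, 3].lazy.compact.force
 *   #=> [1, 2, 3]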
2684 */
2685
2686static VALUE
2687lazy_compact(VALUE obj)
2688{
2689 return lazy_add_method(obj, 0, 0, Qnil, Qnil, &lazy_compact_funcs);
2690}
2691
2692static struct MEMO *
2693lazy_with_index_proc(VALUE proc_entry, struct MEMO* result, VALUE memos, long memo_index)
2694{
2695 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2696 VALUE memo = rb_ary_entry(memos, memo_index);
2697 VALUE argv[2];
2698
2699 if (NIL_P(memo)) {
2700 memo = entry->memo;
2701 }
2702
2703 argv[0] = result->memo_value;
2704 argv[1] = memo;
2705 if (entry->proc) {
2706 rb_proc_call_with_block(entry->proc, 2, argv, Qnil);
2707 LAZY_MEMO_RESET_PACKED(result);
2708 }
2709 else {
2710 LAZY_MEMO_SET_VALUE(result, rb_ary_new_from_values(2, argv));
2711 LAZY_MEMO_SET_PACKED(result);
2712 }
2713 rb_ary_store(memos, memo_index, LONG2NUM(NUM2LONG(memo) + 1));
2714 return result;
2715}
2716
2717static VALUE
2718lazy_with_index_size(VALUE proc, VALUE receiver)
2719{
2720 return receiver;
2721}
2722
2723static const lazyenum_funcs lazy_with_index_funcs = {
2724 lazy_with_index_proc, lazy_with_index_size,
2725};
2726
2727/*
2728 * call-seq:
2729 * lazy.with_index(offset = 0) {|(*args), idx| block }
2730 * lazy.with_index(offset = 0)
2731 *
2732 * If a block is given, the given block is called for each element
2733 * together with an index, which starts from +offset+, and the
2734 * method returns a lazy enumerator that yields the same values
2735 * (without the index).
2736 *
2737 * If a block is not given, returns a new lazy enumerator that
2738 * includes the index, starting from +offset+.
2739 *
2740 * +offset+:: the starting index to use
2741 *
2742 * See Enumerator#with_index.
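 *
 * For example:
 *
 *   ("a"..).lazy.with_index.first(3)
 *   #=> [["a", 0], ["b", 1], ["c", 2]]
 *   ("a"..).lazy.with_index(1) { |w, i| puts "#{i}: #{w}" }.first(2)
 *   # prints "1: a" and "2: b", and returns ["a", "b"]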
2743 */
2744static VALUE
2745lazy_with_index(int argc, VALUE *argv, VALUE obj)
2746{
2747 VALUE memo;
2748
2749 rb_scan_args(argc, argv, "01", &memo);
2750 if (NIL_P(memo))
2751 memo = LONG2NUM(0);
2752
2753 return lazy_add_method(obj, 0, 0, memo, rb_ary_new_from_values(1, &memo), &lazy_with_index_funcs);
2754}
2755
2756#if 0 /* for RDoc */
2757
2758/*
2759 * call-seq:
2760 * lazy.chunk { |elt| ... } -> lazy_enumerator
2761 *
2762 * Like Enumerable#chunk, but chains operation to be lazy-evaluated.
2763 */
2764static VALUE
2765lazy_chunk(VALUE self)
2766{
2767}
2768
2769/*
2770 * call-seq:
2771 * lazy.chunk_while {|elt_before, elt_after| bool } -> lazy_enumerator
2772 *
2773 * Like Enumerable#chunk_while, but chains operation to be lazy-evaluated.
2774 */
2775static VALUE
2776lazy_chunk_while(VALUE self)
2777{
2778}
2779
2780/*
2781 * call-seq:
2782 * lazy.slice_after(pattern) -> lazy_enumerator
2783 * lazy.slice_after { |elt| bool } -> lazy_enumerator
2784 *
2785 * Like Enumerable#slice_after, but chains operation to be lazy-evaluated.
2786 */
2787static VALUE
2788lazy_slice_after(VALUE self)
2789{
2790}
2791
2792/*
2793 * call-seq:
2794 * lazy.slice_before(pattern) -> lazy_enumerator
2795 * lazy.slice_before { |elt| bool } -> lazy_enumerator
2796 *
2797 * Like Enumerable#slice_before, but chains operation to be lazy-evaluated.
2798 */
2799static VALUE
2800lazy_slice_before(VALUE self)
2801{
2802}
2803
2804/*
2805 * call-seq:
2806 * lazy.slice_when {|elt_before, elt_after| bool } -> lazy_enumerator
2807 *
2808 * Like Enumerable#slice_when, but chains operation to be lazy-evaluated.
2809 */
2810static VALUE
2811lazy_slice_when(VALUE self)
2812{
2813}
2814#endif
2815
2816static VALUE
2817lazy_super(int argc, VALUE *argv, VALUE lazy)
2818{
2819 return enumerable_lazy(rb_call_super(argc, argv));
2820}
2821
2822/*
2823 * call-seq:
2824 * enum.lazy -> lazy_enumerator
2825 *
2826 * Returns self.
2827 */
2828
2829static VALUE
2830lazy_lazy(VALUE obj)
2831{
2832 return obj;
2833}
2834
2835/*
2836 * Document-class: StopIteration
2837 *
2838 * Raised to stop the iteration, in particular by Enumerator#next. It is
2839 * rescued by Kernel#loop.
2840 *
2841 * loop do
2842 * puts "Hello"
2843 * raise StopIteration
2844 * puts "World"
2845 * end
2846 * puts "Done!"
2847 *
2848 * <em>produces:</em>
2849 *
2850 * Hello
2851 * Done!
2852 */
2853
2854/*
2855 * call-seq:
2856 * result -> value
2857 *
2858 * Returns the return value of the iterator.
2859 *
2860 * o = Object.new
2861 * def o.each
2862 * yield 1
2863 * yield 2
2864 * yield 3
2865 * 100
2866 * end
2867 *
2868 * e = o.to_enum
2869 *
2870 * puts e.next #=> 1
2871 * puts e.next #=> 2
2872 * puts e.next #=> 3
2873 *
2874 * begin
2875 * e.next
2876 * rescue StopIteration => ex
2877 * puts ex.result #=> 100
2878 * end
2879 *
2880 */
2881
2882static VALUE
2883stop_result(VALUE self)
2884{
2885 return rb_attr_get(self, id_result);
2886}
2887
2888/*
2889 * Producer
2890 */
2891
2892static void
2893producer_mark_and_move(void *p)
2894{
2895 struct producer *ptr = p;
2896 rb_gc_mark_and_move(&ptr->init);
2897 rb_gc_mark_and_move(&ptr->proc);
2898 rb_gc_mark_and_move(&ptr->size);
2899}
2900
2901#define producer_free RUBY_TYPED_DEFAULT_FREE
2902
2903static size_t
2904producer_memsize(const void *p)
2905{
2906 return sizeof(struct producer);
2907}
2908
2909static const rb_data_type_t producer_data_type = {
2910 "producer",
2911 {
2912 producer_mark_and_move,
2913 producer_free,
2914 producer_memsize,
2915 producer_mark_and_move,
2916 },
2917 0, 0, RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED | RUBY_TYPED_EMBEDDABLE
2918};
2919
2920static struct producer *
2921producer_ptr(VALUE obj)
2922{
2923 struct producer *ptr;
2924
2925 TypedData_Get_Struct(obj, struct producer, &producer_data_type, ptr);
2926 if (!ptr || UNDEF_P(ptr->proc)) {
2927 rb_raise(rb_eArgError, "uninitialized producer");
2928 }
2929 return ptr;
2930}
2931
2932/* :nodoc: */
2933static VALUE
2934producer_allocate(VALUE klass)
2935{
2936 struct producer *ptr;
2937 VALUE obj;
2938
2939 obj = TypedData_Make_Struct(klass, struct producer, &producer_data_type, ptr);
2940 ptr->init = Qundef;
2941 ptr->proc = Qundef;
2942 ptr->size = Qnil;
2943
2944 return obj;
2945}
2946
2947static VALUE
2948producer_init(VALUE obj, VALUE init, VALUE proc, VALUE size)
2949{
2950 struct producer *ptr;
2951
2952 TypedData_Get_Struct(obj, struct producer, &producer_data_type, ptr);
2953
2954 if (!ptr) {
2955 rb_raise(rb_eArgError, "unallocated producer");
2956 }
2957
2958 RB_OBJ_WRITE(obj, &ptr->init, init);
2959 RB_OBJ_WRITE(obj, &ptr->proc, proc);
2960 RB_OBJ_WRITE(obj, &ptr->size, size);
2961
2962 return obj;
2963}
2964
2965static VALUE
2966producer_each_stop(VALUE dummy, VALUE exc)
2967{
2968 return rb_attr_get(exc, id_result);
2969}
2970
2971NORETURN(static VALUE producer_each_i(VALUE obj));
2972
2973static VALUE
2974producer_each_i(VALUE obj)
2975{
2976 struct producer *ptr;
2977 VALUE init, proc, curr;
2978
2979 ptr = producer_ptr(obj);
2980 init = ptr->init;
2981 proc = ptr->proc;
2982
2983 if (UNDEF_P(init)) {
2984 curr = Qnil;
2985 }
2986 else {
2987 rb_yield(init);
2988 curr = init;
2989 }
2990
2991 for (;;) {
2992 curr = rb_funcall(proc, id_call, 1, curr);
2993 rb_yield(curr);
2994 }
2995
2996    UNREACHABLE_RETURN(Qnil);
2997}
2998
2999/* :nodoc: */
3000static VALUE
3001producer_each(VALUE obj)
3002{
3003 rb_need_block();
3004
3005 return rb_rescue2(producer_each_i, obj, producer_each_stop, (VALUE)0, rb_eStopIteration, (VALUE)0);
3006}
3007
3008static VALUE
3009producer_size(VALUE obj, VALUE args, VALUE eobj)
3010{
3011 struct producer *ptr = producer_ptr(obj);
3012 VALUE size = ptr->size;
3013
3014 if (NIL_P(size)) return Qnil;
3015 if (RB_INTEGER_TYPE_P(size) || RB_FLOAT_TYPE_P(size)) return size;
3016
3017 return rb_funcall(size, id_call, 0);
3018}
3019
3020/*
3021 * call-seq:
3022 * Enumerator.produce(initial = nil, size: nil) { |prev| block } -> enumerator
3023 *
3024 * Creates an infinite enumerator from any block, just called over and
3025 * over. The result of the previous iteration is passed to the next one.
3026 * If +initial+ is provided, it is passed to the first iteration, and
3027 * becomes the first element of the enumerator; if it is not provided,
3028 * the first iteration receives +nil+, and its result becomes the first
3029 * element of the iterator.
3030 *
3031 * Raising StopIteration from the block stops an iteration.
3032 *
3033 * Enumerator.produce(1, &:succ) # => enumerator of 1, 2, 3, 4, ....
3034 *
3035 * Enumerator.produce { rand(10) } # => infinite random number sequence
3036 *
3037 * ancestors = Enumerator.produce(node) { |prev| node = prev.parent or raise StopIteration }
3038 * enclosing_section = ancestors.find { |n| n.type == :section }
3039 *
3040 * Using ::produce together with Enumerable methods like Enumerable#detect,
3041 * Enumerable#slice_after, Enumerable#take_while can provide Enumerator-based alternatives
3042 * for +while+ and +until+ cycles:
3043 *
3044 * # Find next Tuesday
3045 * require "date"
3046 * Enumerator.produce(Date.today, &:succ).detect(&:tuesday?)
3047 *
3048 * # Simple lexer:
3049 * require "strscan"
3050 * scanner = StringScanner.new("7+38/6")
3051 * PATTERN = %r{\d+|[-/+*]}
3052 * Enumerator.produce { scanner.scan(PATTERN) }.slice_after { scanner.eos? }.first
3053 * # => ["7", "+", "38", "/", "6"]
3054 *
3055 * The optional +size+ keyword argument specifies the size of the enumerator,
3056 * which can be retrieved by Enumerator#size. It can be an integer,
3057 * +Float::INFINITY+, a callable object (such as a lambda), or +nil+ to
3058 * indicate unknown size. When not specified, the size defaults to
3059 * +Float::INFINITY+.
3060 *
3061 * # Infinite enumerator
3062 * enum = Enumerator.produce(1, size: Float::INFINITY, &:succ)
3063 * enum.size # => Float::INFINITY
3064 *
3065 * # Finite enumerator with known/computable size
3066 * abs_dir = File.expand_path("./baz") # => "/foo/bar/baz"
3067 * traverser = Enumerator.produce(abs_dir, size: -> { abs_dir.count("/") + 1 }) {
3068 * raise StopIteration if it == "/"
3069 * File.dirname(it)
3070 * }
3071 * traverser.size # => 4
3072 *
3073 * # Finite enumerator with unknown size
3074 * calendar = Enumerator.produce(Date.today, size: nil) {
3075 * it.monday? ? raise(StopIteration) : it + 1
3076 * }
3077 * calendar.size # => nil
3078 */
3079static VALUE
3080enumerator_s_produce(int argc, VALUE *argv, VALUE klass)
3081{
3082 VALUE init, producer, opts, size;
3083 ID keyword_ids[1];
3084
3085 if (!rb_block_given_p()) rb_raise(rb_eArgError, "no block given");
3086
3087 keyword_ids[0] = rb_intern("size");
3088 rb_scan_args_kw(RB_SCAN_ARGS_LAST_HASH_KEYWORDS, argc, argv, "01:", &init, &opts);
3089 rb_get_kwargs(opts, keyword_ids, 0, 1, &size);
3090
3091 size = UNDEF_P(size) ? DBL2NUM(HUGE_VAL) : convert_to_feasible_size_value(size);
3092
3093 if (argc == 0 || (argc == 1 && !NIL_P(opts))) {
3094 init = Qundef;
3095 }
3096
3097 producer = producer_init(producer_allocate(rb_cEnumProducer), init, rb_block_proc(), size);
3098
3099 return rb_enumeratorize_with_size_kw(producer, sym_each, 0, 0, producer_size, RB_NO_KEYWORDS);
3100}
3101
3102/*
3103 * Document-class: Enumerator::Chain
3104 *
3105 * Enumerator::Chain is a subclass of Enumerator, which represents a
3106 * chain of enumerables that works as a single enumerator.
3107 *
3108 * Objects of this type can be created by Enumerable#chain and
3109 * Enumerator#+.
3110 */
3111
3112static void
3113enum_chain_mark_and_move(void *p)
3114{
3115 struct enum_chain *ptr = p;
3116 rb_gc_mark_and_move(&ptr->enums);
3117}
3118
3119#define enum_chain_free RUBY_TYPED_DEFAULT_FREE
3120
3121static size_t
3122enum_chain_memsize(const void *p)
3123{
3124 return sizeof(struct enum_chain);
3125}
3126
3127static const rb_data_type_t enum_chain_data_type = {
3128 "chain",
3129 {
3130 enum_chain_mark_and_move,
3131 enum_chain_free,
3132 enum_chain_memsize,
3133 enum_chain_mark_and_move,
3134 },
3135 0, 0, RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED
3136};
3137
3138static struct enum_chain *
3139enum_chain_ptr(VALUE obj)
3140{
3141 struct enum_chain *ptr;
3142
3143 TypedData_Get_Struct(obj, struct enum_chain, &enum_chain_data_type, ptr);
3144 if (!ptr || UNDEF_P(ptr->enums)) {
3145 rb_raise(rb_eArgError, "uninitialized chain");
3146 }
3147 return ptr;
3148}
3149
3150/* :nodoc: */
3151static VALUE
3152enum_chain_allocate(VALUE klass)
3153{
3154 struct enum_chain *ptr;
3155 VALUE obj;
3156
3157 obj = TypedData_Make_Struct(klass, struct enum_chain, &enum_chain_data_type, ptr);
3158 ptr->enums = Qundef;
3159 ptr->pos = -1;
3160
3161 return obj;
3162}
3163
3164/*
3165 * call-seq:
3166 * Enumerator::Chain.new(*enums) -> enum
3167 *
3168 * Generates a new enumerator object that iterates over the elements
3169 * of given enumerable objects in sequence.
3170 *
3171 * e = Enumerator::Chain.new(1..3, [4, 5])
3172 * e.to_a #=> [1, 2, 3, 4, 5]
3173 * e.size #=> 5
3174 */
3175static VALUE
3176enum_chain_initialize(VALUE obj, VALUE enums)
3177{
3178 struct enum_chain *ptr;
3179
3180 rb_check_frozen(obj);
3181 TypedData_Get_Struct(obj, struct enum_chain, &enum_chain_data_type, ptr);
3182
3183 if (!ptr) rb_raise(rb_eArgError, "unallocated chain");
3184
3185 RB_OBJ_WRITE(obj, &ptr->enums, rb_ary_freeze(enums));
3186 ptr->pos = -1;
3187
3188 return obj;
3189}
3190
3191static VALUE
3192new_enum_chain(VALUE enums)
3193{
3194 long i;
3195 VALUE obj = enum_chain_initialize(enum_chain_allocate(rb_cEnumChain), enums);
3196
3197 for (i = 0; i < RARRAY_LEN(enums); i++) {
3198 if (RTEST(rb_obj_is_kind_of(RARRAY_AREF(enums, i), rb_cLazy))) {
3199 return enumerable_lazy(obj);
3200 }
3201 }
3202
3203 return obj;
3204}
3205
3206/* :nodoc: */
3207static VALUE
3208enum_chain_init_copy(VALUE obj, VALUE orig)
3209{
3210 struct enum_chain *ptr0, *ptr1;
3211
3212 if (!OBJ_INIT_COPY(obj, orig)) return obj;
3213 ptr0 = enum_chain_ptr(orig);
3214
3215 TypedData_Get_Struct(obj, struct enum_chain, &enum_chain_data_type, ptr1);
3216
3217 if (!ptr1) rb_raise(rb_eArgError, "unallocated chain");
3218
3219 RB_OBJ_WRITE(obj, &ptr1->enums, ptr0->enums);
3220 ptr1->pos = ptr0->pos;
3221
3222 return obj;
3223}
3224
3225static VALUE
3226enum_chain_total_size(VALUE enums)
3227{
3228 VALUE total = INT2FIX(0);
3229 long i;
3230
3231 for (i = 0; i < RARRAY_LEN(enums); i++) {
3232 VALUE size = enum_size(RARRAY_AREF(enums, i));
3233
3234 if (NIL_P(size) || (RB_FLOAT_TYPE_P(size) && isinf(NUM2DBL(size)))) {
3235 return size;
3236 }
3237 if (!RB_INTEGER_TYPE_P(size)) {
3238 return Qnil;
3239 }
3240
3241 total = rb_funcall(total, '+', 1, size);
3242 }
3243
3244 return total;
3245}
3246
3247/*
3248 * call-seq:
3249 * obj.size -> int, Float::INFINITY or nil
3250 *
3251 * Returns the total size of the enumerator chain calculated by
3252 * summing up the size of each enumerable in the chain. If any of the
3253 * enumerables reports its size as nil or Float::INFINITY, that value
3254 * is returned as the total size.
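 *
 * For example:
 *
 *   Enumerator::Chain.new(1..3, [4, 5]).size                #=> 5
 *   Enumerator::Chain.new(1..3, 1..Float::INFINITY).size    #=> Float::INFINITY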
3255 */
3256static VALUE
3257enum_chain_size(VALUE obj)
3258{
3259 return enum_chain_total_size(enum_chain_ptr(obj)->enums);
3260}
3261
3262static VALUE
3263enum_chain_enum_size(VALUE obj, VALUE args, VALUE eobj)
3264{
3265 return enum_chain_size(obj);
3266}
3267
3268static VALUE
3269enum_chain_enum_no_size(VALUE obj, VALUE args, VALUE eobj)
3270{
3271 return Qnil;
3272}
3273
3274/*
3275 * call-seq:
3276 * obj.each(*args) { |...| ... } -> obj
3277 * obj.each(*args) -> enumerator
3278 *
3279 * Iterates over the elements of the first enumerable by calling the
3280 * "each" method on it with the given arguments, then proceeds to the
3281 * following enumerables in sequence until all of the enumerables are
3282 * exhausted.
3283 *
3284 * If no block is given, returns an enumerator.
3285 */
3286static VALUE
3287enum_chain_each(int argc, VALUE *argv, VALUE obj)
3288{
3289 VALUE enums, block;
3290 struct enum_chain *objptr;
3291 long i;
3292
3293 RETURN_SIZED_ENUMERATOR(obj, argc, argv, argc > 0 ? enum_chain_enum_no_size : enum_chain_enum_size);
3294
3295 objptr = enum_chain_ptr(obj);
3296 enums = objptr->enums;
3297 block = rb_block_proc();
3298
3299 for (i = 0; i < RARRAY_LEN(enums); i++) {
3300 objptr->pos = i;
3301 rb_funcall_with_block(RARRAY_AREF(enums, i), id_each, argc, argv, block);
3302 }
3303
3304 return obj;
3305}
3306
3307/*
3308 * call-seq:
3309 * obj.rewind -> obj
3310 *
3311 * Rewinds the enumerator chain by calling the "rewind" method on each
3312 * enumerable in reverse order. Each call is performed only if the
3313 * enumerable responds to the method.
3314 */
3315static VALUE
3316enum_chain_rewind(VALUE obj)
3317{
3318 struct enum_chain *objptr = enum_chain_ptr(obj);
3319 VALUE enums = objptr->enums;
3320 long i;
3321
3322 for (i = objptr->pos; 0 <= i && i < RARRAY_LEN(enums); objptr->pos = --i) {
3323 rb_check_funcall(RARRAY_AREF(enums, i), id_rewind, 0, 0);
3324 }
3325
3326 return obj;
3327}
3328
3329static VALUE
3330inspect_enum_chain(VALUE obj, VALUE dummy, int recur)
3331{
3332 VALUE klass = rb_obj_class(obj);
3333 struct enum_chain *ptr;
3334
3335 TypedData_Get_Struct(obj, struct enum_chain, &enum_chain_data_type, ptr);
3336
3337 if (!ptr || UNDEF_P(ptr->enums)) {
3338 return rb_sprintf("#<%"PRIsVALUE": uninitialized>", rb_class_path(klass));
3339 }
3340
3341 if (recur) {
3342 return rb_sprintf("#<%"PRIsVALUE": ...>", rb_class_path(klass));
3343 }
3344
3345 return rb_sprintf("#<%"PRIsVALUE": %+"PRIsVALUE">", rb_class_path(klass), ptr->enums);
3346}
3347
3348/*
3349 * call-seq:
3350 * obj.inspect -> string
3351 *
3352 * Returns a printable version of the enumerator chain.
3353 */
3354static VALUE
3355enum_chain_inspect(VALUE obj)
3356{
3357 return rb_exec_recursive(inspect_enum_chain, obj, 0);
3358}
3359
3360/*
3361 * call-seq:
3362 * e.chain(*enums) -> enumerator
3363 *
3364 * Returns an enumerator object generated from this enumerator and
3365 * given enumerables.
3366 *
3367 * e = (1..3).chain([4, 5])
3368 * e.to_a #=> [1, 2, 3, 4, 5]
3369 */
3370static VALUE
3371enum_chain(int argc, VALUE *argv, VALUE obj)
3372{
3373 VALUE enums = rb_ary_new_from_values(1, &obj);
3374 rb_ary_cat(enums, argv, argc);
3375 return new_enum_chain(enums);
3376}
3377
3378/*
3379 * call-seq:
3380 * e + enum -> enumerator
3381 *
3382 * Returns an enumerator object generated from this enumerator and a
3383 * given enumerable.
3384 *
3385 * e = (1..3).each + [4, 5]
3386 * e.to_a #=> [1, 2, 3, 4, 5]
3387 */
3388static VALUE
3389enumerator_plus(VALUE obj, VALUE eobj)
3390{
3391 return new_enum_chain(rb_ary_new_from_args(2, obj, eobj));
3392}
3393
3394/*
3395 * Document-class: Enumerator::Product
3396 *
3397 * Enumerator::Product generates a Cartesian product of any number of
3398 * enumerable objects. Iterating over the product of enumerable
3399 * objects is roughly equivalent to nested each_entry loops where the
3400 * loop for the rightmost object is put innermost.
3401 *
3402 * innings = Enumerator::Product.new(1..9, ['top', 'bottom'])
3403 *
3404 * innings.each do |i, h|
3405 * p [i, h]
3406 * end
3407 * # [1, "top"]
3408 * # [1, "bottom"]
3409 * # [2, "top"]
3410 * # [2, "bottom"]
3411 * # [3, "top"]
3412 * # [3, "bottom"]
3413 * # ...
3414 * # [9, "top"]
3415 * # [9, "bottom"]
3416 *
3417 * The method used against each enumerable object is `each_entry`
3418 * instead of `each` so that the product of N enumerable objects
3419 * yields an array of exactly N elements in each iteration.
3420 *
3421 * When no enumerator is given, it calls a given block once yielding
3422 * an empty argument list.
3423 *
3424 * Objects of this type can be created by Enumerator.product.
3425 */
3426
3427static void
3428enum_product_mark_and_move(void *p)
3429{
3430 struct enum_product *ptr = p;
3431 rb_gc_mark_and_move(&ptr->enums);
3432}
3433
3434#define enum_product_free RUBY_TYPED_DEFAULT_FREE
3435
3436static size_t
3437enum_product_memsize(const void *p)
3438{
3439 return sizeof(struct enum_product);
3440}
3441
3442static const rb_data_type_t enum_product_data_type = {
3443 "product",
3444 {
3445 enum_product_mark_and_move,
3446 enum_product_free,
3447 enum_product_memsize,
3448 enum_product_mark_and_move,
3449 },
3450 0, 0, RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED
3451};
3452
3453static struct enum_product *
3454enum_product_ptr(VALUE obj)
3455{
3456 struct enum_product *ptr;
3457
3458 TypedData_Get_Struct(obj, struct enum_product, &enum_product_data_type, ptr);
3459 if (!ptr || UNDEF_P(ptr->enums)) {
3460 rb_raise(rb_eArgError, "uninitialized product");
3461 }
3462 return ptr;
3463}
3464
3465/* :nodoc: */
3466static VALUE
3467enum_product_allocate(VALUE klass)
3468{
3469 struct enum_product *ptr;
3470 VALUE obj;
3471
3472 obj = TypedData_Make_Struct(klass, struct enum_product, &enum_product_data_type, ptr);
3473 ptr->enums = Qundef;
3474
3475 return obj;
3476}
3477
3478/*
3479 * call-seq:
3480 * Enumerator::Product.new(*enums) -> enum
3481 *
3482 * Generates a new enumerator object that generates a Cartesian
3483 * product of given enumerable objects.
3484 *
3485 * e = Enumerator::Product.new(1..3, [4, 5])
3486 * e.to_a #=> [[1, 4], [1, 5], [2, 4], [2, 5], [3, 4], [3, 5]]
3487 * e.size #=> 6
3488 */
3489static VALUE
3490enum_product_initialize(int argc, VALUE *argv, VALUE obj)
3491{
3492 struct enum_product *ptr;
3493 VALUE enums = Qnil, options = Qnil;
3494
3495 rb_scan_args(argc, argv, "*:", &enums, &options);
3496
3497 if (!NIL_P(options) && !RHASH_EMPTY_P(options)) {
3498 rb_exc_raise(rb_keyword_error_new("unknown", rb_hash_keys(options)));
3499 }
3500
3501 rb_check_frozen(obj);
3502 TypedData_Get_Struct(obj, struct enum_product, &enum_product_data_type, ptr);
3503
3504 if (!ptr) rb_raise(rb_eArgError, "unallocated product");
3505
3506 RB_OBJ_WRITE(obj, &ptr->enums, rb_ary_freeze(enums));
3507
3508 return obj;
3509}
3510
3511/* :nodoc: */
3512static VALUE
3513enum_product_init_copy(VALUE obj, VALUE orig)
3514{
3515 struct enum_product *ptr0, *ptr1;
3516
3517 if (!OBJ_INIT_COPY(obj, orig)) return obj;
3518 ptr0 = enum_product_ptr(orig);
3519
3520 TypedData_Get_Struct(obj, struct enum_product, &enum_product_data_type, ptr1);
3521
3522 if (!ptr1) rb_raise(rb_eArgError, "unallocated product");
3523
3524 RB_OBJ_WRITE(obj, &ptr1->enums, ptr0->enums);
3525
3526 return obj;
3527}
3528
3529static VALUE
3530enum_product_total_size(VALUE enums)
3531{
3532 VALUE total = INT2FIX(1);
3533 VALUE sizes = rb_ary_hidden_new(RARRAY_LEN(enums));
3534 long i;
3535
3536 for (i = 0; i < RARRAY_LEN(enums); i++) {
3537 VALUE size = enum_size(RARRAY_AREF(enums, i));
3538 if (size == INT2FIX(0)) {
3539 rb_ary_resize(sizes, 0);
3540 return size;
3541 }
3542 rb_ary_push(sizes, size);
3543 }
3544 for (i = 0; i < RARRAY_LEN(sizes); i++) {
3545 VALUE size = RARRAY_AREF(sizes, i);
3546
3547 if (NIL_P(size) || (RB_TYPE_P(size, T_FLOAT) && isinf(NUM2DBL(size)))) {
3548 return size;
3549 }
3550 if (!RB_INTEGER_TYPE_P(size)) {
3551 return Qnil;
3552 }
3553
3554 total = rb_funcall(total, '*', 1, size);
3555 }
3556
3557 return total;
3558}
3559
3560/*
3561 * call-seq:
3562 * obj.size -> int, Float::INFINITY or nil
3563 *
3564 * Returns the total size of the enumerator product calculated by
3565 * multiplying the sizes of enumerables in the product. If any of the
3566 * enumerables reports its size as nil or Float::INFINITY, that value
3567 * is returned as the size.
3568 */
3569static VALUE
3570enum_product_size(VALUE obj)
3571{
3572 return enum_product_total_size(enum_product_ptr(obj)->enums);
3573}
3574
3575static VALUE
3576enum_product_enum_size(VALUE obj, VALUE args, VALUE eobj)
3577{
3578 return enum_product_size(obj);
3579}
3580
3581struct product_state {
3582    VALUE obj;
3583 VALUE block;
3584 int index;
3585 int argc;
3586 VALUE *argv;
3587};
3588
3589static VALUE product_each(VALUE, struct product_state *);
3590
3591static VALUE
3592product_each_i(RB_BLOCK_CALL_FUNC_ARGLIST(value, state))
3593{
3594 struct product_state *pstate = (struct product_state *)state;
3595 pstate->argv[pstate->index++] = value;
3596
3597 VALUE val = product_each(pstate->obj, pstate);
3598 pstate->index--;
3599 return val;
3600}
3601
3602static VALUE
3603product_each(VALUE obj, struct product_state *pstate)
3604{
3605 struct enum_product *ptr = enum_product_ptr(obj);
3606 VALUE enums = ptr->enums;
3607
3608 if (pstate->index < pstate->argc) {
3609 VALUE eobj = RARRAY_AREF(enums, pstate->index);
3610
3611 rb_block_call(eobj, id_each_entry, 0, NULL, product_each_i, (VALUE)pstate);
3612 }
3613 else {
3614 rb_funcall(pstate->block, id_call, 1, rb_ary_new_from_values(pstate->argc, pstate->argv));
3615 }
3616
3617 return obj;
3618}
3619
3620static VALUE
3621enum_product_run(VALUE obj, VALUE block)
3622{
3623 struct enum_product *ptr = enum_product_ptr(obj);
3624 int argc = RARRAY_LENINT(ptr->enums);
3625 if (argc == 0) { /* no need to allocate state.argv */
3626 rb_funcall(block, id_call, 1, rb_ary_new());
3627 return obj;
3628 }
3629
3630 VALUE argsbuf = 0;
3631 struct product_state state = {
3632 .obj = obj,
3633 .block = block,
3634 .index = 0,
3635 .argc = argc,
3636 .argv = ALLOCV_N(VALUE, argsbuf, argc),
3637 };
3638
3639 VALUE ret = product_each(obj, &state);
3640 ALLOCV_END(argsbuf);
3641 return ret;
3642}
3643
3644/*
3645 * call-seq:
3646 * obj.each { |...| ... } -> obj
3647 * obj.each -> enumerator
3648 *
3649 * Iterates over the Cartesian product of the enumerables by calling
3650 * the "each_entry" method on each of them, yielding an array that
3651 * contains one element from each enumerable per iteration, with the
3652 * rightmost enumerable varying fastest.
3653 *
3654 * If no block is given, returns an enumerator. Otherwise, returns self.
3655 */
3656static VALUE
3657enum_product_each(VALUE obj)
3658{
3659 RETURN_SIZED_ENUMERATOR(obj, 0, 0, enum_product_enum_size);
3660
3661 return enum_product_run(obj, rb_block_proc());
3662}
3663
3664/*
3665 * call-seq:
3666 * obj.rewind -> obj
3667 *
3668 * Rewinds the product enumerator by calling the "rewind" method on
3669 * each enumerable in reverse order. Each call is performed only if
3670 * the enumerable responds to the method.
3671 */
3672static VALUE
3673enum_product_rewind(VALUE obj)
3674{
3675 struct enum_product *ptr = enum_product_ptr(obj);
3676 VALUE enums = ptr->enums;
3677 long i;
3678
3679 for (i = 0; i < RARRAY_LEN(enums); i++) {
3680 rb_check_funcall(RARRAY_AREF(enums, i), id_rewind, 0, 0);
3681 }
3682
3683 return obj;
3684}
3685
3686static VALUE
3687inspect_enum_product(VALUE obj, VALUE dummy, int recur)
3688{
3689 VALUE klass = rb_obj_class(obj);
3690 struct enum_product *ptr;
3691
3692 TypedData_Get_Struct(obj, struct enum_product, &enum_product_data_type, ptr);
3693
3694 if (!ptr || UNDEF_P(ptr->enums)) {
3695 return rb_sprintf("#<%"PRIsVALUE": uninitialized>", rb_class_path(klass));
3696 }
3697
3698 if (recur) {
3699 return rb_sprintf("#<%"PRIsVALUE": ...>", rb_class_path(klass));
3700 }
3701
3702 return rb_sprintf("#<%"PRIsVALUE": %+"PRIsVALUE">", rb_class_path(klass), ptr->enums);
3703}
3704
3705/*
3706 * call-seq:
3707 * obj.inspect -> string
3708 *
3709 * Returns a printable version of the product enumerator.
3710 */
3711static VALUE
3712enum_product_inspect(VALUE obj)
3713{
3714 return rb_exec_recursive(inspect_enum_product, obj, 0);
3715}
3716
3717/*
3718 * call-seq:
3719 * Enumerator.product(*enums) -> enumerator
3720 * Enumerator.product(*enums) { |elts| ... } -> enumerator
3721 *
3722 * Generates a new enumerator object that generates a Cartesian
3723 * product of given enumerable objects. This is equivalent to
3724 * Enumerator::Product.new.
3725 *
3726 * e = Enumerator.product(1..3, [4, 5])
3727 * e.to_a #=> [[1, 4], [1, 5], [2, 4], [2, 5], [3, 4], [3, 5]]
3728 * e.size #=> 6
3729 *
3730 * When a block is given, calls the block with each N-element array
3731 * generated and returns +nil+.
3732 */
3733static VALUE
3734enumerator_s_product(int argc, VALUE *argv, VALUE klass)
3735{
3736 VALUE enums = Qnil, options = Qnil, block = Qnil;
3737
3738 rb_scan_args(argc, argv, "*:&", &enums, &options, &block);
3739
3740 if (!NIL_P(options) && !RHASH_EMPTY_P(options)) {
3741 rb_exc_raise(rb_keyword_error_new("unknown", rb_hash_keys(options)));
3742 }
3743
3744 VALUE obj = enum_product_initialize(argc, argv, enum_product_allocate(rb_cEnumProduct));
3745
3746 if (!NIL_P(block)) {
3747 enum_product_run(obj, block);
3748 return Qnil;
3749 }
3750
3751 return obj;
3752}
3753
3754struct arith_seq {
3755    struct enumerator enumerator;
3756 VALUE begin;
3757 VALUE end;
3758 VALUE step;
3759 bool exclude_end;
3760};
3761
3762RUBY_REFERENCES(arith_seq_refs) = {
3763 RUBY_REF_EDGE(struct enumerator, obj),
3764 RUBY_REF_EDGE(struct enumerator, args),
3765 RUBY_REF_EDGE(struct enumerator, fib),
3766 RUBY_REF_EDGE(struct enumerator, dst),
3767 RUBY_REF_EDGE(struct enumerator, lookahead),
3768 RUBY_REF_EDGE(struct enumerator, feedvalue),
3769 RUBY_REF_EDGE(struct enumerator, stop_exc),
3770 RUBY_REF_EDGE(struct enumerator, size),
3771 RUBY_REF_EDGE(struct enumerator, procs),
3772
3773 RUBY_REF_EDGE(struct arith_seq, begin),
3774 RUBY_REF_EDGE(struct arith_seq, end),
3775 RUBY_REF_EDGE(struct arith_seq, step),
3776 RUBY_REF_END
3777};
3778
3779static const rb_data_type_t arith_seq_data_type = {
3780 "arithmetic_sequence",
3781 {
3782 RUBY_REFS_LIST_PTR(arith_seq_refs),
3783        RUBY_TYPED_DEFAULT_FREE,
3784        NULL, // Nothing allocated externally, so don't need a memsize function
3785 NULL,
3786 },
3787 .parent = &enumerator_data_type,
3788 .flags = RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED | RUBY_TYPED_DECL_MARKING | RUBY_TYPED_EMBEDDABLE
3789};
3790
3791static VALUE
3792arith_seq_allocate(VALUE klass)
3793{
3794 struct arith_seq *ptr;
3795 VALUE enum_obj;
3796
3797 enum_obj = TypedData_Make_Struct(klass, struct arith_seq, &arith_seq_data_type, ptr);
3798 ptr->enumerator.obj = Qundef;
3799
3800 return enum_obj;
3801}
3802
3803/*
3804 * Document-class: Enumerator::ArithmeticSequence
3805 *
3806 * Enumerator::ArithmeticSequence is a subclass of Enumerator that
3807 * represents a sequence of numbers with a common difference.
3808 * Instances of this class can be generated by the Range#step and Numeric#step
3809 * methods.
3810 *
3811 * The class can be used for slicing Array (see Array#slice) or custom
3812 * collections.
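 *
 * For example:
 *
 *   aseq = (1..10).step(2)
 *   aseq.class  #=> Enumerator::ArithmeticSequence
 *   aseq.to_a   #=> [1, 3, 5, 7, 9]
 *   # Array#slice accepts an arithmetic sequence as an index:
 *   [0, 1, 2, 3, 4, 5][(0..).step(2)]  #=> [0, 2, 4]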
3813 */
3814
3815VALUE
3816rb_arith_seq_new(VALUE obj, VALUE meth, int argc, VALUE const *argv,
3817 rb_enumerator_size_func *size_fn,
3818 VALUE beg, VALUE end, VALUE step, int excl)
3819{
3820 VALUE aseq = enumerator_init(arith_seq_allocate(rb_cArithSeq),
3821 obj, meth, argc, argv, size_fn, Qnil, rb_keyword_given_p());
3822 struct arith_seq *ptr;
3823 TypedData_Get_Struct(aseq, struct arith_seq, &enumerator_data_type, ptr);
3824
3825 RB_OBJ_WRITE(aseq, &ptr->begin, beg);
3826 RB_OBJ_WRITE(aseq, &ptr->end, end);
3827 RB_OBJ_WRITE(aseq, &ptr->step, step);
3828 ptr->exclude_end = excl;
3829
3830 return aseq;
3831}
3832
3833/*
3834 * call-seq: aseq.begin -> num or nil
3835 *
3836 * Returns the number that defines the first element of this arithmetic
3837 * sequence.
3838 */
3839static inline VALUE
3840arith_seq_begin(VALUE self)
3841{
3842 struct arith_seq *ptr;
3843 TypedData_Get_Struct(self, struct arith_seq, &enumerator_data_type, ptr);
3844 return ptr->begin;
3845}
3846
3847/*
3848 * call-seq: aseq.end -> num or nil
3849 *
3850 * Returns the number that defines the end of this arithmetic sequence.
3851 */
3852static inline VALUE
3853arith_seq_end(VALUE self)
3854{
3855 struct arith_seq *ptr;
3856 TypedData_Get_Struct(self, struct arith_seq, &enumerator_data_type, ptr);
3857 return ptr->end;
3858}
3859
3860/*
3861 * call-seq: aseq.step -> num
3862 *
3863 * Returns the number that defines the common difference between
3864 * two adjacent elements in this arithmetic sequence.
3865 */
3866static inline VALUE
3867arith_seq_step(VALUE self)
3868{
3869 struct arith_seq *ptr;
3870 TypedData_Get_Struct(self, struct arith_seq, &enumerator_data_type, ptr);
3871 return ptr->step;
3872}
3873
3874/*
3875 * call-seq: aseq.exclude_end? -> true or false
3876 *
3877 * Returns <code>true</code> if this arithmetic sequence excludes its end value.
3878 */
3879static inline VALUE
3880arith_seq_exclude_end(VALUE self)
3881{
3882 struct arith_seq *ptr;
3883 TypedData_Get_Struct(self, struct arith_seq, &enumerator_data_type, ptr);
3884 return RBOOL(ptr->exclude_end);
3885}
3886
3887static inline int
3888arith_seq_exclude_end_p(VALUE self)
3889{
3890 struct arith_seq *ptr;
3891 TypedData_Get_Struct(self, struct arith_seq, &enumerator_data_type, ptr);
3892 return ptr->exclude_end;
3893}
3894
3895int
3896rb_arithmetic_sequence_extract(VALUE obj, rb_arithmetic_sequence_components_t *component)
3897{
3898 if (rb_obj_is_kind_of(obj, rb_cArithSeq)) {
3899 component->begin = arith_seq_begin(obj);
3900 component->end = arith_seq_end(obj);
3901 component->step = arith_seq_step(obj);
3902 component->exclude_end = arith_seq_exclude_end_p(obj);
3903 return 1;
3904 }
3905 else if (rb_range_values(obj, &component->begin, &component->end, &component->exclude_end)) {
3906 component->step = INT2FIX(1);
3907 return 1;
3908 }
3909
3910 return 0;
3911}
3912
3913VALUE
3914rb_arithmetic_sequence_beg_len_step(VALUE obj, long *begp, long *lenp, long *stepp, long len, int err)
3915{
3916 RBIMPL_NONNULL_ARG(begp);
3917 RBIMPL_NONNULL_ARG(lenp);
3918 RBIMPL_NONNULL_ARG(stepp);
3919
3920    rb_arithmetic_sequence_components_t aseq;
3921    if (!rb_arithmetic_sequence_extract(obj, &aseq)) {
3922 return Qfalse;
3923 }
3924
3925 long step = NIL_P(aseq.step) ? 1 : NUM2LONG(aseq.step);
3926 *stepp = step;
3927
3928 if (step < 0) {
3929 if (aseq.exclude_end && !NIL_P(aseq.end)) {
3930 /* Handle exclusion before range reversal */
3931 aseq.end = LONG2NUM(NUM2LONG(aseq.end) + 1);
3932
3933 /* Don't exclude the previous beginning */
3934 aseq.exclude_end = 0;
3935 }
3936 VALUE tmp = aseq.begin;
3937 aseq.begin = aseq.end;
3938 aseq.end = tmp;
3939 }
3940
3941 if (err == 0 && (step < -1 || step > 1)) {
3942 if (rb_range_component_beg_len(aseq.begin, aseq.end, aseq.exclude_end, begp, lenp, len, 1) == Qtrue) {
3943 if (*begp > len)
3944 goto out_of_range;
3945 if (*lenp > len)
3946 goto out_of_range;
3947 return Qtrue;
3948 }
3949 }
3950 else {
3951 return rb_range_component_beg_len(aseq.begin, aseq.end, aseq.exclude_end, begp, lenp, len, err);
3952 }
3953
3954 out_of_range:
3955 rb_raise(rb_eRangeError, "%+"PRIsVALUE" out of range", obj);
3956 return Qnil;
3957}
3958
3959static VALUE
3960arith_seq_take(VALUE self, VALUE num)
3961{
3962 VALUE b, e, s, ary;
3963 long n;
3964 int x;
3965
3966 n = NUM2LONG(num);
3967 if (n < 0) {
3968 rb_raise(rb_eArgError, "attempt to take negative size");
3969 }
3970 if (n == 0) {
3971 return rb_ary_new_capa(0);
3972 }
3973
3974 b = arith_seq_begin(self);
3975 e = arith_seq_end(self);
3976 s = arith_seq_step(self);
3977 x = arith_seq_exclude_end_p(self);
3978
3979 if (FIXNUM_P(b) && NIL_P(e) && FIXNUM_P(s)) {
3980 long i = FIX2LONG(b), unit = FIX2LONG(s);
3981 ary = rb_ary_new_capa(n);
3982 while (n > 0 && FIXABLE(i)) {
3983 rb_ary_push(ary, LONG2FIX(i));
3984 i += unit; // FIXABLE + FIXABLE never overflow;
3985 --n;
3986 }
3987 if (n > 0) {
3988 b = LONG2NUM(i);
3989 while (n > 0) {
3990 rb_ary_push(ary, b);
3991 b = rb_big_plus(b, s);
3992 --n;
3993 }
3994 }
3995 return ary;
3996 }
3997 else if (FIXNUM_P(b) && FIXNUM_P(e) && FIXNUM_P(s)) {
3998 long i = FIX2LONG(b);
3999 long end = FIX2LONG(e);
4000 long unit = FIX2LONG(s);
4001 long len;
4002
4003 if (unit >= 0) {
4004 if (!x) end += 1;
4005
4006 len = end - i;
4007 if (len < 0) len = 0;
4008 ary = rb_ary_new_capa((n < len) ? n : len);
4009 while (n > 0 && i < end) {
4010 rb_ary_push(ary, LONG2FIX(i));
4011 if (i + unit < i) break;
4012 i += unit;
4013 --n;
4014 }
4015 }
4016 else {
4017 if (!x) end -= 1;
4018
4019 len = i - end;
4020 if (len < 0) len = 0;
4021 ary = rb_ary_new_capa((n < len) ? n : len);
4022 while (n > 0 && i > end) {
4023 rb_ary_push(ary, LONG2FIX(i));
4024 if (i + unit > i) break;
4025 i += unit;
4026 --n;
4027 }
4028 }
4029 return ary;
4030 }
4031 else if (RB_FLOAT_TYPE_P(b) || RB_FLOAT_TYPE_P(e) || RB_FLOAT_TYPE_P(s)) {
4032 /* generate values like ruby_float_step */
4033
4034 double unit = NUM2DBL(s);
4035 double beg = NUM2DBL(b);
4036 double end = NIL_P(e) ? (unit < 0 ? -1 : 1)*HUGE_VAL : NUM2DBL(e);
4037 double len = ruby_float_step_size(beg, end, unit, x);
4038 long i;
4039
4040 if (n > len)
4041 n = (long)len;
4042
4043 if (isinf(unit)) {
4044 if (len > 0) {
4045 ary = rb_ary_new_capa(1);
4046 rb_ary_push(ary, DBL2NUM(beg));
4047 }
4048 else {
4049 ary = rb_ary_new_capa(0);
4050 }
4051 }
4052 else if (unit == 0) {
4053 VALUE val = DBL2NUM(beg);
4054 ary = rb_ary_new_capa(n);
4055 for (i = 0; i < len; ++i) {
4056 rb_ary_push(ary, val);
4057 }
4058 }
4059 else {
4060 ary = rb_ary_new_capa(n);
4061 for (i = 0; i < n; ++i) {
4062 double d = i*unit+beg;
4063 if (unit >= 0 ? end < d : d < end) d = end;
4064 rb_ary_push(ary, DBL2NUM(d));
4065 }
4066 }
4067
4068 return ary;
4069 }
4070
4071 {
4072 VALUE argv[1];
4073 argv[0] = num;
4074 return rb_call_super(1, argv);
4075 }
4076}
4077
4078/*
4079 * call-seq:
4080 * aseq.first -> num or nil
4081 * aseq.first(n) -> an_array
4082 *
4083 * Returns the first number in this arithmetic sequence,
4084 * or an array of the first +n+ elements.
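 *
 * For example:
 *
 *   (1..10).step(3).first     #=> 1
 *   (1..10).step(3).first(3)  #=> [1, 4, 7]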
4085 */
4086static VALUE
4087arith_seq_first(int argc, VALUE *argv, VALUE self)
4088{
4089 VALUE b, e, s;
4090
4091 rb_check_arity(argc, 0, 1);
4092
4093 b = arith_seq_begin(self);
4094 e = arith_seq_end(self);
4095 s = arith_seq_step(self);
4096 if (argc == 0) {
4097 if (NIL_P(b)) {
4098 return Qnil;
4099 }
4100 if (!NIL_P(e)) {
4101 VALUE zero = INT2FIX(0);
4102 int r = rb_cmpint(rb_num_coerce_cmp(s, zero, idCmp), s, zero);
4103 if (r > 0 && RTEST(rb_funcall(b, '>', 1, e))) {
4104 return Qnil;
4105 }
4106 if (r < 0 && RTEST(rb_funcall(b, '<', 1, e))) {
4107 return Qnil;
4108 }
4109 }
4110 return b;
4111 }
4112
4113 return arith_seq_take(self, argv[0]);
4114}
4115
4116static inline VALUE
4117num_plus(VALUE a, VALUE b)
4118{
4119 if (RB_INTEGER_TYPE_P(a)) {
4120 return rb_int_plus(a, b);
4121 }
4122 else if (RB_FLOAT_TYPE_P(a)) {
4123 return rb_float_plus(a, b);
4124 }
4125 else if (RB_TYPE_P(a, T_RATIONAL)) {
4126 return rb_rational_plus(a, b);
4127 }
4128 else {
4129 return rb_funcallv(a, '+', 1, &b);
4130 }
4131}
4132
4133static inline VALUE
4134num_minus(VALUE a, VALUE b)
4135{
4136 if (RB_INTEGER_TYPE_P(a)) {
4137 return rb_int_minus(a, b);
4138 }
4139 else if (RB_FLOAT_TYPE_P(a)) {
4140 return rb_float_minus(a, b);
4141 }
4142 else if (RB_TYPE_P(a, T_RATIONAL)) {
4143 return rb_rational_minus(a, b);
4144 }
4145 else {
4146 return rb_funcallv(a, '-', 1, &b);
4147 }
4148}
4149
4150static inline VALUE
4151num_mul(VALUE a, VALUE b)
4152{
4153 if (RB_INTEGER_TYPE_P(a)) {
4154 return rb_int_mul(a, b);
4155 }
4156 else if (RB_FLOAT_TYPE_P(a)) {
4157 return rb_float_mul(a, b);
4158 }
4159 else if (RB_TYPE_P(a, T_RATIONAL)) {
4160 return rb_rational_mul(a, b);
4161 }
4162 else {
4163 return rb_funcallv(a, '*', 1, &b);
4164 }
4165}
4166
4167static inline VALUE
4168num_idiv(VALUE a, VALUE b)
4169{
4170 VALUE q;
4171 if (RB_INTEGER_TYPE_P(a)) {
4172 q = rb_int_idiv(a, b);
4173 }
4174 else if (RB_FLOAT_TYPE_P(a)) {
4175 q = rb_float_div(a, b);
4176 }
4177 else if (RB_TYPE_P(a, T_RATIONAL)) {
4178 q = rb_rational_div(a, b);
4179 }
4180 else {
4181 q = rb_funcallv(a, idDiv, 1, &b);
4182 }
4183
4184 if (RB_INTEGER_TYPE_P(q)) {
4185 return q;
4186 }
4187 else if (RB_FLOAT_TYPE_P(q)) {
4188 return rb_float_floor(q, 0);
4189 }
4190 else if (RB_TYPE_P(q, T_RATIONAL)) {
4191 return rb_rational_floor(q, 0);
4192 }
4193 else {
4194 return rb_funcall(q, rb_intern("floor"), 0);
4195 }
4196}
4197
4198/*
4199 * call-seq:
4200 * aseq.last -> num or nil
4201 * aseq.last(n) -> an_array
4202 *
4203 * Returns the last number in this arithmetic sequence,
4204 * or an array of the last +n+ elements.
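 *
 * For example:
 *
 *   (1..10).step(3).last     #=> 10
 *   (1..10).step(3).last(2)  #=> [7, 10]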
4205 */
4206static VALUE
4207arith_seq_last(int argc, VALUE *argv, VALUE self)
4208{
4209 VALUE b, e, s, len_1, len, last, nv, ary;
4210 int last_is_adjusted;
4211 long n;
4212
4213 e = arith_seq_end(self);
4214 if (NIL_P(e)) {
4215 rb_raise(rb_eRangeError,
4216 "cannot get the last element of endless arithmetic sequence");
4217 }
4218
4219 b = arith_seq_begin(self);
4220 s = arith_seq_step(self);
4221
4222 len_1 = num_idiv(num_minus(e, b), s);
4223 if (rb_num_negative_int_p(len_1)) {
4224 if (argc == 0) {
4225 return Qnil;
4226 }
4227 return rb_ary_new_capa(0);
4228 }
4229
4230 last = num_plus(b, num_mul(s, len_1));
4231 if ((last_is_adjusted = arith_seq_exclude_end_p(self) && rb_equal(last, e))) {
4232 last = num_minus(last, s);
4233 }
4234
4235 if (argc == 0) {
4236 return last;
4237 }
4238
4239 if (last_is_adjusted) {
4240 len = len_1;
4241 }
4242 else {
4243 len = rb_int_plus(len_1, INT2FIX(1));
4244 }
4245
4246 rb_scan_args(argc, argv, "1", &nv);
4247 if (!RB_INTEGER_TYPE_P(nv)) {
4248 nv = rb_to_int(nv);
4249 }
4250 if (RTEST(rb_int_gt(nv, len))) {
4251 nv = len;
4252 }
4253 n = NUM2LONG(nv);
4254 if (n < 0) {
4255 rb_raise(rb_eArgError, "negative array size");
4256 }
4257
4258 ary = rb_ary_new_capa(n);
4259 b = rb_int_minus(last, rb_int_mul(s, nv));
4260 while (n) {
4261 b = rb_int_plus(b, s);
4262 rb_ary_push(ary, b);
4263 --n;
4264 }
4265
4266 return ary;
4267}
4268
4269/*
4270 * call-seq:
4271 * aseq.inspect -> string
4272 *
4273 * Convert this arithmetic sequence to a printable form.
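 *
 *   # Illustrative example (output format assumed):
 *   ((1..10) % 3).inspect   # => "((1..10).%(3))"
 *   1.step(10, 3).inspect   # => "(1.step(10, 3))"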
4274 */
4275static VALUE
4276arith_seq_inspect(VALUE self)
4277{
4278 struct enumerator *e;
4279 VALUE eobj, str, eargs;
4280 int range_p;
4281
4282 TypedData_Get_Struct(self, struct enumerator, &enumerator_data_type, e);
4283
4284 eobj = rb_attr_get(self, id_receiver);
4285 if (NIL_P(eobj)) {
4286 eobj = e->obj;
4287 }
4288
4289 range_p = RTEST(rb_obj_is_kind_of(eobj, rb_cRange));
4290 str = rb_sprintf("(%s%"PRIsVALUE"%s.", range_p ? "(" : "", eobj, range_p ? ")" : "");
4291
4292 rb_str_buf_append(str, rb_id2str(e->meth));
4293
4294 eargs = rb_attr_get(eobj, id_arguments);
4295 if (NIL_P(eargs)) {
4296 eargs = e->args;
4297 }
4298 if (eargs != Qfalse) {
4299 long argc = RARRAY_LEN(eargs);
4300 const VALUE *argv = RARRAY_CONST_PTR(eargs); /* WB: no new reference */
4301
4302 if (argc > 0) {
4303 VALUE kwds = Qnil;
4304
4305 rb_str_buf_cat2(str, "(");
4306
4307 if (RB_TYPE_P(argv[argc-1], T_HASH)) {
4308 int all_key = TRUE;
4309 rb_hash_foreach(argv[argc-1], key_symbol_p, (VALUE)&all_key);
4310 if (all_key) kwds = argv[--argc];
4311 }
4312
4313 while (argc--) {
4314 VALUE arg = *argv++;
4315
4316 rb_str_append(str, rb_inspect(arg));
4317 rb_str_buf_cat2(str, ", ");
4318 }
4319 if (!NIL_P(kwds)) {
4320 rb_hash_foreach(kwds, kwd_append, str);
4321 }
4322 rb_str_set_len(str, RSTRING_LEN(str)-2); /* drop the last ", " */
4323 rb_str_buf_cat2(str, ")");
4324 }
4325 }
4326
4327 rb_str_buf_cat2(str, ")");
4328
4329 return str;
4330}
4331
4332/*
4333 * call-seq:
4334 * aseq == obj -> true or false
4335 *
4336 * Returns <code>true</code> only if +obj+ is an Enumerator::ArithmeticSequence
4337 * with equivalent begin, end, step, and exclude_end? settings.
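 *
 *   # Illustrative example (results assumed):
 *   ((1..10) % 3) == (1..10).step(3)   # => true
 *   ((1..10) % 3) == (1..10) % 2       # => false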
4338 */
4339static VALUE
4340arith_seq_eq(VALUE self, VALUE other)
4341{
4342 if (!RTEST(rb_obj_is_kind_of(other, rb_cArithSeq))) {
4343 return Qfalse;
4344 }
4345
4346 if (!rb_equal(arith_seq_begin(self), arith_seq_begin(other))) {
4347 return Qfalse;
4348 }
4349
4350 if (!rb_equal(arith_seq_end(self), arith_seq_end(other))) {
4351 return Qfalse;
4352 }
4353
4354 if (!rb_equal(arith_seq_step(self), arith_seq_step(other))) {
4355 return Qfalse;
4356 }
4357
4358 if (arith_seq_exclude_end_p(self) != arith_seq_exclude_end_p(other)) {
4359 return Qfalse;
4360 }
4361
4362 return Qtrue;
4363}
4364
4365/*
4366 * call-seq:
4367 * aseq.hash -> integer
4368 *
4369 * Computes a hash value for this arithmetic sequence.
4370 * Two arithmetic sequences with the same begin, end, step, and exclude_end?
4371 * values will generate the same hash value.
4372 *
4373 * See also Object#hash.
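 *
 *   # Illustrative example (result assumed):
 *   ((1..10) % 3).hash == (1..10).step(3).hash   # => true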
4374 */
4375static VALUE
4376arith_seq_hash(VALUE self)
4377{
4378 st_index_t hash;
4379 VALUE v;
4380
4381 hash = rb_hash_start(arith_seq_exclude_end_p(self));
4382 v = rb_hash(arith_seq_begin(self));
4383 hash = rb_hash_uint(hash, NUM2LONG(v));
4384 v = rb_hash(arith_seq_end(self));
4385 hash = rb_hash_uint(hash, NUM2LONG(v));
4386 v = rb_hash(arith_seq_step(self));
4387 hash = rb_hash_uint(hash, NUM2LONG(v));
4388 hash = rb_hash_end(hash);
4389
4390 return ST2FIX(hash);
4391}
4392
4393#define NUM_GE(x, y) RTEST(rb_num_coerce_relop((x), (y), idGE))
4394
4395struct arith_seq_gen {
4396 VALUE current;
4397 VALUE end;
4398 VALUE step;
4399 int excl;
4400};
4401
4402/*
4403 * call-seq:
4404 * aseq.each {|i| block } -> aseq
4405 * aseq.each -> aseq
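 *
 * Iterates over the elements of this arithmetic sequence, yielding each
 * value in turn to the given block, and returns self.
 *
 *   # Illustrative example (output assumed):
 *   ((1..10) % 3).each { |i| print i, " " }   # prints "1 4 7 10 "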
4406 */
4407static VALUE
4408arith_seq_each(VALUE self)
4409{
4410 VALUE c, e, s, len_1, last;
4411 int x;
4412
4413 if (!rb_block_given_p()) return self;
4414
4415 c = arith_seq_begin(self);
4416 e = arith_seq_end(self);
4417 s = arith_seq_step(self);
4418 x = arith_seq_exclude_end_p(self);
4419
4420 if (!RB_TYPE_P(s, T_COMPLEX) && ruby_float_step(c, e, s, x, TRUE)) {
4421 return self;
4422 }
4423
4424 if (NIL_P(e)) {
4425 while (1) {
4426 rb_yield(c);
4427 c = rb_int_plus(c, s);
4428 }
4429
4430 return self;
4431 }
4432
4433 if (rb_equal(s, INT2FIX(0))) {
4434 while (1) {
4435 rb_yield(c);
4436 }
4437
4438 return self;
4439 }
4440
4441 len_1 = num_idiv(num_minus(e, c), s);
4442 last = num_plus(c, num_mul(s, len_1));
4443 if (x && rb_equal(last, e)) {
4444 last = num_minus(last, s);
4445 }
4446
4447 if (rb_num_negative_int_p(s)) {
4448 while (NUM_GE(c, last)) {
4449 rb_yield(c);
4450 c = num_plus(c, s);
4451 }
4452 }
4453 else {
4454 while (NUM_GE(last, c)) {
4455 rb_yield(c);
4456 c = num_plus(c, s);
4457 }
4458 }
4459
4460 return self;
4461}
4462
4463/*
4464 * call-seq:
4465 * aseq.size -> num
4466 *
4467 * Returns the number of elements in this arithmetic sequence if it is a finite
4468 * sequence. Otherwise, returns <code>Float::INFINITY</code>.
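 *
 *   # Illustrative example (results assumed):
 *   ((1..10) % 3).size    # => 4
 *   (1..).step(2).size    # => Infinity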
4469 */
4470static VALUE
4471arith_seq_size(VALUE self)
4472{
4473 VALUE b, e, s, len_1, len, last;
4474 int x;
4475
4476 b = arith_seq_begin(self);
4477 e = arith_seq_end(self);
4478 s = arith_seq_step(self);
4479 x = arith_seq_exclude_end_p(self);
4480
4481 if (RB_FLOAT_TYPE_P(b) || RB_FLOAT_TYPE_P(e) || RB_FLOAT_TYPE_P(s)) {
4482 double ee, n;
4483
4484 if (NIL_P(e)) {
4485 if (rb_num_negative_int_p(s)) {
4486 ee = -HUGE_VAL;
4487 }
4488 else {
4489 ee = HUGE_VAL;
4490 }
4491 }
4492 else {
4493 ee = NUM2DBL(e);
4494 }
4495
4496 n = ruby_float_step_size(NUM2DBL(b), ee, NUM2DBL(s), x);
4497 if (isinf(n)) return DBL2NUM(n);
4498 if (POSFIXABLE(n)) return LONG2FIX((long)n);
4499 return rb_dbl2big(n);
4500 }
4501
4502 if (NIL_P(e)) {
4503 return DBL2NUM(HUGE_VAL);
4504 }
4505
4506 if (!rb_obj_is_kind_of(s, rb_cNumeric)) {
4507 s = rb_to_int(s);
4508 }
4509
4510 if (rb_equal(s, INT2FIX(0))) {
4511 return DBL2NUM(HUGE_VAL);
4512 }
4513
4514 len_1 = rb_int_idiv(rb_int_minus(e, b), s);
4515 if (rb_num_negative_int_p(len_1)) {
4516 return INT2FIX(0);
4517 }
4518
4519 last = rb_int_plus(b, rb_int_mul(s, len_1));
4520 if (x && rb_equal(last, e)) {
4521 len = len_1;
4522 }
4523 else {
4524 len = rb_int_plus(len_1, INT2FIX(1));
4525 }
4526
4527 return len;
4528}
4529
4530#define sym(name) ID2SYM(rb_intern_const(name))
4531void
4532InitVM_Enumerator(void)
4533{
4534 ID id_private = rb_intern_const("private");
4535
4536 rb_define_method(rb_mKernel, "to_enum", obj_to_enum, -1);
4537 rb_define_method(rb_mKernel, "enum_for", obj_to_enum, -1);
4538
4539 rb_cEnumerator = rb_define_class("Enumerator", rb_cObject);
4540 rb_include_module(rb_cEnumerator, rb_mEnumerable);
4541
4542 rb_define_alloc_func(rb_cEnumerator, enumerator_allocate);
4543 rb_define_method(rb_cEnumerator, "initialize", enumerator_initialize, -1);
4544 rb_define_method(rb_cEnumerator, "initialize_copy", enumerator_init_copy, 1);
4545 rb_define_method(rb_cEnumerator, "each", enumerator_each, -1);
4546 rb_define_method(rb_cEnumerator, "each_with_index", enumerator_each_with_index, 0);
4547 rb_define_method(rb_cEnumerator, "each_with_object", enumerator_with_object, 1);
4548 rb_define_method(rb_cEnumerator, "with_index", enumerator_with_index, -1);
4549 rb_define_method(rb_cEnumerator, "with_object", enumerator_with_object, 1);
4550 rb_define_method(rb_cEnumerator, "next_values", enumerator_next_values, 0);
4551 rb_define_method(rb_cEnumerator, "peek_values", enumerator_peek_values_m, 0);
4552 rb_define_method(rb_cEnumerator, "next", enumerator_next, 0);
4553 rb_define_method(rb_cEnumerator, "peek", enumerator_peek, 0);
4554 rb_define_method(rb_cEnumerator, "feed", enumerator_feed, 1);
4555 rb_define_method(rb_cEnumerator, "rewind", enumerator_rewind, 0);
4556 rb_define_method(rb_cEnumerator, "inspect", enumerator_inspect, 0);
4557 rb_define_method(rb_cEnumerator, "size", enumerator_size, 0);
4558 rb_define_method(rb_cEnumerator, "+", enumerator_plus, 1);
4560
4561 /* Lazy */
4562 rb_cLazy = rb_define_class_under(rb_cEnumerator, "Lazy", rb_cEnumerator);
4563 rb_define_method(rb_mEnumerable, "lazy", enumerable_lazy, 0);
4564
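    /*
     * The Enumerable methods that Lazy overrides are first preserved under
     * private _enumerable_* aliases, so the eager implementations stay
     * reachable (see the lazy_use_super_method table below).
     */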
4565 rb_define_alias(rb_cLazy, "_enumerable_map", "map");
4566 rb_define_alias(rb_cLazy, "_enumerable_collect", "collect");
4567 rb_define_alias(rb_cLazy, "_enumerable_flat_map", "flat_map");
4568 rb_define_alias(rb_cLazy, "_enumerable_collect_concat", "collect_concat");
4569 rb_define_alias(rb_cLazy, "_enumerable_select", "select");
4570 rb_define_alias(rb_cLazy, "_enumerable_find_all", "find_all");
4571 rb_define_alias(rb_cLazy, "_enumerable_filter", "filter");
4572 rb_define_alias(rb_cLazy, "_enumerable_filter_map", "filter_map");
4573 rb_define_alias(rb_cLazy, "_enumerable_reject", "reject");
4574 rb_define_alias(rb_cLazy, "_enumerable_grep", "grep");
4575 rb_define_alias(rb_cLazy, "_enumerable_grep_v", "grep_v");
4576 rb_define_alias(rb_cLazy, "_enumerable_zip", "zip");
4577 rb_define_alias(rb_cLazy, "_enumerable_take", "take");
4578 rb_define_alias(rb_cLazy, "_enumerable_take_while", "take_while");
4579 rb_define_alias(rb_cLazy, "_enumerable_drop", "drop");
4580 rb_define_alias(rb_cLazy, "_enumerable_drop_while", "drop_while");
4581 rb_define_alias(rb_cLazy, "_enumerable_uniq", "uniq");
4582 rb_define_private_method(rb_cLazy, "_enumerable_with_index", enumerator_with_index, -1);
4583
4584 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_map"));
4585 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_collect"));
4586 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_flat_map"));
4587 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_collect_concat"));
4588 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_select"));
4589 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_find_all"));
4590 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_filter"));
4591 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_filter_map"));
4592 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_reject"));
4593 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_grep"));
4594 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_grep_v"));
4595 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_zip"));
4596 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_take"));
4597 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_take_while"));
4598 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_drop"));
4599 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_drop_while"));
4600 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_uniq"));
4601
4602 rb_define_method(rb_cLazy, "initialize", lazy_initialize, -1);
4603 rb_define_method(rb_cLazy, "to_enum", lazy_to_enum, -1);
4604 rb_define_method(rb_cLazy, "enum_for", lazy_to_enum, -1);
4605 rb_define_method(rb_cLazy, "eager", lazy_eager, 0);
4606 rb_define_method(rb_cLazy, "map", lazy_map, 0);
4607 rb_define_method(rb_cLazy, "collect", lazy_map, 0);
4608 rb_define_method(rb_cLazy, "flat_map", lazy_flat_map, 0);
4609 rb_define_method(rb_cLazy, "collect_concat", lazy_flat_map, 0);
4610 rb_define_method(rb_cLazy, "select", lazy_select, 0);
4611 rb_define_method(rb_cLazy, "find_all", lazy_select, 0);
4612 rb_define_method(rb_cLazy, "filter", lazy_select, 0);
4613 rb_define_method(rb_cLazy, "filter_map", lazy_filter_map, 0);
4614 rb_define_method(rb_cLazy, "reject", lazy_reject, 0);
4615 rb_define_method(rb_cLazy, "grep", lazy_grep, 1);
4616 rb_define_method(rb_cLazy, "grep_v", lazy_grep_v, 1);
4617 rb_define_method(rb_cLazy, "zip", lazy_zip, -1);
4618 rb_define_method(rb_cLazy, "take", lazy_take, 1);
4619 rb_define_method(rb_cLazy, "take_while", lazy_take_while, 0);
4620 rb_define_method(rb_cLazy, "drop", lazy_drop, 1);
4621 rb_define_method(rb_cLazy, "drop_while", lazy_drop_while, 0);
4622 rb_define_method(rb_cLazy, "lazy", lazy_lazy, 0);
4623 rb_define_method(rb_cLazy, "chunk", lazy_super, -1);
4624 rb_define_method(rb_cLazy, "slice_before", lazy_super, -1);
4625 rb_define_method(rb_cLazy, "slice_after", lazy_super, -1);
4626 rb_define_method(rb_cLazy, "slice_when", lazy_super, -1);
4627 rb_define_method(rb_cLazy, "chunk_while", lazy_super, -1);
4628 rb_define_method(rb_cLazy, "uniq", lazy_uniq, 0);
4629 rb_define_method(rb_cLazy, "compact", lazy_compact, 0);
4630 rb_define_method(rb_cLazy, "with_index", lazy_with_index, -1);
4631
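    /* Maps each overridden Lazy method name to its eager _enumerable_* alias. */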
4632 lazy_use_super_method = rb_hash_new_with_size(18);
4633 rb_hash_aset(lazy_use_super_method, sym("map"), sym("_enumerable_map"));
4634 rb_hash_aset(lazy_use_super_method, sym("collect"), sym("_enumerable_collect"));
4635 rb_hash_aset(lazy_use_super_method, sym("flat_map"), sym("_enumerable_flat_map"));
4636 rb_hash_aset(lazy_use_super_method, sym("collect_concat"), sym("_enumerable_collect_concat"));
4637 rb_hash_aset(lazy_use_super_method, sym("select"), sym("_enumerable_select"));
4638 rb_hash_aset(lazy_use_super_method, sym("find_all"), sym("_enumerable_find_all"));
4639 rb_hash_aset(lazy_use_super_method, sym("filter"), sym("_enumerable_filter"));
4640 rb_hash_aset(lazy_use_super_method, sym("filter_map"), sym("_enumerable_filter_map"));
4641 rb_hash_aset(lazy_use_super_method, sym("reject"), sym("_enumerable_reject"));
4642 rb_hash_aset(lazy_use_super_method, sym("grep"), sym("_enumerable_grep"));
4643 rb_hash_aset(lazy_use_super_method, sym("grep_v"), sym("_enumerable_grep_v"));
4644 rb_hash_aset(lazy_use_super_method, sym("zip"), sym("_enumerable_zip"));
4645 rb_hash_aset(lazy_use_super_method, sym("take"), sym("_enumerable_take"));
4646 rb_hash_aset(lazy_use_super_method, sym("take_while"), sym("_enumerable_take_while"));
4647 rb_hash_aset(lazy_use_super_method, sym("drop"), sym("_enumerable_drop"));
4648 rb_hash_aset(lazy_use_super_method, sym("drop_while"), sym("_enumerable_drop_while"));
4649 rb_hash_aset(lazy_use_super_method, sym("uniq"), sym("_enumerable_uniq"));
4650 rb_hash_aset(lazy_use_super_method, sym("with_index"), sym("_enumerable_with_index"));
4651 rb_obj_freeze(lazy_use_super_method);
4652 rb_vm_register_global_object(lazy_use_super_method);
4653
4654#if 0 /* for RDoc */
4655 rb_define_method(rb_cLazy, "to_a", lazy_to_a, 0);
4656 rb_define_method(rb_cLazy, "chunk", lazy_chunk, 0);
4657 rb_define_method(rb_cLazy, "chunk_while", lazy_chunk_while, 0);
4658 rb_define_method(rb_cLazy, "slice_after", lazy_slice_after, 0);
4659 rb_define_method(rb_cLazy, "slice_before", lazy_slice_before, 0);
4660 rb_define_method(rb_cLazy, "slice_when", lazy_slice_when, 0);
4661#endif
4662 rb_define_alias(rb_cLazy, "force", "to_a");
4663
4664 rb_eStopIteration = rb_define_class("StopIteration", rb_eIndexError);
4665 rb_define_method(rb_eStopIteration, "result", stop_result, 0);
4666
4667 /* Generator */
4668 rb_cGenerator = rb_define_class_under(rb_cEnumerator, "Generator", rb_cObject);
4669 rb_include_module(rb_cGenerator, rb_mEnumerable);
4670 rb_define_alloc_func(rb_cGenerator, generator_allocate);
4671 rb_define_method(rb_cGenerator, "initialize", generator_initialize, -1);
4672 rb_define_method(rb_cGenerator, "initialize_copy", generator_init_copy, 1);
4673 rb_define_method(rb_cGenerator, "each", generator_each, -1);
4674
4675 /* Yielder */
4676 rb_cYielder = rb_define_class_under(rb_cEnumerator, "Yielder", rb_cObject);
4677 rb_define_alloc_func(rb_cYielder, yielder_allocate);
4678 rb_define_method(rb_cYielder, "initialize", yielder_initialize, 0);
4679 rb_define_method(rb_cYielder, "yield", yielder_yield, -2);
4680 rb_define_method(rb_cYielder, "<<", yielder_yield_push, 1);
4681 rb_define_method(rb_cYielder, "to_proc", yielder_to_proc, 0);
4682
4683 /* Producer */
4684 rb_cEnumProducer = rb_define_class_under(rb_cEnumerator, "Producer", rb_cObject);
4685 rb_define_alloc_func(rb_cEnumProducer, producer_allocate);
4686 rb_define_method(rb_cEnumProducer, "each", producer_each, 0);
4687 rb_define_singleton_method(rb_cEnumerator, "produce", enumerator_s_produce, -1);
4688
4689 /* Chain */
4690 rb_cEnumChain = rb_define_class_under(rb_cEnumerator, "Chain", rb_cEnumerator);
4691 rb_define_alloc_func(rb_cEnumChain, enum_chain_allocate);
4692 rb_define_method(rb_cEnumChain, "initialize", enum_chain_initialize, -2);
4693 rb_define_method(rb_cEnumChain, "initialize_copy", enum_chain_init_copy, 1);
4694 rb_define_method(rb_cEnumChain, "each", enum_chain_each, -1);
4695 rb_define_method(rb_cEnumChain, "size", enum_chain_size, 0);
4696 rb_define_method(rb_cEnumChain, "rewind", enum_chain_rewind, 0);
4697 rb_define_method(rb_cEnumChain, "inspect", enum_chain_inspect, 0);
4698 rb_undef_method(rb_cEnumChain, "feed");
4699 rb_undef_method(rb_cEnumChain, "next");
4700 rb_undef_method(rb_cEnumChain, "next_values");
4701 rb_undef_method(rb_cEnumChain, "peek");
4702 rb_undef_method(rb_cEnumChain, "peek_values");
4703
4704 /* Product */
4705 rb_cEnumProduct = rb_define_class_under(rb_cEnumerator, "Product", rb_cEnumerator);
4706 rb_define_alloc_func(rb_cEnumProduct, enum_product_allocate);
4707 rb_define_method(rb_cEnumProduct, "initialize", enum_product_initialize, -1);
4708 rb_define_method(rb_cEnumProduct, "initialize_copy", enum_product_init_copy, 1);
4709 rb_define_method(rb_cEnumProduct, "each", enum_product_each, 0);
4710 rb_define_method(rb_cEnumProduct, "size", enum_product_size, 0);
4711 rb_define_method(rb_cEnumProduct, "rewind", enum_product_rewind, 0);
4712 rb_define_method(rb_cEnumProduct, "inspect", enum_product_inspect, 0);
4713 rb_undef_method(rb_cEnumProduct, "feed");
4714 rb_undef_method(rb_cEnumProduct, "next");
4715 rb_undef_method(rb_cEnumProduct, "next_values");
4716 rb_undef_method(rb_cEnumProduct, "peek");
4717 rb_undef_method(rb_cEnumProduct, "peek_values");
4718 rb_define_singleton_method(rb_cEnumerator, "product", enumerator_s_product, -1);
4719
4720 /* ArithmeticSequence */
4721 rb_cArithSeq = rb_define_class_under(rb_cEnumerator, "ArithmeticSequence", rb_cEnumerator);
4722 rb_undef_alloc_func(rb_cArithSeq);
4723 rb_undef_method(CLASS_OF(rb_cArithSeq), "new");
4724 rb_define_method(rb_cArithSeq, "begin", arith_seq_begin, 0);
4725 rb_define_method(rb_cArithSeq, "end", arith_seq_end, 0);
4726 rb_define_method(rb_cArithSeq, "exclude_end?", arith_seq_exclude_end, 0);
4727 rb_define_method(rb_cArithSeq, "step", arith_seq_step, 0);
4728 rb_define_method(rb_cArithSeq, "first", arith_seq_first, -1);
4729 rb_define_method(rb_cArithSeq, "last", arith_seq_last, -1);
4730 rb_define_method(rb_cArithSeq, "inspect", arith_seq_inspect, 0);
4731 rb_define_method(rb_cArithSeq, "==", arith_seq_eq, 1);
4732 rb_define_method(rb_cArithSeq, "===", arith_seq_eq, 1);
4733 rb_define_method(rb_cArithSeq, "eql?", arith_seq_eq, 1);
4734 rb_define_method(rb_cArithSeq, "hash", arith_seq_hash, 0);
4735 rb_define_method(rb_cArithSeq, "each", arith_seq_each, 0);
4736 rb_define_method(rb_cArithSeq, "size", arith_seq_size, 0);
4737
4738 rb_provide("enumerator.so"); /* for backward compatibility */
4739}
4740#undef sym
4741
4742void
4743Init_Enumerator(void)
4744{
4745 id_rewind = rb_intern_const("rewind");
4746 id_next = rb_intern_const("next");
4747 id_result = rb_intern_const("result");
4748 id_receiver = rb_intern_const("receiver");
4749 id_arguments = rb_intern_const("arguments");
4750 id_memo = rb_intern_const("memo");
4751 id_method = rb_intern_const("method");
4752 id_force = rb_intern_const("force");
4753 id_to_enum = rb_intern_const("to_enum");
4754 id_each_entry = rb_intern_const("each_entry");
4755 sym_each = ID2SYM(id_each);
4756 sym_yield = ID2SYM(rb_intern_const("yield"));
4757
4758 InitVM(Enumerator);
4759}