Ruby 4.0.0dev (2025-12-14 revision ab95abd44100a8dc07e270a0783f5c6b7ce584a7)
enumerator.c (ab95abd44100a8dc07e270a0783f5c6b7ce584a7)
1/************************************************
2
3 enumerator.c - provides Enumerator class
4
5 $Author$
6
7 Copyright (C) 2001-2003 Akinori MUSHA
8
9 $Idaemons: /home/cvs/rb/enumerator/enumerator.c,v 1.1.1.1 2001/07/15 10:12:48 knu Exp $
10 $RoughId: enumerator.c,v 1.6 2003/07/27 11:03:24 nobu Exp $
11 $Id$
12
13************************************************/
14
15#include "ruby/internal/config.h"
16
17#ifdef HAVE_FLOAT_H
18#include <float.h>
19#endif
20
21#include "id.h"
22#include "internal.h"
23#include "internal/class.h"
24#include "internal/enumerator.h"
25#include "internal/error.h"
26#include "internal/hash.h"
27#include "internal/imemo.h"
28#include "internal/numeric.h"
29#include "internal/range.h"
30#include "internal/rational.h"
31#include "ruby/ruby.h"
32
33/*
34 * Document-class: Enumerator
35 *
36 * A class which allows both internal and external iteration.
37 *
38 * An Enumerator can be created by the following methods.
39 * - Object#to_enum
40 * - Object#enum_for
41 * - Enumerator.new
42 *
43 * Most methods have two forms: a block form where the contents
44 * are evaluated for each item in the enumeration, and a non-block form
45 * which returns a new Enumerator wrapping the iteration.
46 *
47 * enumerator = %w(one two three).each
48 * puts enumerator.class # => Enumerator
49 *
50 * enumerator.each_with_object("foo") do |item, obj|
51 * puts "#{obj}: #{item}"
52 * end
53 *
54 * # foo: one
55 * # foo: two
56 * # foo: three
57 *
58 * enum_with_obj = enumerator.each_with_object("foo")
59 * puts enum_with_obj.class # => Enumerator
60 *
61 * enum_with_obj.each do |item, obj|
62 * puts "#{obj}: #{item}"
63 * end
64 *
65 * # foo: one
66 * # foo: two
67 * # foo: three
68 *
69 * This allows you to chain Enumerators together. For example, you
70 * can map a list's elements to strings containing the index
71 * and the element via:
72 *
73 * puts %w[foo bar baz].map.with_index { |w, i| "#{i}:#{w}" }
74 * # => ["0:foo", "1:bar", "2:baz"]
75 *
76 * == External Iteration
77 *
78 * An Enumerator can also be used as an external iterator.
79 * For example, Enumerator#next returns the next value of the iterator
80 * or raises StopIteration if the Enumerator is at the end.
81 *
82 * e = [1,2,3].each # returns an enumerator object.
83 * puts e.next # => 1
84 * puts e.next # => 2
85 * puts e.next # => 3
86 * puts e.next # raises StopIteration
87 *
88 * +next+, +next_values+, +peek+, and +peek_values+ are the only methods
89 * which use external iteration (as does Array#zip when given a non-Array Enumerable, which uses +next+ internally).
90 *
91 * These methods do not affect other internal enumeration methods,
92 * unless the underlying iteration method itself has side effects, e.g. IO#each_line.
93 *
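 * For instance (an illustrative sketch, not part of the original text),
 * external and internal iteration keep independent positions for an
 * Array enumerator:
 *
 *   e = [1, 2, 3].each
 *   e.next   # => 1
 *   e.to_a   # => [1, 2, 3]  (internal iteration is unaffected)
 *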
94 * FrozenError will be raised if these methods are called against a frozen enumerator.
95 * Since +rewind+ and +feed+ also change state for external iteration,
96 * these methods may raise FrozenError too.
97 *
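 * For instance (an illustrative sketch), calling +next+ on a frozen
 * enumerator raises:
 *
 *   e = [1, 2, 3].each.freeze
 *   e.next   # raises FrozenError
 *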
98 * External iteration differs *significantly* from internal iteration
99 * due to using a Fiber:
100 * - The Fiber adds some overhead compared to internal enumeration.
101 * - The stacktrace will only include the stack from the Enumerator, not above.
102 * - Fiber-local variables are *not* inherited inside the Enumerator Fiber,
103 * which instead starts with no Fiber-local variables.
104 * - Fiber storage variables *are* inherited and are designed
105 * to handle Enumerator Fibers. Assigning to a Fiber storage variable
106 * only affects the current Fiber, so if you want to change state
107 * in the caller Fiber of the Enumerator Fiber, you need to use an
108 * extra indirection (e.g., use some object in the Fiber storage
109 * variable and mutate some ivar of it).
110 *
111 * Concretely:
112 *
113 * Thread.current[:fiber_local] = 1
114 * Fiber[:storage_var] = 1
115 * e = Enumerator.new do |y|
116 * p Thread.current[:fiber_local] # for external iteration: nil, for internal iteration: 1
117 * p Fiber[:storage_var] # => 1, inherited
118 * Fiber[:storage_var] += 1
119 * y << 42
120 * end
121 *
122 * p e.next # => 42
123 * p Fiber[:storage_var] # => 1 (it ran in a different Fiber)
124 *
125 * e.each { p _1 }
126 * p Fiber[:storage_var] # => 2 (it ran in the same Fiber/"stack" as the current Fiber)
127 *
128 * == Convert External Iteration to Internal Iteration
129 *
130 * You can use an external iterator to implement an internal iterator as follows:
131 *
132 * def ext_each(e)
133 * while true
134 * begin
135 * vs = e.next_values
136 * rescue StopIteration
137 * return $!.result
138 * end
139 * y = yield(*vs)
140 * e.feed y
141 * end
142 * end
143 *
144 * o = Object.new
145 *
146 * def o.each
147 * puts yield
148 * puts yield(1)
149 * puts yield(1, 2)
150 * 3
151 * end
152 *
153 * # use o.each as an internal iterator directly.
154 * puts o.each {|*x| puts x; [:b, *x] }
155 * # => [], [:b], [1], [:b, 1], [1, 2], [:b, 1, 2], 3
156 *
157 * # convert o.each to an external iterator for
158 * # implementing an internal iterator.
159 * puts ext_each(o.to_enum) {|*x| puts x; [:b, *x] }
160 * # => [], [:b], [1], [:b, 1], [1, 2], [:b, 1, 2], 3
161 *
162 */
163VALUE rb_cEnumerator;
164static VALUE rb_cLazy;
165static ID id_rewind, id_to_enum, id_each_entry;
166static ID id_next, id_result, id_receiver, id_arguments, id_memo, id_method, id_force;
167static VALUE sym_each, sym_yield;
168
169static VALUE lazy_use_super_method;
170
171extern ID ruby_static_id_cause;
172
173#define id_call idCall
174#define id_cause ruby_static_id_cause
175#define id_each idEach
176#define id_eqq idEqq
177#define id_initialize idInitialize
178#define id_size idSize
179
180VALUE rb_eStopIteration;
181
182struct enumerator {
183    VALUE obj;
184 ID meth;
185 VALUE args;
186 VALUE fib;
187 VALUE dst;
188 VALUE lookahead;
189 VALUE feedvalue;
190 VALUE stop_exc;
191 VALUE size;
192 VALUE procs;
193    rb_enumerator_size_func *size_fn;
194    int kw_splat;
195};
196
197RUBY_REFERENCES(enumerator_refs) = {
198 RUBY_REF_EDGE(struct enumerator, obj),
199 RUBY_REF_EDGE(struct enumerator, args),
200 RUBY_REF_EDGE(struct enumerator, fib),
201 RUBY_REF_EDGE(struct enumerator, dst),
202 RUBY_REF_EDGE(struct enumerator, lookahead),
203 RUBY_REF_EDGE(struct enumerator, feedvalue),
204 RUBY_REF_EDGE(struct enumerator, stop_exc),
205 RUBY_REF_EDGE(struct enumerator, size),
206 RUBY_REF_EDGE(struct enumerator, procs),
207 RUBY_REF_END
208};
209
210static VALUE rb_cGenerator, rb_cYielder, rb_cEnumProducer;
211
212struct generator {
213 VALUE proc;
214 VALUE obj;
215};
216
217struct yielder {
218 VALUE proc;
219};
220
221struct producer {
222 VALUE init;
223 VALUE proc;
224 VALUE size;
225};
226
227typedef struct MEMO *lazyenum_proc_func(VALUE, struct MEMO *, VALUE, long);
228typedef VALUE lazyenum_size_func(VALUE, VALUE);
229typedef int lazyenum_precheck_func(VALUE proc_entry);
230typedef struct {
231 lazyenum_proc_func *proc;
232 lazyenum_size_func *size;
233 lazyenum_precheck_func *precheck;
234} lazyenum_funcs;
235
236struct proc_entry {
237 VALUE proc;
238 VALUE memo;
239 const lazyenum_funcs *fn;
240};
241
242static VALUE generator_allocate(VALUE klass);
243static VALUE generator_init(VALUE obj, VALUE proc);
244
245static VALUE rb_cEnumChain;
246
247struct enum_chain {
248    VALUE enums;
249 long pos;
250};
251
252static VALUE rb_cEnumProduct;
253
254struct enum_product {
255    VALUE enums;
256};
257
258VALUE rb_cArithSeq;
259
260static const rb_data_type_t enumerator_data_type = {
261 "enumerator",
262 {
263 RUBY_REFS_LIST_PTR(enumerator_refs),
264        RUBY_TYPED_DEFAULT_FREE,
265        NULL, // Nothing allocated externally, so don't need a memsize function
266 NULL,
267 },
268 0, NULL, RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED | RUBY_TYPED_DECL_MARKING | RUBY_TYPED_EMBEDDABLE
269};
270
271static struct enumerator *
272enumerator_ptr(VALUE obj)
273{
274 struct enumerator *ptr;
275
276 TypedData_Get_Struct(obj, struct enumerator, &enumerator_data_type, ptr);
277 if (!ptr || UNDEF_P(ptr->obj)) {
278 rb_raise(rb_eArgError, "uninitialized enumerator");
279 }
280 return ptr;
281}
282
283static void
284proc_entry_mark_and_move(void *p)
285{
286 struct proc_entry *ptr = p;
287 rb_gc_mark_and_move(&ptr->proc);
288 rb_gc_mark_and_move(&ptr->memo);
289}
290
291static const rb_data_type_t proc_entry_data_type = {
292 "proc_entry",
293 {
294 proc_entry_mark_and_move,
295        RUBY_TYPED_DEFAULT_FREE,
296        NULL, // Nothing allocated externally, so don't need a memsize function
297 proc_entry_mark_and_move,
298 },
299 0, 0, RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED | RUBY_TYPED_EMBEDDABLE
300};
301
302static struct proc_entry *
303proc_entry_ptr(VALUE proc_entry)
304{
305 struct proc_entry *ptr;
306
307 TypedData_Get_Struct(proc_entry, struct proc_entry, &proc_entry_data_type, ptr);
308
309 return ptr;
310}
311
312/*
313 * call-seq:
314 * obj.to_enum(method = :each, *args) -> enum
315 * obj.enum_for(method = :each, *args) -> enum
316 * obj.to_enum(method = :each, *args) {|*args| block} -> enum
317 * obj.enum_for(method = :each, *args){|*args| block} -> enum
318 *
319 * Creates a new Enumerator which will enumerate by calling +method+ on
320 * +obj+, passing +args+ if any. Whatever is _yielded_ by +method+ becomes
321 * the values of the enumerator.
322 *
323 * If a block is given, it will be used to calculate the size of
324 * the enumerator without the need to iterate it (see Enumerator#size).
325 *
326 * === Examples
327 *
328 * str = "xyz"
329 *
330 * enum = str.enum_for(:each_byte)
331 * enum.each { |b| puts b }
332 * # => 120
333 * # => 121
334 * # => 122
335 *
336 * # protect an array from being modified by some_method
337 * a = [1, 2, 3]
338 * some_method(a.to_enum)
339 *
340 * # String#split in block form is more memory-efficient:
341 * very_large_string.split("|") { |chunk| return chunk if chunk.include?('DATE') }
342 * # This could be rewritten more idiomatically with to_enum:
343 * very_large_string.to_enum(:split, "|").lazy.grep(/DATE/).first
344 *
345 * It is typical to call to_enum when defining methods for
346 * a generic Enumerable, in case no block is passed.
347 *
348 * Here is such an example, with parameter passing and a sizing block:
349 *
350 * module Enumerable
351 * # a generic method to repeat the values of any enumerable
352 * def repeat(n)
353 * raise ArgumentError, "#{n} is negative!" if n < 0
354 * unless block_given?
355 * return to_enum(__method__, n) do # __method__ is :repeat here
356 * sz = size # Call size and multiply by n...
357 * sz * n if sz # but return nil if size itself is nil
358 * end
359 * end
360 * each do |*val|
361 * n.times { yield *val }
362 * end
363 * end
364 * end
365 *
366 * %i[hello world].repeat(2) { |w| puts w }
367 * # => Prints 'hello', 'hello', 'world', 'world'
368 * enum = (1..14).repeat(3)
369 * # => returns an Enumerator when called without a block
370 * enum.first(4) # => [1, 1, 1, 2]
371 * enum.size # => 42
372 */
373static VALUE
374obj_to_enum(int argc, VALUE *argv, VALUE obj)
375{
376 VALUE enumerator, meth = sym_each;
377
378 if (argc > 0) {
379 --argc;
380 meth = *argv++;
381 }
382 enumerator = rb_enumeratorize_with_size(obj, meth, argc, argv, 0);
383 if (rb_block_given_p()) {
384 RB_OBJ_WRITE(enumerator, &enumerator_ptr(enumerator)->size, rb_block_proc());
385 }
386 return enumerator;
387}
388
389static VALUE
390enumerator_allocate(VALUE klass)
391{
392 struct enumerator *ptr;
393 VALUE enum_obj;
394
395 enum_obj = TypedData_Make_Struct(klass, struct enumerator, &enumerator_data_type, ptr);
396 ptr->obj = Qundef;
397
398 return enum_obj;
399}
400
401static VALUE
402enumerator_init(VALUE enum_obj, VALUE obj, VALUE meth, int argc, const VALUE *argv, rb_enumerator_size_func *size_fn, VALUE size, int kw_splat)
403{
404 struct enumerator *ptr;
405
406 rb_check_frozen(enum_obj);
407 TypedData_Get_Struct(enum_obj, struct enumerator, &enumerator_data_type, ptr);
408
409 if (!ptr) {
410 rb_raise(rb_eArgError, "unallocated enumerator");
411 }
412
413 RB_OBJ_WRITE(enum_obj, &ptr->obj, obj);
414 ptr->meth = rb_to_id(meth);
415 if (argc) RB_OBJ_WRITE(enum_obj, &ptr->args, rb_ary_new4(argc, argv));
416 ptr->fib = 0;
417 ptr->dst = Qnil;
418 ptr->lookahead = Qundef;
419 ptr->feedvalue = Qundef;
420 ptr->stop_exc = Qfalse;
421 RB_OBJ_WRITE(enum_obj, &ptr->size, size);
422 ptr->size_fn = size_fn;
423 ptr->kw_splat = kw_splat;
424
425 return enum_obj;
426}
427
428static VALUE
429convert_to_feasible_size_value(VALUE obj)
430{
431 if (NIL_P(obj)) {
432 return obj;
433 }
434 else if (rb_respond_to(obj, id_call)) {
435 return obj;
436 }
437 else if (RB_FLOAT_TYPE_P(obj) && RFLOAT_VALUE(obj) == HUGE_VAL) {
438 return obj;
439 }
440 else {
441 return rb_to_int(obj);
442 }
443}
444
445/*
446 * call-seq:
447 * Enumerator.new(size = nil) { |yielder| ... }
448 *
449 * Creates a new Enumerator object, which can be used as an
450 * Enumerable.
451 *
452 * Iteration is defined by the given block, in
453 * which a "yielder" object, given as block parameter, can be used to
454 * yield a value by calling the +yield+ method (aliased as <code><<</code>):
455 *
456 * fib = Enumerator.new do |y|
457 * a = b = 1
458 * loop do
459 * y << a
460 * a, b = b, a + b
461 * end
462 * end
463 *
464 * fib.take(10) # => [1, 1, 2, 3, 5, 8, 13, 21, 34, 55]
465 *
466 * The optional parameter can be used to specify how to calculate the size
467 * in a lazy fashion (see Enumerator#size). It can either be a value or
468 * a callable object.
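 *
 * For instance (an illustrative sketch), the size may be given as a callable:
 *
 *   squares = Enumerator.new(-> { 10 }) do |y|
 *     10.times { |i| y << i * i }
 *   end
 *   squares.size      # => 10
 *   squares.first(3)  # => [0, 1, 4]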
469 */
470static VALUE
471enumerator_initialize(int argc, VALUE *argv, VALUE obj)
472{
473 VALUE iter = rb_block_proc();
474 VALUE recv = generator_init(generator_allocate(rb_cGenerator), iter);
475 VALUE arg0 = rb_check_arity(argc, 0, 1) ? argv[0] : Qnil;
476 VALUE size = convert_to_feasible_size_value(arg0);
477
478 return enumerator_init(obj, recv, sym_each, 0, 0, 0, size, false);
479}
480
481/* :nodoc: */
482static VALUE
483enumerator_init_copy(VALUE obj, VALUE orig)
484{
485 struct enumerator *ptr0, *ptr1;
486
487 if (!OBJ_INIT_COPY(obj, orig)) return obj;
488 ptr0 = enumerator_ptr(orig);
489 if (ptr0->fib) {
490 /* Fibers cannot be copied */
491 rb_raise(rb_eTypeError, "can't copy execution context");
492 }
493
494 TypedData_Get_Struct(obj, struct enumerator, &enumerator_data_type, ptr1);
495
496 if (!ptr1) {
497 rb_raise(rb_eArgError, "unallocated enumerator");
498 }
499
500 RB_OBJ_WRITE(obj, &ptr1->obj, ptr0->obj);
501 ptr1->meth = ptr0->meth;
502 RB_OBJ_WRITE(obj, &ptr1->args, ptr0->args);
503 ptr1->fib = 0;
504 ptr1->lookahead = Qundef;
505 ptr1->feedvalue = Qundef;
506 RB_OBJ_WRITE(obj, &ptr1->size, ptr0->size);
507 ptr1->size_fn = ptr0->size_fn;
508
509 return obj;
510}
511
512/*
513 * For backwards compatibility; use rb_enumeratorize_with_size
514 */
515VALUE
516rb_enumeratorize(VALUE obj, VALUE meth, int argc, const VALUE *argv)
517{
518 return rb_enumeratorize_with_size(obj, meth, argc, argv, 0);
519}
520
521static VALUE lazy_to_enum_i(VALUE self, VALUE meth, int argc, const VALUE *argv, rb_enumerator_size_func *size_fn, int kw_splat);
522static int lazy_precheck(VALUE procs);
523
524VALUE
525rb_enumeratorize_with_size_kw(VALUE obj, VALUE meth, int argc, const VALUE *argv, rb_enumerator_size_func *size_fn, int kw_splat)
526{
527 VALUE base_class = rb_cEnumerator;
528
529 if (RTEST(rb_obj_is_kind_of(obj, rb_cLazy))) {
530 base_class = rb_cLazy;
531 }
532 else if (RTEST(rb_obj_is_kind_of(obj, rb_cEnumChain))) {
533 obj = enumerator_init(enumerator_allocate(rb_cEnumerator), obj, sym_each, 0, 0, 0, Qnil, false);
534 }
535
536 return enumerator_init(enumerator_allocate(base_class),
537 obj, meth, argc, argv, size_fn, Qnil, kw_splat);
538}
539
540VALUE
541rb_enumeratorize_with_size(VALUE obj, VALUE meth, int argc, const VALUE *argv, rb_enumerator_size_func *size_fn)
542{
543 return rb_enumeratorize_with_size_kw(obj, meth, argc, argv, size_fn, rb_keyword_given_p());
544}
545
546static VALUE
547enumerator_block_call(VALUE obj, rb_block_call_func *func, VALUE arg)
548{
549 int argc = 0;
550 const VALUE *argv = 0;
551 const struct enumerator *e = enumerator_ptr(obj);
552 ID meth = e->meth;
553
554 VALUE args = e->args;
555 if (args) {
556 argc = RARRAY_LENINT(args);
557 argv = RARRAY_CONST_PTR(args);
558 }
559
560 VALUE ret = rb_block_call_kw(e->obj, meth, argc, argv, func, arg, e->kw_splat);
561
562 RB_GC_GUARD(args);
563
564 return ret;
565}
566
567/*
568 * call-seq:
569 * enum.each { |elm| block } -> obj
570 * enum.each -> enum
571 * enum.each(*appending_args) { |elm| block } -> obj
572 * enum.each(*appending_args) -> an_enumerator
573 *
574 * Iterates over the elements, passing each to the given block, according to how this Enumerator was constructed.
575 * If no block and no arguments are given, returns self.
576 *
577 * === Examples
578 *
579 * "Hello, world!".scan(/\w+/) #=> ["Hello", "world"]
580 * "Hello, world!".to_enum(:scan, /\w+/).to_a #=> ["Hello", "world"]
581 * "Hello, world!".to_enum(:scan).each(/\w+/).to_a #=> ["Hello", "world"]
582 *
583 * obj = Object.new
584 *
585 * def obj.each_arg(a, b=:b, *rest)
586 * yield a
587 * yield b
588 * yield rest
589 * :method_returned
590 * end
591 *
592 * enum = obj.to_enum :each_arg, :a, :x
593 *
594 * enum.each.to_a #=> [:a, :x, []]
595 * enum.each.equal?(enum) #=> true
596 * enum.each { |elm| elm } #=> :method_returned
597 *
598 * enum.each(:y, :z).to_a #=> [:a, :x, [:y, :z]]
599 * enum.each(:y, :z).equal?(enum) #=> false
600 * enum.each(:y, :z) { |elm| elm } #=> :method_returned
601 *
602 */
603static VALUE
604enumerator_each(int argc, VALUE *argv, VALUE obj)
605{
606 struct enumerator *e = enumerator_ptr(obj);
607
608 if (argc > 0) {
609 VALUE args = (e = enumerator_ptr(obj = rb_obj_dup(obj)))->args;
610 if (args) {
611#if SIZEOF_INT < SIZEOF_LONG
612 /* check int range overflow */
613 rb_long2int(RARRAY_LEN(args) + argc);
614#endif
615 args = rb_ary_dup(args);
616 rb_ary_cat(args, argv, argc);
617 }
618 else {
619 args = rb_ary_new4(argc, argv);
620 }
621 RB_OBJ_WRITE(obj, &e->args, args);
622 e->size = Qnil;
623 e->size_fn = 0;
624 }
625 if (!rb_block_given_p()) return obj;
626
627 if (!lazy_precheck(e->procs)) return Qnil;
628
629 return enumerator_block_call(obj, 0, obj);
630}
631
632static VALUE
633enumerator_with_index_i(RB_BLOCK_CALL_FUNC_ARGLIST(val, m))
634{
635 struct MEMO *memo = (struct MEMO *)m;
636 VALUE idx = memo->v1;
637 MEMO_V1_SET(memo, rb_int_succ(idx));
638
639 if (argc <= 1)
640 return rb_yield_values(2, val, idx);
641
642 return rb_yield_values(2, rb_ary_new4(argc, argv), idx);
643}
644
645static VALUE
646enumerator_size(VALUE obj);
647
648static VALUE
649enumerator_enum_size(VALUE obj, VALUE args, VALUE eobj)
650{
651 return enumerator_size(obj);
652}
653
654/*
655 * call-seq:
656 * e.with_index(offset = 0) {|(*args), idx| ... }
657 * e.with_index(offset = 0)
658 *
659 * Iterates the given block for each element with an index, which
660 * starts from +offset+. If no block is given, returns a new Enumerator
661 * that includes the index, starting from +offset+.
662 *
663 * +offset+:: the starting index to use
664 *
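 * For example (an illustrative sketch):
 *
 *   %w[a b c].each.with_index(1) { |item, i| puts "#{i}: #{item}" }
 *   # prints "1: a", "2: b", "3: c"
 *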
665 */
666static VALUE
667enumerator_with_index(int argc, VALUE *argv, VALUE obj)
668{
669 VALUE memo;
670
671 rb_check_arity(argc, 0, 1);
672 RETURN_SIZED_ENUMERATOR(obj, argc, argv, enumerator_enum_size);
673 memo = (!argc || NIL_P(memo = argv[0])) ? INT2FIX(0) : rb_to_int(memo);
674 return enumerator_block_call(obj, enumerator_with_index_i, (VALUE)MEMO_NEW(memo, 0, 0));
675}
676
677/*
678 * call-seq:
679 * e.each_with_index {|(*args), idx| ... }
680 * e.each_with_index
681 *
682 * Same as Enumerator#with_index(0), i.e. there is no starting offset.
683 *
684 * If no block is given, a new Enumerator is returned that includes the index.
685 *
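 * For example (an illustrative sketch):
 *
 *   %w[a b].each.each_with_index.to_a  # => [["a", 0], ["b", 1]]
 *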
686 */
687static VALUE
688enumerator_each_with_index(VALUE obj)
689{
690 return enumerator_with_index(0, NULL, obj);
691}
692
693static VALUE
694enumerator_with_object_i(RB_BLOCK_CALL_FUNC_ARGLIST(val, memo))
695{
696 if (argc <= 1)
697 return rb_yield_values(2, val, memo);
698
699 return rb_yield_values(2, rb_ary_new4(argc, argv), memo);
700}
701
702/*
703 * call-seq:
704 * e.each_with_object(obj) {|(*args), obj| ... }
705 * e.each_with_object(obj)
706 * e.with_object(obj) {|(*args), obj| ... }
707 * e.with_object(obj)
708 *
709 * Iterates the given block for each element with an arbitrary object, +obj+,
710 * and returns +obj+.
711 *
712 * If no block is given, returns a new Enumerator.
713 *
714 * === Example
715 *
716 * to_three = Enumerator.new do |y|
717 * 3.times do |x|
718 * y << x
719 * end
720 * end
721 *
722 * to_three_with_string = to_three.with_object("foo")
723 * to_three_with_string.each do |x,string|
724 * puts "#{string}: #{x}"
725 * end
726 *
727 * # => foo: 0
728 * # => foo: 1
729 * # => foo: 2
730 */
731static VALUE
732enumerator_with_object(VALUE obj, VALUE memo)
733{
734 RETURN_SIZED_ENUMERATOR(obj, 1, &memo, enumerator_enum_size);
735 enumerator_block_call(obj, enumerator_with_object_i, memo);
736
737 return memo;
738}
739
740static VALUE
741next_ii(RB_BLOCK_CALL_FUNC_ARGLIST(i, obj))
742{
743 struct enumerator *e = enumerator_ptr(obj);
744 VALUE feedvalue = Qnil;
745 VALUE args = rb_ary_new4(argc, argv);
746 rb_fiber_yield(1, &args);
747 if (!UNDEF_P(e->feedvalue)) {
748 feedvalue = e->feedvalue;
749 e->feedvalue = Qundef;
750 }
751 return feedvalue;
752}
753
754static VALUE
755next_i(RB_BLOCK_CALL_FUNC_ARGLIST(_, obj))
756{
757 struct enumerator *e = enumerator_ptr(obj);
758 VALUE nil = Qnil;
759 VALUE result;
760
761 result = rb_block_call(obj, id_each, 0, 0, next_ii, obj);
762 RB_OBJ_WRITE(obj, &e->stop_exc, rb_exc_new2(rb_eStopIteration, "iteration reached an end"));
763 rb_ivar_set(e->stop_exc, id_result, result);
764 return rb_fiber_yield(1, &nil);
765}
766
767static void
768next_init(VALUE obj, struct enumerator *e)
769{
770 VALUE curr = rb_fiber_current();
771 RB_OBJ_WRITE(obj, &e->dst, curr);
772 RB_OBJ_WRITE(obj, &e->fib, rb_fiber_new(next_i, obj));
773 e->lookahead = Qundef;
774}
775
776static VALUE
777get_next_values(VALUE obj, struct enumerator *e)
778{
779 VALUE curr, vs;
780
781 if (e->stop_exc) {
782 VALUE exc = e->stop_exc;
783 VALUE result = rb_attr_get(exc, id_result);
784 VALUE mesg = rb_attr_get(exc, idMesg);
785 if (!NIL_P(mesg)) mesg = rb_str_dup(mesg);
786 VALUE stop_exc = rb_exc_new_str(rb_eStopIteration, mesg);
787 rb_ivar_set(stop_exc, id_cause, exc);
788 rb_ivar_set(stop_exc, id_result, result);
789 rb_exc_raise(stop_exc);
790 }
791
792 curr = rb_fiber_current();
793
794 if (!e->fib || !rb_fiber_alive_p(e->fib)) {
795 next_init(obj, e);
796 }
797
798 vs = rb_fiber_resume(e->fib, 1, &curr);
799 if (e->stop_exc) {
800 e->fib = 0;
801 e->dst = Qnil;
802 e->lookahead = Qundef;
803 e->feedvalue = Qundef;
804 rb_exc_raise(e->stop_exc);
805 }
806 return vs;
807}
808
809/*
810 * call-seq:
811 * e.next_values -> array
812 *
813 * Returns the next object as an array in the enumerator, and moves the
814 * internal position forward. When the position reaches the end,
815 * StopIteration is raised.
816 *
817 * See class-level notes about external iterators.
818 *
819 * This method can be used to distinguish <code>yield</code> and <code>yield
820 * nil</code>.
821 *
822 * === Example
823 *
824 * o = Object.new
825 * def o.each
826 * yield
827 * yield 1
828 * yield 1, 2
829 * yield nil
830 * yield [1, 2]
831 * end
832 * e = o.to_enum
833 * p e.next_values
834 * p e.next_values
835 * p e.next_values
836 * p e.next_values
837 * p e.next_values
838 * e = o.to_enum
839 * p e.next
840 * p e.next
841 * p e.next
842 * p e.next
843 * p e.next
844 *
845 * ## yield args next_values next
846 * # yield [] nil
847 * # yield 1 [1] 1
848 * # yield 1, 2 [1, 2] [1, 2]
849 * # yield nil [nil] nil
850 * # yield [1, 2] [[1, 2]] [1, 2]
851 *
852 */
853
854static VALUE
855enumerator_next_values(VALUE obj)
856{
857 struct enumerator *e = enumerator_ptr(obj);
858 VALUE vs;
859
860 rb_check_frozen(obj);
861
862 if (!UNDEF_P(e->lookahead)) {
863 vs = e->lookahead;
864 e->lookahead = Qundef;
865 return vs;
866 }
867
868 return get_next_values(obj, e);
869}
870
871static VALUE
872ary2sv(VALUE args, int dup)
873{
874 if (!RB_TYPE_P(args, T_ARRAY))
875 return args;
876
877 switch (RARRAY_LEN(args)) {
878 case 0:
879 return Qnil;
880
881 case 1:
882 return RARRAY_AREF(args, 0);
883
884 default:
885 if (dup)
886 return rb_ary_dup(args);
887 return args;
888 }
889}
890
891/*
892 * call-seq:
893 * e.next -> object
894 *
895 * Returns the next object in the enumerator, and moves the internal position
896 * forward. When the position reaches the end, StopIteration is raised.
897 *
898 * === Example
899 *
900 * a = [1,2,3]
901 * e = a.to_enum
902 * p e.next #=> 1
903 * p e.next #=> 2
904 * p e.next #=> 3
905 * p e.next #raises StopIteration
906 *
907 * See class-level notes about external iterators.
908 *
909 */
910
911static VALUE
912enumerator_next(VALUE obj)
913{
914 VALUE vs = enumerator_next_values(obj);
915 return ary2sv(vs, 0);
916}
917
918static VALUE
919enumerator_peek_values(VALUE obj)
920{
921 struct enumerator *e = enumerator_ptr(obj);
922
923 rb_check_frozen(obj);
924
925 if (UNDEF_P(e->lookahead)) {
926 RB_OBJ_WRITE(obj, &e->lookahead, get_next_values(obj, e));
927 }
928
929 return e->lookahead;
930}
931
932/*
933 * call-seq:
934 * e.peek_values -> array
935 *
936 * Returns the next object as an array, similar to Enumerator#next_values, but
937 * doesn't move the internal position forward. If the position is already at
938 * the end, StopIteration is raised.
939 *
940 * See class-level notes about external iterators.
941 *
942 * === Example
943 *
944 * o = Object.new
945 * def o.each
946 * yield
947 * yield 1
948 * yield 1, 2
949 * end
950 * e = o.to_enum
951 * p e.peek_values #=> []
952 * e.next
953 * p e.peek_values #=> [1]
954 * p e.peek_values #=> [1]
955 * e.next
956 * p e.peek_values #=> [1, 2]
957 * e.next
958 * p e.peek_values # raises StopIteration
959 *
960 */
961
962static VALUE
963enumerator_peek_values_m(VALUE obj)
964{
965 return rb_ary_dup(enumerator_peek_values(obj));
966}
967
968/*
969 * call-seq:
970 * e.peek -> object
971 *
972 * Returns the next object in the enumerator, but doesn't move the internal
973 * position forward. If the position is already at the end, StopIteration
974 * is raised.
975 *
976 * See class-level notes about external iterators.
977 *
978 * === Example
979 *
980 * a = [1,2,3]
981 * e = a.to_enum
982 * p e.next #=> 1
983 * p e.peek #=> 2
984 * p e.peek #=> 2
985 * p e.peek #=> 2
986 * p e.next #=> 2
987 * p e.next #=> 3
988 * p e.peek #raises StopIteration
989 *
990 */
991
992static VALUE
993enumerator_peek(VALUE obj)
994{
995 VALUE vs = enumerator_peek_values(obj);
996 return ary2sv(vs, 1);
997}
998
999/*
1000 * call-seq:
1001 * e.feed obj -> nil
1002 *
1003 * Sets the value to be returned by the next yield inside +e+.
1004 *
1005 * If the value is not set, the yield returns nil.
1006 *
1007 * This value is cleared after being yielded.
1008 *
1009 * # Array#map passes the array's elements to "yield" and collects the
1010 * # results of "yield" as an array.
1011 * # The following example shows that "next" returns the passed elements and
1012 * # values passed to "feed" are collected as an array which can be
1013 * # obtained by StopIteration#result.
1014 * e = [1,2,3].map
1015 * p e.next #=> 1
1016 * e.feed "a"
1017 * p e.next #=> 2
1018 * e.feed "b"
1019 * p e.next #=> 3
1020 * e.feed "c"
1021 * begin
1022 * e.next
1023 * rescue StopIteration
1024 * p $!.result #=> ["a", "b", "c"]
1025 * end
1026 *
1027 * o = Object.new
1028 * def o.each
1029 * x = yield # (2) blocks
1030 * p x # (5) => "foo"
1031 * x = yield # (6) blocks
1032 * p x # (8) => nil
1033 * x = yield # (9) blocks
1034 * p x # not reached w/o another e.next
1035 * end
1036 *
1037 * e = o.to_enum
1038 * e.next # (1)
1039 * e.feed "foo" # (3)
1040 * e.next # (4)
1041 * e.next # (7)
1042 * # (10)
1043 */
1044
1045static VALUE
1046enumerator_feed(VALUE obj, VALUE v)
1047{
1048 struct enumerator *e = enumerator_ptr(obj);
1049
1050 rb_check_frozen(obj);
1051
1052 if (!UNDEF_P(e->feedvalue)) {
1053 rb_raise(rb_eTypeError, "feed value already set");
1054 }
1055 RB_OBJ_WRITE(obj, &e->feedvalue, v);
1056
1057 return Qnil;
1058}
1059
1060/*
1061 * call-seq:
1062 * e.rewind -> e
1063 *
1064 * Rewinds the enumeration sequence to the beginning.
1065 *
1066 * If the enclosed object responds to a "rewind" method, it is called.
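 *
 * For example (an illustrative sketch):
 *
 *   e = [1, 2, 3].each
 *   e.next     # => 1
 *   e.next     # => 2
 *   e.rewind
 *   e.next     # => 1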
1067 */
1068
1069static VALUE
1070enumerator_rewind(VALUE obj)
1071{
1072 struct enumerator *e = enumerator_ptr(obj);
1073
1074 rb_check_frozen(obj);
1075
1076 rb_check_funcall(e->obj, id_rewind, 0, 0);
1077
1078 e->fib = 0;
1079 e->dst = Qnil;
1080 e->lookahead = Qundef;
1081 e->feedvalue = Qundef;
1082 e->stop_exc = Qfalse;
1083 return obj;
1084}
1085
1086static struct generator *generator_ptr(VALUE obj);
1087static VALUE append_method(VALUE obj, VALUE str, ID default_method, VALUE default_args);
1088
1089static VALUE
1090inspect_enumerator(VALUE obj, VALUE dummy, int recur)
1091{
1092 struct enumerator *e;
1093 VALUE eobj, str, cname;
1094
1095 TypedData_Get_Struct(obj, struct enumerator, &enumerator_data_type, e);
1096
1097 cname = rb_obj_class(obj);
1098
1099 if (!e || UNDEF_P(e->obj)) {
1100 return rb_sprintf("#<%"PRIsVALUE": uninitialized>", rb_class_path(cname));
1101 }
1102
1103 if (recur) {
1104 str = rb_sprintf("#<%"PRIsVALUE": ...>", rb_class_path(cname));
1105 return str;
1106 }
1107
1108 if (e->procs) {
1109 long i;
1110
1111 eobj = generator_ptr(e->obj)->obj;
1112        /* If procs are chained, traverse all proc entries manually to build the output. */
1113 if (rb_obj_class(eobj) == cname) {
1114 str = rb_inspect(eobj);
1115 }
1116 else {
1117 str = rb_sprintf("#<%"PRIsVALUE": %+"PRIsVALUE">", rb_class_path(cname), eobj);
1118 }
1119 for (i = 0; i < RARRAY_LEN(e->procs); i++) {
1120 str = rb_sprintf("#<%"PRIsVALUE": %"PRIsVALUE, cname, str);
1121 append_method(RARRAY_AREF(e->procs, i), str, e->meth, e->args);
1122 rb_str_buf_cat2(str, ">");
1123 }
1124 return str;
1125 }
1126
1127 eobj = rb_attr_get(obj, id_receiver);
1128 if (NIL_P(eobj)) {
1129 eobj = e->obj;
1130 }
1131
1132 /* (1..100).each_cons(2) => "#<Enumerator: 1..100:each_cons(2)>" */
1133 str = rb_sprintf("#<%"PRIsVALUE": %+"PRIsVALUE, rb_class_path(cname), eobj);
1134 append_method(obj, str, e->meth, e->args);
1135
1136 rb_str_buf_cat2(str, ">");
1137
1138 return str;
1139}
1140
1141static int
1142key_symbol_p(VALUE key, VALUE val, VALUE arg)
1143{
1144 if (SYMBOL_P(key)) return ST_CONTINUE;
1145 *(int *)arg = FALSE;
1146 return ST_STOP;
1147}
1148
1149static int
1150kwd_append(VALUE key, VALUE val, VALUE str)
1151{
1152 if (!SYMBOL_P(key)) rb_raise(rb_eRuntimeError, "non-symbol key inserted");
1153 rb_str_catf(str, "% "PRIsVALUE": %"PRIsVALUE", ", key, val);
1154 return ST_CONTINUE;
1155}
1156
1157static VALUE
1158append_method(VALUE obj, VALUE str, ID default_method, VALUE default_args)
1159{
1160 VALUE method, eargs;
1161
1162 method = rb_attr_get(obj, id_method);
1163 if (method != Qfalse) {
1164 if (!NIL_P(method)) {
1165 Check_Type(method, T_SYMBOL);
1166 method = rb_sym2str(method);
1167 }
1168 else {
1169 method = rb_id2str(default_method);
1170 }
1171 rb_str_buf_cat2(str, ":");
1172 rb_str_buf_append(str, method);
1173 }
1174
1175 eargs = rb_attr_get(obj, id_arguments);
1176 if (NIL_P(eargs)) {
1177 eargs = default_args;
1178 }
1179 if (eargs != Qfalse) {
1180 long argc = RARRAY_LEN(eargs);
1181 const VALUE *argv = RARRAY_CONST_PTR(eargs); /* WB: no new reference */
1182
1183 if (argc > 0) {
1184 VALUE kwds = Qnil;
1185
1186 rb_str_buf_cat2(str, "(");
1187
1188 if (RB_TYPE_P(argv[argc-1], T_HASH) && !RHASH_EMPTY_P(argv[argc-1])) {
1189 int all_key = TRUE;
1190 rb_hash_foreach(argv[argc-1], key_symbol_p, (VALUE)&all_key);
1191 if (all_key) kwds = argv[--argc];
1192 }
1193
1194 while (argc--) {
1195 VALUE arg = *argv++;
1196
1197 rb_str_append(str, rb_inspect(arg));
1198 rb_str_buf_cat2(str, ", ");
1199 }
1200 if (!NIL_P(kwds)) {
1201 rb_hash_foreach(kwds, kwd_append, str);
1202 }
1203 rb_str_set_len(str, RSTRING_LEN(str)-2);
1204 rb_str_buf_cat2(str, ")");
1205 }
1206 }
1207
1208 return str;
1209}
1210
1211/*
1212 * call-seq:
1213 * e.inspect -> string
1214 *
1215 * Creates a printable version of <i>e</i>.
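 *
 * For example (an illustrative sketch):
 *
 *   (1..100).each_cons(2).inspect  # => "#<Enumerator: 1..100:each_cons(2)>"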
1216 */
1217
1218static VALUE
1219enumerator_inspect(VALUE obj)
1220{
1221 return rb_exec_recursive(inspect_enumerator, obj, 0);
1222}
1223
1224/*
1225 * call-seq:
1226 * e.size -> int, Float::INFINITY or nil
1227 *
1228 * Returns the size of the enumerator, or +nil+ if it can't be calculated lazily.
1229 *
1230 * (1..100).to_a.permutation(4).size # => 94109400
1231 * loop.size # => Float::INFINITY
1232 * (1..100).drop_while.size # => nil
1233 */
1234
1235static VALUE
1236enumerator_size(VALUE obj)
1237{
1238 struct enumerator *e = enumerator_ptr(obj);
1239 int argc = 0;
1240 const VALUE *argv = NULL;
1241 VALUE size;
1242
1243 if (e->procs) {
1244 struct generator *g = generator_ptr(e->obj);
1245 VALUE receiver = rb_check_funcall(g->obj, id_size, 0, 0);
1246 long i = 0;
1247
1248 for (i = 0; i < RARRAY_LEN(e->procs); i++) {
1249 VALUE proc = RARRAY_AREF(e->procs, i);
1250 struct proc_entry *entry = proc_entry_ptr(proc);
1251 lazyenum_size_func *size_fn = entry->fn->size;
1252 if (!size_fn) {
1253 return Qnil;
1254 }
1255 receiver = (*size_fn)(proc, receiver);
1256 }
1257 return receiver;
1258 }
1259
1260 if (e->size_fn) {
1261 return (*e->size_fn)(e->obj, e->args, obj);
1262 }
1263 if (e->args) {
1264 argc = (int)RARRAY_LEN(e->args);
1265 argv = RARRAY_CONST_PTR(e->args);
1266 }
1267 size = rb_check_funcall_kw(e->size, id_call, argc, argv, e->kw_splat);
1268 if (!UNDEF_P(size)) return size;
1269 return e->size;
1270}
1271
1272/*
1273 * Yielder
1274 */
1275static void
1276yielder_mark_and_move(void *p)
1277{
1278 struct yielder *ptr = p;
1279 rb_gc_mark_and_move(&ptr->proc);
1280}
1281
1282static const rb_data_type_t yielder_data_type = {
1283 "yielder",
1284 {
1285 yielder_mark_and_move,
1286        RUBY_TYPED_DEFAULT_FREE,
1287        NULL,
1288 yielder_mark_and_move,
1289 },
1290 0, 0, RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED | RUBY_TYPED_EMBEDDABLE
1291};
1292
1293static struct yielder *
1294yielder_ptr(VALUE obj)
1295{
1296 struct yielder *ptr;
1297
1298 TypedData_Get_Struct(obj, struct yielder, &yielder_data_type, ptr);
1299 if (!ptr || UNDEF_P(ptr->proc)) {
1300 rb_raise(rb_eArgError, "uninitialized yielder");
1301 }
1302 return ptr;
1303}
1304
1305/* :nodoc: */
1306static VALUE
1307yielder_allocate(VALUE klass)
1308{
1309 struct yielder *ptr;
1310 VALUE obj;
1311
1312 obj = TypedData_Make_Struct(klass, struct yielder, &yielder_data_type, ptr);
1313 ptr->proc = Qundef;
1314
1315 return obj;
1316}
1317
1318static VALUE
1319yielder_init(VALUE obj, VALUE proc)
1320{
1321 struct yielder *ptr;
1322
1323 TypedData_Get_Struct(obj, struct yielder, &yielder_data_type, ptr);
1324
1325 if (!ptr) {
1326 rb_raise(rb_eArgError, "unallocated yielder");
1327 }
1328
1329 RB_OBJ_WRITE(obj, &ptr->proc, proc);
1330
1331 return obj;
1332}
1333
1334/* :nodoc: */
1335static VALUE
1336yielder_initialize(VALUE obj)
1337{
1338 rb_need_block();
1339
1340 return yielder_init(obj, rb_block_proc());
1341}
1342
1343/* :nodoc: */
1344static VALUE
1345yielder_yield(VALUE obj, VALUE args)
1346{
1347 struct yielder *ptr = yielder_ptr(obj);
1348
1349 return rb_proc_call_kw(ptr->proc, args, RB_PASS_CALLED_KEYWORDS);
1350}
1351
1352/* :nodoc: */
1353static VALUE
1354yielder_yield_push(VALUE obj, VALUE arg)
1355{
1356 struct yielder *ptr = yielder_ptr(obj);
1357
1358 rb_proc_call_with_block(ptr->proc, 1, &arg, Qnil);
1359
1360 return obj;
1361}
1362
1363/*
1364 * Returns a Proc object that takes arguments and yields them.
1365 *
1366 * This method is implemented so that a Yielder object can be directly
1367 * passed to another method as a block argument.
1368 *
1369 * enum = Enumerator.new { |y|
1370 * Dir.glob("*.rb") { |file|
1371 * File.open(file) { |f| f.each_line(&y) }
1372 * }
1373 * }
1374 */
1375static VALUE
1376yielder_to_proc(VALUE obj)
1377{
1378 VALUE method = rb_obj_method(obj, sym_yield);
1379
1380 return rb_funcall(method, idTo_proc, 0);
1381}
1382
1383static VALUE
1384yielder_yield_i(RB_BLOCK_CALL_FUNC_ARGLIST(obj, memo))
1385{
1386 return rb_yield_values_kw(argc, argv, RB_PASS_CALLED_KEYWORDS);
1387}
1388
1389static VALUE
1390yielder_new(void)
1391{
1392 return yielder_init(yielder_allocate(rb_cYielder), rb_proc_new(yielder_yield_i, 0));
1393}
1394
1395/*
1396 * Generator
1397 */
1398static void
1399generator_mark_and_move(void *p)
1400{
1401 struct generator *ptr = p;
1402 rb_gc_mark_and_move(&ptr->proc);
1403 rb_gc_mark_and_move(&ptr->obj);
1404}
1405
1406static const rb_data_type_t generator_data_type = {
1407 "generator",
1408 {
1409 generator_mark_and_move,
1410        RUBY_TYPED_DEFAULT_FREE,
1411        NULL,
1412 generator_mark_and_move,
1413 },
1414 0, 0, RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED | RUBY_TYPED_EMBEDDABLE
1415};
1416
1417static struct generator *
1418generator_ptr(VALUE obj)
1419{
1420 struct generator *ptr;
1421
1422 TypedData_Get_Struct(obj, struct generator, &generator_data_type, ptr);
1423 if (!ptr || UNDEF_P(ptr->proc)) {
1424 rb_raise(rb_eArgError, "uninitialized generator");
1425 }
1426 return ptr;
1427}
1428
1429/* :nodoc: */
1430static VALUE
1431generator_allocate(VALUE klass)
1432{
1433 struct generator *ptr;
1434 VALUE obj;
1435
1436 obj = TypedData_Make_Struct(klass, struct generator, &generator_data_type, ptr);
1437 ptr->proc = Qundef;
1438
1439 return obj;
1440}
1441
1442static VALUE
1443generator_init(VALUE obj, VALUE proc)
1444{
1445 struct generator *ptr;
1446
1447 rb_check_frozen(obj);
1448 TypedData_Get_Struct(obj, struct generator, &generator_data_type, ptr);
1449
1450 if (!ptr) {
1451 rb_raise(rb_eArgError, "unallocated generator");
1452 }
1453
1454 RB_OBJ_WRITE(obj, &ptr->proc, proc);
1455
1456 return obj;
1457}
1458
1459/* :nodoc: */
1460static VALUE
1461generator_initialize(int argc, VALUE *argv, VALUE obj)
1462{
1463 VALUE proc;
1464
1465 if (argc == 0) {
1466 rb_need_block();
1467
1468 proc = rb_block_proc();
1469 }
1470 else {
1471 rb_scan_args(argc, argv, "1", &proc);
1472
1473 if (!rb_obj_is_proc(proc))
1474 rb_raise(rb_eTypeError,
1475 "wrong argument type %"PRIsVALUE" (expected Proc)",
1476 rb_obj_class(proc));
1477
1478 if (rb_block_given_p()) {
1479 rb_warn("given block not used");
1480 }
1481 }
1482
1483 return generator_init(obj, proc);
1484}
1485
1486/* :nodoc: */
1487static VALUE
1488generator_init_copy(VALUE obj, VALUE orig)
1489{
1490 struct generator *ptr0, *ptr1;
1491
1492 if (!OBJ_INIT_COPY(obj, orig)) return obj;
1493
1494 ptr0 = generator_ptr(orig);
1495
1496 TypedData_Get_Struct(obj, struct generator, &generator_data_type, ptr1);
1497
1498 if (!ptr1) {
1499 rb_raise(rb_eArgError, "unallocated generator");
1500 }
1501
1502 RB_OBJ_WRITE(obj, &ptr1->proc, ptr0->proc);
1503
1504 return obj;
1505}
1506
1507/* :nodoc: */
1508static VALUE
1509generator_each(int argc, VALUE *argv, VALUE obj)
1510{
1511 struct generator *ptr = generator_ptr(obj);
1512 VALUE args = rb_ary_new2(argc + 1);
1513
1514 rb_ary_push(args, yielder_new());
1515 if (argc > 0) {
1516 rb_ary_cat(args, argv, argc);
1517 }
1518
1519 return rb_proc_call_kw(ptr->proc, args, RB_PASS_CALLED_KEYWORDS);
1520}
1521
1522/* Lazy Enumerator methods */
1523static VALUE
1524enum_size(VALUE self)
1525{
1526 VALUE r = rb_check_funcall(self, id_size, 0, 0);
1527 return UNDEF_P(r) ? Qnil : r;
1528}
1529
1530static VALUE
1531lazyenum_size(VALUE self, VALUE args, VALUE eobj)
1532{
1533 return enum_size(self);
1534}
1535
1536#define lazy_receiver_size lazy_map_size
1537
1538static VALUE
1539lazy_init_iterator(RB_BLOCK_CALL_FUNC_ARGLIST(val, m))
1540{
1541 VALUE result;
1542 if (argc == 1) {
1543 VALUE args[2];
1544 args[0] = m;
1545 args[1] = val;
1546 result = rb_yield_values2(2, args);
1547 }
1548 else {
1549 VALUE args;
1550 int len = rb_long2int((long)argc + 1);
1551 VALUE *nargv = ALLOCV_N(VALUE, args, len);
1552
1553 nargv[0] = m;
1554 if (argc > 0) {
1555 MEMCPY(nargv + 1, argv, VALUE, argc);
1556 }
1557 result = rb_yield_values2(len, nargv);
1558 ALLOCV_END(args);
1559 }
1560 if (UNDEF_P(result)) rb_iter_break();
1561 return Qnil;
1562}
1563
1564static VALUE
1565lazy_init_block_i(RB_BLOCK_CALL_FUNC_ARGLIST(val, m))
1566{
1567 rb_block_call(m, id_each, argc-1, argv+1, lazy_init_iterator, val);
1568 return Qnil;
1569}
1570
1571#define memo_value v2
1572#define memo_flags u3.state
1573#define LAZY_MEMO_BREAK 1
1574#define LAZY_MEMO_PACKED 2
1575#define LAZY_MEMO_BREAK_P(memo) ((memo)->memo_flags & LAZY_MEMO_BREAK)
1576#define LAZY_MEMO_PACKED_P(memo) ((memo)->memo_flags & LAZY_MEMO_PACKED)
1577#define LAZY_MEMO_SET_BREAK(memo) ((memo)->memo_flags |= LAZY_MEMO_BREAK)
1578#define LAZY_MEMO_RESET_BREAK(memo) ((memo)->memo_flags &= ~LAZY_MEMO_BREAK)
1579#define LAZY_MEMO_SET_VALUE(memo, value) MEMO_V2_SET(memo, value)
1580#define LAZY_MEMO_SET_PACKED(memo) ((memo)->memo_flags |= LAZY_MEMO_PACKED)
1581#define LAZY_MEMO_RESET_PACKED(memo) ((memo)->memo_flags &= ~LAZY_MEMO_PACKED)
1582
1583#define LAZY_NEED_BLOCK(func) \
1584 if (!rb_block_given_p()) { \
1585 rb_raise(rb_eArgError, "tried to call lazy " #func " without a block"); \
1586 }
1587
1588static VALUE lazy_yielder_result(struct MEMO *result, VALUE yielder, VALUE procs_array, VALUE memos, long i);
1589
1590static VALUE
1591lazy_init_yielder(RB_BLOCK_CALL_FUNC_ARGLIST(_, m))
1592{
1593 VALUE yielder = RARRAY_AREF(m, 0);
1594 VALUE procs_array = RARRAY_AREF(m, 1);
1595 VALUE memos = rb_attr_get(yielder, id_memo);
1596 struct MEMO *result;
1597
1598 result = MEMO_NEW(m, rb_enum_values_pack(argc, argv),
1599 argc > 1 ? LAZY_MEMO_PACKED : 0);
1600 return lazy_yielder_result(result, yielder, procs_array, memos, 0);
1601}
1602
1603static VALUE
1604lazy_yielder_yield(struct MEMO *result, long memo_index, int argc, const VALUE *argv)
1605{
1606 VALUE m = result->v1;
1607 VALUE yielder = RARRAY_AREF(m, 0);
1608 VALUE procs_array = RARRAY_AREF(m, 1);
1609 VALUE memos = rb_attr_get(yielder, id_memo);
1610 LAZY_MEMO_SET_VALUE(result, rb_enum_values_pack(argc, argv));
1611 if (argc > 1)
1612 LAZY_MEMO_SET_PACKED(result);
1613 else
1614 LAZY_MEMO_RESET_PACKED(result);
1615 return lazy_yielder_result(result, yielder, procs_array, memos, memo_index);
1616}
1617
1618static VALUE
1619lazy_yielder_result(struct MEMO *result, VALUE yielder, VALUE procs_array, VALUE memos, long i)
1620{
1621 int cont = 1;
1622
1623 for (; i < RARRAY_LEN(procs_array); i++) {
1624 VALUE proc = RARRAY_AREF(procs_array, i);
1625 struct proc_entry *entry = proc_entry_ptr(proc);
1626 if (!(*entry->fn->proc)(proc, result, memos, i)) {
1627 cont = 0;
1628 break;
1629 }
1630 }
1631
1632 if (cont) {
1633 rb_funcall2(yielder, idLTLT, 1, &(result->memo_value));
1634 }
1635 if (LAZY_MEMO_BREAK_P(result)) {
1636 rb_iter_break();
1637 }
1638 return result->memo_value;
1639}
1640
1641static VALUE
1642lazy_init_block(RB_BLOCK_CALL_FUNC_ARGLIST(val, m))
1643{
1644 VALUE procs = RARRAY_AREF(m, 1);
1645
1646 rb_ivar_set(val, id_memo, rb_ary_new2(RARRAY_LEN(procs)));
1647 rb_block_call(RARRAY_AREF(m, 0), id_each, 0, 0,
1648 lazy_init_yielder, rb_ary_new3(2, val, procs));
1649 return Qnil;
1650}
1651
1652static VALUE
1653lazy_generator_init(VALUE enumerator, VALUE procs)
1654{
1655    VALUE generator;
1656    VALUE obj;
1657 struct generator *gen_ptr;
1658 struct enumerator *e = enumerator_ptr(enumerator);
1659
1660 if (RARRAY_LEN(procs) > 0) {
1661 struct generator *old_gen_ptr = generator_ptr(e->obj);
1662 obj = old_gen_ptr->obj;
1663 }
1664 else {
1665 obj = enumerator;
1666 }
1667
1668 generator = generator_allocate(rb_cGenerator);
1669
1670 rb_block_call(generator, id_initialize, 0, 0,
1671 lazy_init_block, rb_ary_new3(2, obj, procs));
1672
1673 gen_ptr = generator_ptr(generator);
1674 RB_OBJ_WRITE(generator, &gen_ptr->obj, obj);
1675
1676 return generator;
1677}
1678
1679static int
1680lazy_precheck(VALUE procs)
1681{
1682 if (RTEST(procs)) {
1683 long num_procs = RARRAY_LEN(procs), i = num_procs;
1684 while (i-- > 0) {
1685 VALUE proc = RARRAY_AREF(procs, i);
1686 struct proc_entry *entry = proc_entry_ptr(proc);
1687 lazyenum_precheck_func *precheck = entry->fn->precheck;
1688 if (precheck && !precheck(proc)) return FALSE;
1689 }
1690 }
1691
1692 return TRUE;
1693}
1694
1695/*
1696 * Document-class: Enumerator::Lazy
1697 *
1698 * Enumerator::Lazy is a special type of Enumerator that allows constructing
1699 * chains of operations without evaluating them immediately, and evaluating
1700 * values on an as-needed basis. In order to do so, it redefines most of the
1701 * Enumerable methods so that they just construct another lazy enumerator.
1702 *
1703 * Enumerator::Lazy can be constructed from any Enumerable with the
1704 * Enumerable#lazy method.
1705 *
1706 * lazy = (1..Float::INFINITY).lazy.select(&:odd?).drop(10).take_while { |i| i < 30 }
1707 * # => #<Enumerator::Lazy: #<Enumerator::Lazy: #<Enumerator::Lazy: #<Enumerator::Lazy: 1..Infinity>:select>:drop(10)>:take_while>
1708 *
1709 * The real enumeration is performed when any non-redefined Enumerable method
1710 * is called, like Enumerable#first or Enumerable#to_a (the latter is aliased
1711 * as #force for more semantic code):
1712 *
1713 * lazy.first(2)
1714 * #=> [21, 23]
1715 *
1716 * lazy.force
1717 * #=> [21, 23, 25, 27, 29]
1718 *
1719 * Note that most Enumerable methods that can be called with or without
1720 * a block will, on Enumerator::Lazy, always require a block:
1721 *
1722 * [1, 2, 3].map #=> #<Enumerator: [1, 2, 3]:map>
1723 * [1, 2, 3].lazy.map # ArgumentError: tried to call lazy map without a block
1724 *
1725 * This class allows idiomatic calculations on long or infinite sequences, as well
1726 * as chaining of calculations without constructing intermediate arrays.
1727 *
1728 * Example for working with a slowly calculated sequence:
1729 *
1730 * require 'open-uri'
1731 *
1732 * # This will fetch all URLs before selecting
1733 * # necessary data
1734 * URLS.map { |u| JSON.parse(URI.open(u).read) }
1735 * .select { |data| data.key?('stats') }
1736 * .first(5)
1737 *
1738 * # This will fetch URLs one-by-one, only till
1739 * # there is enough data to satisfy the condition
1740 * URLS.lazy.map { |u| JSON.parse(URI.open(u).read) }
1741 * .select { |data| data.key?('stats') }
1742 * .first(5)
1743 *
1744 * Ending a chain with ".eager" generates a non-lazy enumerator, which
1745 * is suitable for returning or passing to another method that expects
1746 * a normal enumerator.
1747 *
1748 * def active_items
1749 * groups
1750 * .lazy
1751 * .flat_map(&:items)
1752 * .reject(&:disabled)
1753 * .eager
1754 * end
1755 *
1756 * # This works lazily; if a checked item is found, it stops
1757 * # iteration and does not look into remaining groups.
1758 * first_checked = active_items.find(&:checked)
1759 *
1760 * # This returns an array of items like a normal enumerator does.
1761 * all_checked = active_items.select(&:checked)
1762 *
1763 */
1764
1765/*
1766 * call-seq:
1767 * Lazy.new(obj, size=nil) { |yielder, *values| block }
1768 *
1769 * Creates a new Lazy enumerator. When the enumerator is actually enumerated
1770 * (e.g. by calling #force), +obj+ will be enumerated and each value passed
1771 * to the given block. The block can yield values back using +yielder+.
1772 * For example, to create a "filter+map" enumerator:
1773 *
1774 * def filter_map(sequence)
1775 * Lazy.new(sequence) do |yielder, *values|
1776 * result = yield *values
1777 * yielder << result if result
1778 * end
1779 * end
1780 *
1781 * filter_map(1..Float::INFINITY) {|i| i*i if i.even?}.first(5)
1782 * #=> [4, 16, 36, 64, 100]
1783 */
1784static VALUE
1785lazy_initialize(int argc, VALUE *argv, VALUE self)
1786{
1787 VALUE obj, size = Qnil;
1788    VALUE generator;
1789
1790 rb_check_arity(argc, 1, 2);
1791 LAZY_NEED_BLOCK(new);
1792 obj = argv[0];
1793 if (argc > 1) {
1794 size = argv[1];
1795 }
1796 generator = generator_allocate(rb_cGenerator);
1797 rb_block_call(generator, id_initialize, 0, 0, lazy_init_block_i, obj);
1798 enumerator_init(self, generator, sym_each, 0, 0, 0, size, 0);
1799 rb_ivar_set(self, id_receiver, obj);
1800
1801 return self;
1802}
1803
1804#if 0 /* for RDoc */
1805/*
1806 * call-seq:
1807 * lazy.to_a -> array
1808 * lazy.force -> array
1809 *
1810 * Expands +lazy+ enumerator to an array.
1811 * See Enumerable#to_a.
1812 */
1813static VALUE
1814lazy_to_a(VALUE self)
1815{
1816}
1817#endif
1818
1819static void
1820lazy_set_args(VALUE lazy, VALUE args)
1821{
1822 ID id = rb_frame_this_func();
1823 rb_ivar_set(lazy, id_method, ID2SYM(id));
1824 if (NIL_P(args)) {
1825 /* Qfalse indicates that the arguments are empty */
1826 rb_ivar_set(lazy, id_arguments, Qfalse);
1827 }
1828 else {
1829 rb_ivar_set(lazy, id_arguments, args);
1830 }
1831}
1832
1833#if 0
1834static VALUE
1835lazy_set_method(VALUE lazy, VALUE args, rb_enumerator_size_func *size_fn)
1836{
1837 struct enumerator *e = enumerator_ptr(lazy);
1838 lazy_set_args(lazy, args);
1839 e->size_fn = size_fn;
1840 return lazy;
1841}
1842#endif
1843
1844static VALUE
1845lazy_add_method(VALUE obj, int argc, VALUE *argv, VALUE args, VALUE memo,
1846 const lazyenum_funcs *fn)
1847{
1848 struct enumerator *new_e;
1849 VALUE new_obj;
1850 VALUE new_generator;
1851 VALUE new_procs;
1852 struct enumerator *e = enumerator_ptr(obj);
1853 struct proc_entry *entry;
1854 VALUE entry_obj = TypedData_Make_Struct(rb_cObject, struct proc_entry,
1855 &proc_entry_data_type, entry);
1856 if (rb_block_given_p()) {
1857 RB_OBJ_WRITE(entry_obj, &entry->proc, rb_block_proc());
1858 }
1859 entry->fn = fn;
1860 RB_OBJ_WRITE(entry_obj, &entry->memo, args);
1861
1862 lazy_set_args(entry_obj, memo);
1863
1864 new_procs = RTEST(e->procs) ? rb_ary_dup(e->procs) : rb_ary_new();
1865 new_generator = lazy_generator_init(obj, new_procs);
1866 rb_ary_push(new_procs, entry_obj);
1867
1868 new_obj = enumerator_init_copy(enumerator_allocate(rb_cLazy), obj);
1869 new_e = RTYPEDDATA_GET_DATA(new_obj);
1870 RB_OBJ_WRITE(new_obj, &new_e->obj, new_generator);
1871 RB_OBJ_WRITE(new_obj, &new_e->procs, new_procs);
1872
1873 if (argc > 0) {
1874 new_e->meth = rb_to_id(*argv++);
1875 --argc;
1876 }
1877 else {
1878 new_e->meth = id_each;
1879 }
1880
1881 RB_OBJ_WRITE(new_obj, &new_e->args, rb_ary_new4(argc, argv));
1882
1883 return new_obj;
1884}
1885
1886/*
1887 * call-seq:
1888 * e.lazy -> lazy_enumerator
1889 *
1890 * Returns an Enumerator::Lazy, which redefines most Enumerable
1891 * methods to postpone enumeration and enumerate values only on an
1892 * as-needed basis.
1893 *
1894 * === Example
1895 *
1896 * The following program finds pythagorean triples:
1897 *
1898 * def pythagorean_triples
1899 * (1..Float::INFINITY).lazy.flat_map {|z|
1900 * (1..z).flat_map {|x|
1901 * (x..z).select {|y|
1902 * x**2 + y**2 == z**2
1903 * }.map {|y|
1904 * [x, y, z]
1905 * }
1906 * }
1907 * }
1908 * end
1909 * # show first ten pythagorean triples
1910 * p pythagorean_triples.take(10).force # take is lazy, so force is needed
1911 * p pythagorean_triples.first(10) # first is eager
1912 * # show pythagorean triples less than 100
1913 * p pythagorean_triples.take_while { |*, z| z < 100 }.force
1914 */
1915static VALUE
1916enumerable_lazy(VALUE obj)
1917{
1918 VALUE result = lazy_to_enum_i(obj, sym_each, 0, 0, lazyenum_size, rb_keyword_given_p());
1919 /* Qfalse indicates that the Enumerator::Lazy has no method name */
1920 rb_ivar_set(result, id_method, Qfalse);
1921 return result;
1922}
1923
1924static VALUE
1925lazy_to_enum_i(VALUE obj, VALUE meth, int argc, const VALUE *argv, rb_enumerator_size_func *size_fn, int kw_splat)
1926{
1927 return enumerator_init(enumerator_allocate(rb_cLazy),
1928 obj, meth, argc, argv, size_fn, Qnil, kw_splat);
1929}
1930
1931/*
1932 * call-seq:
1933 * lzy.to_enum(method = :each, *args) -> lazy_enum
1934 * lzy.enum_for(method = :each, *args) -> lazy_enum
1935 * lzy.to_enum(method = :each, *args) {|*args| block } -> lazy_enum
1936 * lzy.enum_for(method = :each, *args) {|*args| block } -> lazy_enum
1937 *
1938 * Similar to Object#to_enum, except it returns a lazy enumerator.
1939 * This makes it easy to define Enumerable methods that will
1940 * naturally remain lazy if called from a lazy enumerator.
1941 *
1942 * For example, continuing from the example in Object#to_enum:
1943 *
1944 * # See Object#to_enum for the definition of repeat
1945 * r = 1..Float::INFINITY
1946 * r.repeat(2).first(5) # => [1, 1, 2, 2, 3]
1947 * r.repeat(2).class # => Enumerator
1948 * r.repeat(2).map{|n| n ** 2}.first(5) # => endless loop!
1949 * # works naturally on lazy enumerator:
1950 * r.lazy.repeat(2).class # => Enumerator::Lazy
1951 * r.lazy.repeat(2).map{|n| n ** 2}.first(5) # => [1, 1, 4, 4, 9]
1952 */
1953
1954static VALUE
1955lazy_to_enum(int argc, VALUE *argv, VALUE self)
1956{
1957 VALUE lazy, meth = sym_each, super_meth;
1958
1959 if (argc > 0) {
1960 --argc;
1961 meth = *argv++;
1962 }
1963 if (RTEST((super_meth = rb_hash_aref(lazy_use_super_method, meth)))) {
1964 meth = super_meth;
1965 }
1966 lazy = lazy_to_enum_i(self, meth, argc, argv, 0, rb_keyword_given_p());
1967 if (rb_block_given_p()) {
1968 RB_OBJ_WRITE(lazy, &enumerator_ptr(lazy)->size, rb_block_proc());
1969 }
1970 return lazy;
1971}
1972
1973static VALUE
1974lazy_eager_size(VALUE self, VALUE args, VALUE eobj)
1975{
1976 return enum_size(self);
1977}
1978
1979/*
1980 * call-seq:
1981 * lzy.eager -> enum
1982 *
1983 * Returns a non-lazy Enumerator converted from the lazy enumerator.
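 *
 * For example (an illustrative sketch):
 *
 *   (1..Float::INFINITY).lazy.map { |i| i * 2 }.eager.class  # => Enumerator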
1984 */
1985
1986static VALUE
1987lazy_eager(VALUE self)
1988{
1989 return enumerator_init(enumerator_allocate(rb_cEnumerator),
1990 self, sym_each, 0, 0, lazy_eager_size, Qnil, 0);
1991}
1992
1993static VALUE
1994lazyenum_yield(VALUE proc_entry, struct MEMO *result)
1995{
1996 struct proc_entry *entry = proc_entry_ptr(proc_entry);
1997 return rb_proc_call_with_block(entry->proc, 1, &result->memo_value, Qnil);
1998}
1999
2000static VALUE
2001lazyenum_yield_values(VALUE proc_entry, struct MEMO *result)
2002{
2003 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2004 int argc = 1;
2005 const VALUE *argv = &result->memo_value;
2006 if (LAZY_MEMO_PACKED_P(result)) {
2007 const VALUE args = *argv;
2008 argc = RARRAY_LENINT(args);
2009 argv = RARRAY_CONST_PTR(args);
2010 }
2011 return rb_proc_call_with_block(entry->proc, argc, argv, Qnil);
2012}
2013
2014static struct MEMO *
2015lazy_map_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2016{
2017 VALUE value = lazyenum_yield_values(proc_entry, result);
2018 LAZY_MEMO_SET_VALUE(result, value);
2019 LAZY_MEMO_RESET_PACKED(result);
2020 return result;
2021}
2022
2023static VALUE
2024lazy_map_size(VALUE entry, VALUE receiver)
2025{
2026 return receiver;
2027}
2028
2029static const lazyenum_funcs lazy_map_funcs = {
2030 lazy_map_proc, lazy_map_size,
2031};
2032
2033/*
2034 * call-seq:
2035 * lazy.collect { |obj| block } -> lazy_enumerator
2036 * lazy.map { |obj| block } -> lazy_enumerator
2037 *
2038 * Like Enumerable#map, but chains operation to be lazy-evaluated.
2039 *
2040 * (1..Float::INFINITY).lazy.map {|i| i**2 }
2041 * #=> #<Enumerator::Lazy: #<Enumerator::Lazy: 1..Infinity>:map>
2042 * (1..Float::INFINITY).lazy.map {|i| i**2 }.first(3)
2043 * #=> [1, 4, 9]
2044 */
2045
2046static VALUE
2047lazy_map(VALUE obj)
2048{
2049 LAZY_NEED_BLOCK(map);
2050 return lazy_add_method(obj, 0, 0, Qnil, Qnil, &lazy_map_funcs);
2051}
2052
2053struct flat_map_i_arg {
2054    struct MEMO *result;
2055 long index;
2056};
2057
2058static VALUE
2059lazy_flat_map_i(RB_BLOCK_CALL_FUNC_ARGLIST(i, y))
2060{
2061 struct flat_map_i_arg *arg = (struct flat_map_i_arg *)y;
2062
2063 return lazy_yielder_yield(arg->result, arg->index, argc, argv);
2064}
2065
2066static struct MEMO *
2067lazy_flat_map_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2068{
2069 VALUE value = lazyenum_yield_values(proc_entry, result);
2070 VALUE ary = 0;
2071 const long proc_index = memo_index + 1;
2072 int break_p = LAZY_MEMO_BREAK_P(result);
2073
2074 if (RB_TYPE_P(value, T_ARRAY)) {
2075 ary = value;
2076 }
2077 else if (rb_respond_to(value, id_force) && rb_respond_to(value, id_each)) {
2078 struct flat_map_i_arg arg = {.result = result, .index = proc_index};
2079 LAZY_MEMO_RESET_BREAK(result);
2080 rb_block_call(value, id_each, 0, 0, lazy_flat_map_i, (VALUE)&arg);
2081 if (break_p) LAZY_MEMO_SET_BREAK(result);
2082 return 0;
2083 }
2084
2085 if (ary || !NIL_P(ary = rb_check_array_type(value))) {
2086 long i;
2087 LAZY_MEMO_RESET_BREAK(result);
2088 for (i = 0; i + 1 < RARRAY_LEN(ary); i++) {
2089 const VALUE argv = RARRAY_AREF(ary, i);
2090 lazy_yielder_yield(result, proc_index, 1, &argv);
2091 }
2092 if (break_p) LAZY_MEMO_SET_BREAK(result);
2093 if (i >= RARRAY_LEN(ary)) return 0;
2094 value = RARRAY_AREF(ary, i);
2095 }
2096 LAZY_MEMO_SET_VALUE(result, value);
2097 LAZY_MEMO_RESET_PACKED(result);
2098 return result;
2099}
2100
2101static const lazyenum_funcs lazy_flat_map_funcs = {
2102 lazy_flat_map_proc, 0,
2103};
2104
2105/*
2106 * call-seq:
2107 * lazy.collect_concat { |obj| block } -> a_lazy_enumerator
2108 * lazy.flat_map { |obj| block } -> a_lazy_enumerator
2109 *
2110 * Returns a new lazy enumerator with the concatenated results of running
2111 * +block+ once for every element in the lazy enumerator.
2112 *
2113 * ["foo", "bar"].lazy.flat_map {|i| i.each_char.lazy}.force
2114 * #=> ["f", "o", "o", "b", "a", "r"]
2115 *
2116 * A value +x+ returned by +block+ is decomposed if either of
2117 * the following conditions is true:
2118 *
2119 * * +x+ responds to both each and force, which means that
2120 * +x+ is a lazy enumerator.
2121 * * +x+ is an array or responds to to_ary.
2122 *
2123 * Otherwise, +x+ is contained as-is in the return value.
2124 *
2125 * [{a:1}, {b:2}].lazy.flat_map {|i| i}.force
2126 * #=> [{:a=>1}, {:b=>2}]
2127 */
2128static VALUE
2129lazy_flat_map(VALUE obj)
2130{
2131 LAZY_NEED_BLOCK(flat_map);
2132 return lazy_add_method(obj, 0, 0, Qnil, Qnil, &lazy_flat_map_funcs);
2133}
2134
2135static struct MEMO *
2136lazy_select_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2137{
2138 VALUE chain = lazyenum_yield(proc_entry, result);
2139 if (!RTEST(chain)) return 0;
2140 return result;
2141}
2142
2143static const lazyenum_funcs lazy_select_funcs = {
2144 lazy_select_proc, 0,
2145};
2146
2147/*
2148 * call-seq:
2149 * lazy.find_all { |obj| block } -> lazy_enumerator
2150 * lazy.select { |obj| block } -> lazy_enumerator
2151 * lazy.filter { |obj| block } -> lazy_enumerator
2152 *
2153 * Like Enumerable#select, but chains operation to be lazy-evaluated.
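 *
 * For example:
 *
 *    (1..Float::INFINITY).lazy.select { |i| i % 3 == 0 }.first(3)
 *    #=> [3, 6, 9]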
2154 */
2155static VALUE
2156lazy_select(VALUE obj)
2157{
2158 LAZY_NEED_BLOCK(select);
2159 return lazy_add_method(obj, 0, 0, Qnil, Qnil, &lazy_select_funcs);
2160}
2161
2162static struct MEMO *
2163lazy_filter_map_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2164{
2165 VALUE value = lazyenum_yield_values(proc_entry, result);
2166 if (!RTEST(value)) return 0;
2167 LAZY_MEMO_SET_VALUE(result, value);
2168 LAZY_MEMO_RESET_PACKED(result);
2169 return result;
2170}
2171
2172static const lazyenum_funcs lazy_filter_map_funcs = {
2173 lazy_filter_map_proc, 0,
2174};
2175
2176/*
2177 * call-seq:
2178 * lazy.filter_map { |obj| block } -> lazy_enumerator
2179 *
2180 * Like Enumerable#filter_map, but chains operation to be lazy-evaluated.
2181 *
2182 * (1..).lazy.filter_map { |i| i * 2 if i.even? }.first(5)
2183 * #=> [4, 8, 12, 16, 20]
2184 */
2185
2186static VALUE
2187lazy_filter_map(VALUE obj)
2188{
2189 LAZY_NEED_BLOCK(filter_map);
2190 return lazy_add_method(obj, 0, 0, Qnil, Qnil, &lazy_filter_map_funcs);
2191}
2192
2193static struct MEMO *
2194lazy_reject_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2195{
2196 VALUE chain = lazyenum_yield(proc_entry, result);
2197 if (RTEST(chain)) return 0;
2198 return result;
2199}
2200
2201static const lazyenum_funcs lazy_reject_funcs = {
2202 lazy_reject_proc, 0,
2203};
2204
2205/*
2206 * call-seq:
2207 * lazy.reject { |obj| block } -> lazy_enumerator
2208 *
2209 * Like Enumerable#reject, but chains operation to be lazy-evaluated.
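 *
 * For example:
 *
 *    (1..Float::INFINITY).lazy.reject(&:odd?).first(3)
 *    #=> [2, 4, 6]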
2210 */
2211
2212static VALUE
2213lazy_reject(VALUE obj)
2214{
2215 LAZY_NEED_BLOCK(reject);
2216 return lazy_add_method(obj, 0, 0, Qnil, Qnil, &lazy_reject_funcs);
2217}
2218
2219static struct MEMO *
2220lazy_grep_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2221{
2222 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2223 VALUE chain = rb_funcall(entry->memo, id_eqq, 1, result->memo_value);
2224 if (!RTEST(chain)) return 0;
2225 return result;
2226}
2227
2228static struct MEMO *
2229lazy_grep_iter_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2230{
2231 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2232 VALUE value, chain = rb_funcall(entry->memo, id_eqq, 1, result->memo_value);
2233
2234 if (!RTEST(chain)) return 0;
2235 value = rb_proc_call_with_block(entry->proc, 1, &(result->memo_value), Qnil);
2236 LAZY_MEMO_SET_VALUE(result, value);
2237 LAZY_MEMO_RESET_PACKED(result);
2238
2239 return result;
2240}
2241
2242static const lazyenum_funcs lazy_grep_iter_funcs = {
2243 lazy_grep_iter_proc, 0,
2244};
2245
2246static const lazyenum_funcs lazy_grep_funcs = {
2247 lazy_grep_proc, 0,
2248};
2249
2250/*
2251 * call-seq:
2252 * lazy.grep(pattern) -> lazy_enumerator
2253 * lazy.grep(pattern) { |obj| block } -> lazy_enumerator
2254 *
2255 * Like Enumerable#grep, but chains operation to be lazy-evaluated.
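 *
 * For example:
 *
 *    %w[foo bar baz].lazy.grep(/ba/).first(2)
 *    #=> ["bar", "baz"]
 *    (1..Float::INFINITY).lazy.grep(2..5) { |i| i * 10 }.first(2)
 *    #=> [20, 30]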
2256 */
2257
2258static VALUE
2259lazy_grep(VALUE obj, VALUE pattern)
2260{
2261 const lazyenum_funcs *const funcs = rb_block_given_p() ?
2262 &lazy_grep_iter_funcs : &lazy_grep_funcs;
2263 return lazy_add_method(obj, 0, 0, pattern, rb_ary_new3(1, pattern), funcs);
2264}
2265
2266static struct MEMO *
2267lazy_grep_v_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2268{
2269 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2270 VALUE chain = rb_funcall(entry->memo, id_eqq, 1, result->memo_value);
2271 if (RTEST(chain)) return 0;
2272 return result;
2273}
2274
2275static struct MEMO *
2276lazy_grep_v_iter_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2277{
2278 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2279 VALUE value, chain = rb_funcall(entry->memo, id_eqq, 1, result->memo_value);
2280
2281 if (RTEST(chain)) return 0;
2282 value = rb_proc_call_with_block(entry->proc, 1, &(result->memo_value), Qnil);
2283 LAZY_MEMO_SET_VALUE(result, value);
2284 LAZY_MEMO_RESET_PACKED(result);
2285
2286 return result;
2287}
2288
2289static const lazyenum_funcs lazy_grep_v_iter_funcs = {
2290 lazy_grep_v_iter_proc, 0,
2291};
2292
2293static const lazyenum_funcs lazy_grep_v_funcs = {
2294 lazy_grep_v_proc, 0,
2295};
2296
2297/*
2298 * call-seq:
2299 * lazy.grep_v(pattern) -> lazy_enumerator
2300 * lazy.grep_v(pattern) { |obj| block } -> lazy_enumerator
2301 *
2302 * Like Enumerable#grep_v, but chains operation to be lazy-evaluated.
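 *
 * For example:
 *
 *    (1..Float::INFINITY).lazy.grep_v(2..5).first(3)
 *    #=> [1, 6, 7]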
2303 */
2304
2305static VALUE
2306lazy_grep_v(VALUE obj, VALUE pattern)
2307{
2308 const lazyenum_funcs *const funcs = rb_block_given_p() ?
2309 &lazy_grep_v_iter_funcs : &lazy_grep_v_funcs;
2310 return lazy_add_method(obj, 0, 0, pattern, rb_ary_new3(1, pattern), funcs);
2311}
2312
2313static VALUE
2314call_next(VALUE obj)
2315{
2316 return rb_funcall(obj, id_next, 0);
2317}
2318
2319static VALUE
2320next_stopped(VALUE obj, VALUE _)
2321{
2322 return Qnil;
2323}
2324
2325static struct MEMO *
2326lazy_zip_arrays_func(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2327{
2328 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2329 VALUE ary, arrays = entry->memo;
2330 VALUE memo = rb_ary_entry(memos, memo_index);
2331 long i, count = NIL_P(memo) ? 0 : NUM2LONG(memo);
2332
2333 ary = rb_ary_new2(RARRAY_LEN(arrays) + 1);
2334 rb_ary_push(ary, result->memo_value);
2335 for (i = 0; i < RARRAY_LEN(arrays); i++) {
2336 rb_ary_push(ary, rb_ary_entry(RARRAY_AREF(arrays, i), count));
2337 }
2338 LAZY_MEMO_SET_VALUE(result, ary);
2339 rb_ary_store(memos, memo_index, LONG2NUM(++count));
2340 return result;
2341}
2342
2343static struct MEMO *
2344lazy_zip_func(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2345{
2346 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2347 VALUE arg = rb_ary_entry(memos, memo_index);
2348 VALUE zip_args = entry->memo;
2349 VALUE ary, v;
2350 long i;
2351
2352 if (NIL_P(arg)) {
2353 arg = rb_ary_new2(RARRAY_LEN(zip_args));
2354 for (i = 0; i < RARRAY_LEN(zip_args); i++) {
2355 rb_ary_push(arg, rb_funcall(RARRAY_AREF(zip_args, i), id_to_enum, 0));
2356 }
2357 rb_ary_store(memos, memo_index, arg);
2358 }
2359
2360 ary = rb_ary_new2(RARRAY_LEN(arg) + 1);
2361 rb_ary_push(ary, result->memo_value);
2362 for (i = 0; i < RARRAY_LEN(arg); i++) {
2363         v = rb_rescue2(call_next, RARRAY_AREF(arg, i), next_stopped, 0,
2364                        rb_eStopIteration, (VALUE)0);
2365 rb_ary_push(ary, v);
2366 }
2367 LAZY_MEMO_SET_VALUE(result, ary);
2368 return result;
2369}
2370
2371static const lazyenum_funcs lazy_zip_funcs[] = {
2372 {lazy_zip_func, lazy_receiver_size,},
2373 {lazy_zip_arrays_func, lazy_receiver_size,},
2374};
2375
2376/*
2377 * call-seq:
2378 * lazy.zip(arg, ...) -> lazy_enumerator
2379 * lazy.zip(arg, ...) { |arr| block } -> nil
2380 *
2381 * Like Enumerable#zip, but chains operation to be lazy-evaluated.
2382 * However, if a block is given to zip, values are enumerated immediately.
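 *
 * For example (a shorter argument is padded with +nil+):
 *
 *    (1..Float::INFINITY).lazy.zip("a".."c").first(4)
 *    #=> [[1, "a"], [2, "b"], [3, "c"], [4, nil]]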
2383 */
2384static VALUE
2385lazy_zip(int argc, VALUE *argv, VALUE obj)
2386{
2387 VALUE ary, v;
2388 long i;
2389 const lazyenum_funcs *funcs = &lazy_zip_funcs[1];
2390
2391 if (rb_block_given_p()) {
2392 return rb_call_super(argc, argv);
2393 }
2394
2395 ary = rb_ary_new2(argc);
2396 for (i = 0; i < argc; i++) {
2397 v = rb_check_array_type(argv[i]);
2398 if (NIL_P(v)) {
2399 for (; i < argc; i++) {
2400 if (!rb_respond_to(argv[i], id_each)) {
2401 rb_raise(rb_eTypeError, "wrong argument type %"PRIsVALUE" (must respond to :each)",
2402 rb_obj_class(argv[i]));
2403 }
2404 }
2405 ary = rb_ary_new4(argc, argv);
2406 funcs = &lazy_zip_funcs[0];
2407 break;
2408 }
2409 rb_ary_push(ary, v);
2410 }
2411
2412 return lazy_add_method(obj, 0, 0, ary, ary, funcs);
2413}
2414
2415static struct MEMO *
2416lazy_take_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2417{
2418 long remain;
2419 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2420 VALUE memo = rb_ary_entry(memos, memo_index);
2421
2422 if (NIL_P(memo)) {
2423 memo = entry->memo;
2424 }
2425
2426 remain = NUM2LONG(memo);
2427 if (--remain == 0) LAZY_MEMO_SET_BREAK(result);
2428 rb_ary_store(memos, memo_index, LONG2NUM(remain));
2429 return result;
2430}
2431
2432static VALUE
2433lazy_take_size(VALUE entry, VALUE receiver)
2434{
2435 long len = NUM2LONG(RARRAY_AREF(rb_ivar_get(entry, id_arguments), 0));
2436 if (NIL_P(receiver) || (FIXNUM_P(receiver) && FIX2LONG(receiver) < len))
2437 return receiver;
2438 return LONG2NUM(len);
2439}
2440
2441static int
2442lazy_take_precheck(VALUE proc_entry)
2443{
2444 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2445 return entry->memo != INT2FIX(0);
2446}
2447
2448static const lazyenum_funcs lazy_take_funcs = {
2449 lazy_take_proc, lazy_take_size, lazy_take_precheck,
2450};
2451
2452/*
2453 * call-seq:
2454 * lazy.take(n) -> lazy_enumerator
2455 *
2456 * Like Enumerable#take, but chains operation to be lazy-evaluated.
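 *
 * For example:
 *
 *    (1..Float::INFINITY).lazy.take(3).force
 *    #=> [1, 2, 3]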
2457 */
2458
2459static VALUE
2460lazy_take(VALUE obj, VALUE n)
2461{
2462 long len = NUM2LONG(n);
2463
2464 if (len < 0) {
2465 rb_raise(rb_eArgError, "attempt to take negative size");
2466 }
2467
2468 n = LONG2NUM(len); /* no more conversion */
2469
2470 return lazy_add_method(obj, 0, 0, n, rb_ary_new3(1, n), &lazy_take_funcs);
2471}
2472
2473static struct MEMO *
2474lazy_take_while_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2475{
2476 VALUE take = lazyenum_yield_values(proc_entry, result);
2477 if (!RTEST(take)) {
2478 LAZY_MEMO_SET_BREAK(result);
2479 return 0;
2480 }
2481 return result;
2482}
2483
2484static const lazyenum_funcs lazy_take_while_funcs = {
2485 lazy_take_while_proc, 0,
2486};
2487
2488/*
2489 * call-seq:
2490 * lazy.take_while { |obj| block } -> lazy_enumerator
2491 *
2492 * Like Enumerable#take_while, but chains operation to be lazy-evaluated.
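 *
 * For example:
 *
 *    (1..Float::INFINITY).lazy.take_while { |i| i < 4 }.force
 *    #=> [1, 2, 3]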
2493 */
2494
2495static VALUE
2496lazy_take_while(VALUE obj)
2497{
2498 LAZY_NEED_BLOCK(take_while);
2499 return lazy_add_method(obj, 0, 0, Qnil, Qnil, &lazy_take_while_funcs);
2500}
2501
2502static VALUE
2503lazy_drop_size(VALUE proc_entry, VALUE receiver)
2504{
2505 long len = NUM2LONG(RARRAY_AREF(rb_ivar_get(proc_entry, id_arguments), 0));
2506 if (NIL_P(receiver))
2507 return receiver;
2508 if (FIXNUM_P(receiver)) {
2509 len = FIX2LONG(receiver) - len;
2510 return LONG2FIX(len < 0 ? 0 : len);
2511 }
2512 return rb_funcall(receiver, '-', 1, LONG2NUM(len));
2513}
2514
2515static struct MEMO *
2516lazy_drop_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2517{
2518 long remain;
2519 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2520 VALUE memo = rb_ary_entry(memos, memo_index);
2521
2522 if (NIL_P(memo)) {
2523 memo = entry->memo;
2524 }
2525 remain = NUM2LONG(memo);
2526 if (remain > 0) {
2527 --remain;
2528 rb_ary_store(memos, memo_index, LONG2NUM(remain));
2529 return 0;
2530 }
2531
2532 return result;
2533}
2534
2535static const lazyenum_funcs lazy_drop_funcs = {
2536 lazy_drop_proc, lazy_drop_size,
2537};
2538
2539/*
2540 * call-seq:
2541 * lazy.drop(n) -> lazy_enumerator
2542 *
2543 * Like Enumerable#drop, but chains operation to be lazy-evaluated.
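 *
 * For example:
 *
 *    (1..Float::INFINITY).lazy.drop(3).first(3)
 *    #=> [4, 5, 6]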
2544 */
2545
2546static VALUE
2547lazy_drop(VALUE obj, VALUE n)
2548{
2549 long len = NUM2LONG(n);
2550 VALUE argv[2];
2551 argv[0] = sym_each;
2552 argv[1] = n;
2553
2554 if (len < 0) {
2555 rb_raise(rb_eArgError, "attempt to drop negative size");
2556 }
2557
2558 return lazy_add_method(obj, 2, argv, n, rb_ary_new3(1, n), &lazy_drop_funcs);
2559}
2560
2561static struct MEMO *
2562lazy_drop_while_proc(VALUE proc_entry, struct MEMO* result, VALUE memos, long memo_index)
2563{
2564 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2565 VALUE memo = rb_ary_entry(memos, memo_index);
2566
2567 if (NIL_P(memo)) {
2568 memo = entry->memo;
2569 }
2570
2571 if (!RTEST(memo)) {
2572 VALUE drop = lazyenum_yield_values(proc_entry, result);
2573 if (RTEST(drop)) return 0;
2574 rb_ary_store(memos, memo_index, Qtrue);
2575 }
2576 return result;
2577}
2578
2579static const lazyenum_funcs lazy_drop_while_funcs = {
2580 lazy_drop_while_proc, 0,
2581};
2582
2583/*
2584 * call-seq:
2585 * lazy.drop_while { |obj| block } -> lazy_enumerator
2586 *
2587 * Like Enumerable#drop_while, but chains operation to be lazy-evaluated.
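 *
 * For example:
 *
 *    (1..Float::INFINITY).lazy.drop_while { |i| i < 4 }.first(3)
 *    #=> [4, 5, 6]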
2588 */
2589
2590static VALUE
2591lazy_drop_while(VALUE obj)
2592{
2593 LAZY_NEED_BLOCK(drop_while);
2594 return lazy_add_method(obj, 0, 0, Qfalse, Qnil, &lazy_drop_while_funcs);
2595}
2596
2597static int
2598lazy_uniq_check(VALUE chain, VALUE memos, long memo_index)
2599{
2600 VALUE hash = rb_ary_entry(memos, memo_index);
2601
2602 if (NIL_P(hash)) {
2603 hash = rb_obj_hide(rb_hash_new());
2604 rb_ary_store(memos, memo_index, hash);
2605 }
2606
2607 return rb_hash_add_new_element(hash, chain, Qfalse);
2608}
2609
2610static struct MEMO *
2611lazy_uniq_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2612{
2613 if (lazy_uniq_check(result->memo_value, memos, memo_index)) return 0;
2614 return result;
2615}
2616
2617static struct MEMO *
2618lazy_uniq_iter_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2619{
2620 VALUE chain = lazyenum_yield(proc_entry, result);
2621
2622 if (lazy_uniq_check(chain, memos, memo_index)) return 0;
2623 return result;
2624}
2625
2626static const lazyenum_funcs lazy_uniq_iter_funcs = {
2627 lazy_uniq_iter_proc, 0,
2628};
2629
2630static const lazyenum_funcs lazy_uniq_funcs = {
2631 lazy_uniq_proc, 0,
2632};
2633
2634/*
2635 * call-seq:
2636 * lazy.uniq -> lazy_enumerator
2637 * lazy.uniq { |item| block } -> lazy_enumerator
2638 *
2639 * Like Enumerable#uniq, but chains operation to be lazy-evaluated.
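 *
 * For example:
 *
 *    [1, 1, 2, 2, 3].lazy.uniq.force
 *    #=> [1, 2, 3]
 *    (1..Float::INFINITY).lazy.uniq { |i| (i**2) % 10 }.first(6)
 *    #=> [1, 2, 3, 4, 5, 10]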
2640 */
2641
2642static VALUE
2643lazy_uniq(VALUE obj)
2644{
2645 const lazyenum_funcs *const funcs =
2646 rb_block_given_p() ? &lazy_uniq_iter_funcs : &lazy_uniq_funcs;
2647 return lazy_add_method(obj, 0, 0, Qnil, Qnil, funcs);
2648}
2649
2650static struct MEMO *
2651lazy_compact_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2652{
2653 if (NIL_P(result->memo_value)) return 0;
2654 return result;
2655}
2656
2657static const lazyenum_funcs lazy_compact_funcs = {
2658 lazy_compact_proc, 0,
2659};
2660
2661/*
2662 * call-seq:
2663 * lazy.compact -> lazy_enumerator
2664 *
2665 * Like Enumerable#compact, but chains operation to be lazy-evaluated.
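 *
 * For example:
 *
 *    [1, nil, 2, nil, 3].lazy.compact.force
 *    #=> [1, 2, 3]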
2666 */
2667
2668static VALUE
2669lazy_compact(VALUE obj)
2670{
2671 return lazy_add_method(obj, 0, 0, Qnil, Qnil, &lazy_compact_funcs);
2672}
2673
2674static struct MEMO *
2675lazy_with_index_proc(VALUE proc_entry, struct MEMO* result, VALUE memos, long memo_index)
2676{
2677 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2678 VALUE memo = rb_ary_entry(memos, memo_index);
2679 VALUE argv[2];
2680
2681 if (NIL_P(memo)) {
2682 memo = entry->memo;
2683 }
2684
2685 argv[0] = result->memo_value;
2686 argv[1] = memo;
2687 if (entry->proc) {
2688 rb_proc_call_with_block(entry->proc, 2, argv, Qnil);
2689 LAZY_MEMO_RESET_PACKED(result);
2690 }
2691 else {
2692 LAZY_MEMO_SET_VALUE(result, rb_ary_new_from_values(2, argv));
2693 LAZY_MEMO_SET_PACKED(result);
2694 }
2695 rb_ary_store(memos, memo_index, LONG2NUM(NUM2LONG(memo) + 1));
2696 return result;
2697}
2698
2699static VALUE
2700lazy_with_index_size(VALUE proc, VALUE receiver)
2701{
2702 return receiver;
2703}
2704
2705static const lazyenum_funcs lazy_with_index_funcs = {
2706 lazy_with_index_proc, lazy_with_index_size,
2707};
2708
2709/*
2710 * call-seq:
2711 * lazy.with_index(offset = 0) {|(*args), idx| block }
2712 * lazy.with_index(offset = 0)
2713 *
2714 * If a block is given, iterates the given block for each
2715 * element with an index, which starts from +offset+, and
2716 * returns a lazy enumerator that yields the same values
2717 * (without the index).
2718 *
2719 * If a block is not given, returns a new lazy enumerator that
2720 * includes the index, starting from +offset+.
2721 *
2722 * +offset+:: the starting index to use
2723 *
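 * For example:
 *
 *    ("a"..).lazy.with_index.first(3)     #=> [["a", 0], ["b", 1], ["c", 2]]
 *    ("a"..).lazy.with_index(1).first(3)  #=> [["a", 1], ["b", 2], ["c", 3]]
 *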
2724 * See Enumerator#with_index.
2725 */
2726static VALUE
2727lazy_with_index(int argc, VALUE *argv, VALUE obj)
2728{
2729 VALUE memo;
2730
2731 rb_scan_args(argc, argv, "01", &memo);
2732 if (NIL_P(memo))
2733 memo = LONG2NUM(0);
2734
2735 return lazy_add_method(obj, 0, 0, memo, rb_ary_new_from_values(1, &memo), &lazy_with_index_funcs);
2736}
2737
2738#if 0 /* for RDoc */
2739
2740/*
2741 * call-seq:
2742 * lazy.chunk { |elt| ... } -> lazy_enumerator
2743 *
2744 * Like Enumerable#chunk, but chains operation to be lazy-evaluated.
2745 */
2746static VALUE
2747lazy_chunk(VALUE self)
2748{
2749}
2750
2751/*
2752 * call-seq:
2753 * lazy.chunk_while {|elt_before, elt_after| bool } -> lazy_enumerator
2754 *
2755 * Like Enumerable#chunk_while, but chains operation to be lazy-evaluated.
2756 */
2757static VALUE
2758lazy_chunk_while(VALUE self)
2759{
2760}
2761
2762/*
2763 * call-seq:
2764 * lazy.slice_after(pattern) -> lazy_enumerator
2765 * lazy.slice_after { |elt| bool } -> lazy_enumerator
2766 *
2767 * Like Enumerable#slice_after, but chains operation to be lazy-evaluated.
2768 */
2769static VALUE
2770lazy_slice_after(VALUE self)
2771{
2772}
2773
2774/*
2775 * call-seq:
2776 * lazy.slice_before(pattern) -> lazy_enumerator
2777 * lazy.slice_before { |elt| bool } -> lazy_enumerator
2778 *
2779 * Like Enumerable#slice_before, but chains operation to be lazy-evaluated.
2780 */
2781static VALUE
2782lazy_slice_before(VALUE self)
2783{
2784}
2785
2786/*
2787 * call-seq:
2788 * lazy.slice_when {|elt_before, elt_after| bool } -> lazy_enumerator
2789 *
2790 * Like Enumerable#slice_when, but chains operation to be lazy-evaluated.
2791 */
2792static VALUE
2793lazy_slice_when(VALUE self)
2794{
2795}
2796# endif
2797
2798static VALUE
2799lazy_super(int argc, VALUE *argv, VALUE lazy)
2800{
2801 return enumerable_lazy(rb_call_super(argc, argv));
2802}
2803
2804/*
2805 * call-seq:
2806 * enum.lazy -> lazy_enumerator
2807 *
2808 * Returns self.
2809 */
2810
2811static VALUE
2812lazy_lazy(VALUE obj)
2813{
2814 return obj;
2815}
2816
2817/*
2818 * Document-class: StopIteration
2819 *
2820 * Raised to stop the iteration, in particular by Enumerator#next. It is
2821 * rescued by Kernel#loop.
2822 *
2823 * loop do
2824 * puts "Hello"
2825 * raise StopIteration
2826 * puts "World"
2827 * end
2828 * puts "Done!"
2829 *
2830 * <em>produces:</em>
2831 *
2832 * Hello
2833 * Done!
2834 */
2835
2836/*
2837 * call-seq:
2838 * result -> value
2839 *
2840 * Returns the return value of the iterator.
2841 *
2842 * o = Object.new
2843 * def o.each
2844 * yield 1
2845 * yield 2
2846 * yield 3
2847 * 100
2848 * end
2849 *
2850 * e = o.to_enum
2851 *
2852 * puts e.next #=> 1
2853 * puts e.next #=> 2
2854 * puts e.next #=> 3
2855 *
2856 * begin
2857 * e.next
2858 * rescue StopIteration => ex
2859 * puts ex.result #=> 100
2860 * end
2861 *
2862 */
2863
2864static VALUE
2865stop_result(VALUE self)
2866{
2867 return rb_attr_get(self, id_result);
2868}
2869
2870/*
2871 * Producer
2872 */
2873
2874static void
2875producer_mark_and_move(void *p)
2876{
2877 struct producer *ptr = p;
2878 rb_gc_mark_and_move(&ptr->init);
2879 rb_gc_mark_and_move(&ptr->proc);
2880 rb_gc_mark_and_move(&ptr->size);
2881}
2882
2883#define producer_free RUBY_TYPED_DEFAULT_FREE
2884
2885static size_t
2886producer_memsize(const void *p)
2887{
2888 return sizeof(struct producer);
2889}
2890
2891static const rb_data_type_t producer_data_type = {
2892 "producer",
2893 {
2894 producer_mark_and_move,
2895 producer_free,
2896 producer_memsize,
2897 producer_mark_and_move,
2898 },
2899 0, 0, RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED | RUBY_TYPED_EMBEDDABLE
2900};
2901
2902static struct producer *
2903producer_ptr(VALUE obj)
2904{
2905 struct producer *ptr;
2906
2907 TypedData_Get_Struct(obj, struct producer, &producer_data_type, ptr);
2908 if (!ptr || UNDEF_P(ptr->proc)) {
2909 rb_raise(rb_eArgError, "uninitialized producer");
2910 }
2911 return ptr;
2912}
2913
2914/* :nodoc: */
2915static VALUE
2916producer_allocate(VALUE klass)
2917{
2918 struct producer *ptr;
2919 VALUE obj;
2920
2921 obj = TypedData_Make_Struct(klass, struct producer, &producer_data_type, ptr);
2922 ptr->init = Qundef;
2923 ptr->proc = Qundef;
2924 ptr->size = Qnil;
2925
2926 return obj;
2927}
2928
2929static VALUE
2930producer_init(VALUE obj, VALUE init, VALUE proc, VALUE size)
2931{
2932 struct producer *ptr;
2933
2934 TypedData_Get_Struct(obj, struct producer, &producer_data_type, ptr);
2935
2936 if (!ptr) {
2937 rb_raise(rb_eArgError, "unallocated producer");
2938 }
2939
2940 RB_OBJ_WRITE(obj, &ptr->init, init);
2941 RB_OBJ_WRITE(obj, &ptr->proc, proc);
2942 RB_OBJ_WRITE(obj, &ptr->size, size);
2943
2944 return obj;
2945}
2946
2947static VALUE
2948producer_each_stop(VALUE dummy, VALUE exc)
2949{
2950 return rb_attr_get(exc, id_result);
2951}
2952
2953NORETURN(static VALUE producer_each_i(VALUE obj));
2954
2955static VALUE
2956producer_each_i(VALUE obj)
2957{
2958 struct producer *ptr;
2959 VALUE init, proc, curr;
2960
2961 ptr = producer_ptr(obj);
2962 init = ptr->init;
2963 proc = ptr->proc;
2964
2965 if (UNDEF_P(init)) {
2966 curr = Qnil;
2967 }
2968 else {
2969 rb_yield(init);
2970 curr = init;
2971 }
2972
2973 for (;;) {
2974 curr = rb_funcall(proc, id_call, 1, curr);
2975 rb_yield(curr);
2976 }
2977
2978    UNREACHABLE_RETURN(Qnil);
2979}
2980
2981/* :nodoc: */
2982static VALUE
2983producer_each(VALUE obj)
2984{
2985 rb_need_block();
2986
2987 return rb_rescue2(producer_each_i, obj, producer_each_stop, (VALUE)0, rb_eStopIteration, (VALUE)0);
2988}
2989
2990static VALUE
2991producer_size(VALUE obj, VALUE args, VALUE eobj)
2992{
2993 struct producer *ptr = producer_ptr(obj);
2994 VALUE size = ptr->size;
2995
2996 if (NIL_P(size)) return Qnil;
2997 if (RB_INTEGER_TYPE_P(size) || RB_FLOAT_TYPE_P(size)) return size;
2998
2999 return rb_funcall(size, id_call, 0);
3000}
3001
3002/*
3003 * call-seq:
3004 * Enumerator.produce(initial = nil, size: nil) { |prev| block } -> enumerator
3005 *
3006 * Creates an infinite enumerator from any block, just called over and
3007 * over. The result of the previous iteration is passed to the next one.
3008 * If +initial+ is provided, it is passed to the first iteration, and
3009 * becomes the first element of the enumerator; if it is not provided,
3010 * the first iteration receives +nil+, and its result becomes the first
3011 * element of the enumerator.
3012 *
3013 * Raising StopIteration from the block stops an iteration.
3014 *
3015 * Enumerator.produce(1, &:succ) # => enumerator of 1, 2, 3, 4, ....
3016 *
3017 * Enumerator.produce { rand(10) } # => infinite random number sequence
3018 *
3019 * ancestors = Enumerator.produce(node) { |prev| node = prev.parent or raise StopIteration }
3020 * enclosing_section = ancestors.find { |n| n.type == :section }
3021 *
3022 * Using ::produce together with Enumerable methods like Enumerable#detect,
3023 * Enumerable#slice_after, Enumerable#take_while can provide Enumerator-based alternatives
3024 * for +while+ and +until+ cycles:
3025 *
3026 * # Find next Tuesday
3027 * require "date"
3028 * Enumerator.produce(Date.today, &:succ).detect(&:tuesday?)
3029 *
3030 * # Simple lexer:
3031 * require "strscan"
3032 * scanner = StringScanner.new("7+38/6")
3033 * PATTERN = %r{\d+|[-/+*]}
3034 * Enumerator.produce { scanner.scan(PATTERN) }.slice_after { scanner.eos? }.first
3035 * # => ["7", "+", "38", "/", "6"]
3036 *
3037 * The optional +size+ keyword argument specifies the size of the enumerator,
3038 * which can be retrieved by Enumerator#size. It can be an integer,
3039 * +Float::INFINITY+, a callable object (such as a lambda), or +nil+ to
3040 * indicate unknown size. When not specified, the size is unknown (+nil+).
3041 *
3042 * # Infinite enumerator
3043 * enum = Enumerator.produce(1, size: Float::INFINITY, &:succ)
3044 * enum.size # => Float::INFINITY
3045 *
3046 * # Finite enumerator with known/computable size
3047 * abs_dir = File.expand_path("./baz") # => "/foo/bar/baz"
3048 * traverser = Enumerator.produce(abs_dir, size: -> { abs_dir.count("/") + 1 }) {
3049 * raise StopIteration if it == "/"
3050 * File.dirname(it)
3051 * }
3052 * traverser.size # => 4
3053 */
3054static VALUE
3055enumerator_s_produce(int argc, VALUE *argv, VALUE klass)
3056{
3057 VALUE init, producer, opts, size;
3058 ID keyword_ids[1];
3059
3060 if (!rb_block_given_p()) rb_raise(rb_eArgError, "no block given");
3061
3062 keyword_ids[0] = rb_intern("size");
3063 rb_scan_args_kw(RB_SCAN_ARGS_LAST_HASH_KEYWORDS, argc, argv, "01:", &init, &opts);
3064 rb_get_kwargs(opts, keyword_ids, 0, 1, &size);
3065
3066 size = UNDEF_P(size) ? Qnil : convert_to_feasible_size_value(size);
3067
3068 if (argc == 0 || (argc == 1 && !NIL_P(opts))) {
3069 init = Qundef;
3070 }
3071
3072 producer = producer_init(producer_allocate(rb_cEnumProducer), init, rb_block_proc(), size);
3073
3074 return rb_enumeratorize_with_size_kw(producer, sym_each, 0, 0, producer_size, RB_NO_KEYWORDS);
3075}
3076
3077/*
3078 * Document-class: Enumerator::Chain
3079 *
3080 * Enumerator::Chain is a subclass of Enumerator that represents a
3081 * chain of enumerables working as a single enumerator.
3082 *
3083 * Objects of this type can be created by Enumerable#chain and
3084 * Enumerator#+.
3085 */
3086
3087static void
3088enum_chain_mark_and_move(void *p)
3089{
3090 struct enum_chain *ptr = p;
3091 rb_gc_mark_and_move(&ptr->enums);
3092}
3093
3094#define enum_chain_free RUBY_TYPED_DEFAULT_FREE
3095
3096static size_t
3097enum_chain_memsize(const void *p)
3098{
3099 return sizeof(struct enum_chain);
3100}
3101
3102static const rb_data_type_t enum_chain_data_type = {
3103 "chain",
3104 {
3105 enum_chain_mark_and_move,
3106 enum_chain_free,
3107 enum_chain_memsize,
3108 enum_chain_mark_and_move,
3109 },
3110 0, 0, RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED
3111};
3112
3113static struct enum_chain *
3114enum_chain_ptr(VALUE obj)
3115{
3116 struct enum_chain *ptr;
3117
3118 TypedData_Get_Struct(obj, struct enum_chain, &enum_chain_data_type, ptr);
3119 if (!ptr || UNDEF_P(ptr->enums)) {
3120 rb_raise(rb_eArgError, "uninitialized chain");
3121 }
3122 return ptr;
3123}
3124
3125/* :nodoc: */
3126static VALUE
3127enum_chain_allocate(VALUE klass)
3128{
3129 struct enum_chain *ptr;
3130 VALUE obj;
3131
3132 obj = TypedData_Make_Struct(klass, struct enum_chain, &enum_chain_data_type, ptr);
3133 ptr->enums = Qundef;
3134 ptr->pos = -1;
3135
3136 return obj;
3137}
3138
3139/*
3140 * call-seq:
3141 * Enumerator::Chain.new(*enums) -> enum
3142 *
3143 * Generates a new enumerator object that iterates over the elements
3144 * of given enumerable objects in sequence.
3145 *
3146 * e = Enumerator::Chain.new(1..3, [4, 5])
3147 * e.to_a #=> [1, 2, 3, 4, 5]
3148 * e.size #=> 5
3149 */
3150static VALUE
3151enum_chain_initialize(VALUE obj, VALUE enums)
3152{
3153 struct enum_chain *ptr;
3154
3155 rb_check_frozen(obj);
3156 TypedData_Get_Struct(obj, struct enum_chain, &enum_chain_data_type, ptr);
3157
3158 if (!ptr) rb_raise(rb_eArgError, "unallocated chain");
3159
3160 RB_OBJ_WRITE(obj, &ptr->enums, rb_ary_freeze(enums));
3161 ptr->pos = -1;
3162
3163 return obj;
3164}
3165
3166static VALUE
3167new_enum_chain(VALUE enums)
3168{
3169 long i;
3170 VALUE obj = enum_chain_initialize(enum_chain_allocate(rb_cEnumChain), enums);
3171
3172 for (i = 0; i < RARRAY_LEN(enums); i++) {
3173 if (RTEST(rb_obj_is_kind_of(RARRAY_AREF(enums, i), rb_cLazy))) {
3174 return enumerable_lazy(obj);
3175 }
3176 }
3177
3178 return obj;
3179}
3180
3181/* :nodoc: */
3182static VALUE
3183enum_chain_init_copy(VALUE obj, VALUE orig)
3184{
3185 struct enum_chain *ptr0, *ptr1;
3186
3187 if (!OBJ_INIT_COPY(obj, orig)) return obj;
3188 ptr0 = enum_chain_ptr(orig);
3189
3190 TypedData_Get_Struct(obj, struct enum_chain, &enum_chain_data_type, ptr1);
3191
3192 if (!ptr1) rb_raise(rb_eArgError, "unallocated chain");
3193
3194 RB_OBJ_WRITE(obj, &ptr1->enums, ptr0->enums);
3195 ptr1->pos = ptr0->pos;
3196
3197 return obj;
3198}
3199
3200static VALUE
3201enum_chain_total_size(VALUE enums)
3202{
3203 VALUE total = INT2FIX(0);
3204 long i;
3205
3206 for (i = 0; i < RARRAY_LEN(enums); i++) {
3207 VALUE size = enum_size(RARRAY_AREF(enums, i));
3208
3209 if (NIL_P(size) || (RB_FLOAT_TYPE_P(size) && isinf(NUM2DBL(size)))) {
3210 return size;
3211 }
3212 if (!RB_INTEGER_TYPE_P(size)) {
3213 return Qnil;
3214 }
3215
3216 total = rb_funcall(total, '+', 1, size);
3217 }
3218
3219 return total;
3220}
3221
3222/*
3223 * call-seq:
3224 * obj.size -> int, Float::INFINITY or nil
3225 *
3226 * Returns the total size of the enumerator chain calculated by
3227 * summing up the size of each enumerable in the chain. If any of the
3228 * enumerables reports its size as nil or Float::INFINITY, that value
3229 * is returned as the total size.
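 *
 * For example:
 *
 *    Enumerator::Chain.new(1..3, [4, 5]).size              #=> 5
 *    Enumerator::Chain.new(1..3, 1..Float::INFINITY).size  #=> Float::INFINITY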
3230 */
3231static VALUE
3232enum_chain_size(VALUE obj)
3233{
3234 return enum_chain_total_size(enum_chain_ptr(obj)->enums);
3235}
3236
3237static VALUE
3238enum_chain_enum_size(VALUE obj, VALUE args, VALUE eobj)
3239{
3240 return enum_chain_size(obj);
3241}
3242
3243static VALUE
3244enum_chain_enum_no_size(VALUE obj, VALUE args, VALUE eobj)
3245{
3246 return Qnil;
3247}
3248
3249/*
3250 * call-seq:
3251 * obj.each(*args) { |...| ... } -> obj
3252 * obj.each(*args) -> enumerator
3253 *
3254 * Iterates over the elements of the first enumerable by calling the
3255 * "each" method on it with the given arguments, then proceeds to the
3256 * following enumerables in sequence until all of the enumerables are
3257 * exhausted.
3258 *
3259 * If no block is given, returns an enumerator.
3260 */
3261static VALUE
3262enum_chain_each(int argc, VALUE *argv, VALUE obj)
3263{
3264 VALUE enums, block;
3265 struct enum_chain *objptr;
3266 long i;
3267
3268 RETURN_SIZED_ENUMERATOR(obj, argc, argv, argc > 0 ? enum_chain_enum_no_size : enum_chain_enum_size);
3269
3270 objptr = enum_chain_ptr(obj);
3271 enums = objptr->enums;
3272 block = rb_block_proc();
3273
3274 for (i = 0; i < RARRAY_LEN(enums); i++) {
3275 objptr->pos = i;
3276 rb_funcall_with_block(RARRAY_AREF(enums, i), id_each, argc, argv, block);
3277 }
3278
3279 return obj;
3280}
3281
3282/*
3283 * call-seq:
3284 * obj.rewind -> obj
3285 *
3286 * Rewinds the enumerator chain by calling the "rewind" method on each
3287 * enumerable in reverse order. Each call is performed only if the
3288 * enumerable responds to the method.
3289 */
3290static VALUE
3291enum_chain_rewind(VALUE obj)
3292{
3293 struct enum_chain *objptr = enum_chain_ptr(obj);
3294 VALUE enums = objptr->enums;
3295 long i;
3296
3297 for (i = objptr->pos; 0 <= i && i < RARRAY_LEN(enums); objptr->pos = --i) {
3298 rb_check_funcall(RARRAY_AREF(enums, i), id_rewind, 0, 0);
3299 }
3300
3301 return obj;
3302}
3303
3304static VALUE
3305inspect_enum_chain(VALUE obj, VALUE dummy, int recur)
3306{
3307 VALUE klass = rb_obj_class(obj);
3308 struct enum_chain *ptr;
3309
3310 TypedData_Get_Struct(obj, struct enum_chain, &enum_chain_data_type, ptr);
3311
3312 if (!ptr || UNDEF_P(ptr->enums)) {
3313 return rb_sprintf("#<%"PRIsVALUE": uninitialized>", rb_class_path(klass));
3314 }
3315
3316 if (recur) {
3317 return rb_sprintf("#<%"PRIsVALUE": ...>", rb_class_path(klass));
3318 }
3319
3320 return rb_sprintf("#<%"PRIsVALUE": %+"PRIsVALUE">", rb_class_path(klass), ptr->enums);
3321}
3322
3323/*
3324 * call-seq:
3325 * obj.inspect -> string
3326 *
3327 * Returns a printable version of the enumerator chain.
3328 */
3329static VALUE
3330enum_chain_inspect(VALUE obj)
3331{
3332 return rb_exec_recursive(inspect_enum_chain, obj, 0);
3333}
3334
3335/*
3336 * call-seq:
3337 * e.chain(*enums) -> enumerator
3338 *
3339 * Returns an enumerator object generated from this enumerator and
3340 * given enumerables.
3341 *
3342 * e = (1..3).chain([4, 5])
3343 * e.to_a #=> [1, 2, 3, 4, 5]
3344 */
3345static VALUE
3346enum_chain(int argc, VALUE *argv, VALUE obj)
3347{
3348 VALUE enums = rb_ary_new_from_values(1, &obj);
3349 rb_ary_cat(enums, argv, argc);
3350 return new_enum_chain(enums);
3351}
3352
3353/*
3354 * call-seq:
3355 * e + enum -> enumerator
3356 *
3357 * Returns an enumerator object generated from this enumerator and a
3358 * given enumerable.
3359 *
3360 * e = (1..3).each + [4, 5]
3361 * e.to_a #=> [1, 2, 3, 4, 5]
3362 */
3363static VALUE
3364enumerator_plus(VALUE obj, VALUE eobj)
3365{
3366 return new_enum_chain(rb_ary_new_from_args(2, obj, eobj));
3367}
3368
3369/*
3370 * call-seq:
3371 * e.to_set -> set
3372 *
3373 * Returns a set generated from this enumerator.
3374 *
3375 * e = Enumerator.new { |y| y << 1 << 1 << 2 << 3 << 5 }
3376 * e.to_set #=> #<Set: {1, 2, 3, 5}>
3377 */
3378static VALUE enumerator_to_set(int argc, VALUE *argv, VALUE obj)
3379{
3380 VALUE size = rb_funcall(obj, id_size, 0);
3381 if (RB_TYPE_P(size, T_FLOAT) && RFLOAT_VALUE(size) == INFINITY) {
3382 rb_raise(rb_eArgError, "cannot convert an infinite enumerator to a set");
3383 }
3384 return rb_call_super(argc, argv);
3385}
3386
3387/*
3388 * Document-class: Enumerator::Product
3389 *
3390 * Enumerator::Product generates a Cartesian product of any number of
3391 * enumerable objects. Iterating over the product of enumerable
3392 * objects is roughly equivalent to nested each_entry loops where the
3393 * loop for the rightmost object is put innermost.
3394 *
3395 * innings = Enumerator::Product.new(1..9, ['top', 'bottom'])
3396 *
3397 * innings.each do |i, h|
3398 * p [i, h]
3399 * end
3400 * # [1, "top"]
3401 * # [1, "bottom"]
3402 * # [2, "top"]
3403 * # [2, "bottom"]
3404 * # [3, "top"]
3405 * # [3, "bottom"]
3406 * # ...
3407 * # [9, "top"]
3408 * # [9, "bottom"]
3409 *
3410 * The method used against each enumerable object is +each_entry+
3411 * instead of +each+ so that the product of N enumerable objects
3412 * yields an array of exactly N elements in each iteration.
3413 *
3414 * When no enumerator is given, it calls a given block once, yielding
3415 * an empty array.
3416 *
3417 * Objects of this type can be created by Enumerator.product.
3418 */
3419
3420static void
3421enum_product_mark_and_move(void *p)
3422{
3423 struct enum_product *ptr = p;
3424 rb_gc_mark_and_move(&ptr->enums);
3425}
3426
3427#define enum_product_free RUBY_TYPED_DEFAULT_FREE
3428
3429static size_t
3430enum_product_memsize(const void *p)
3431{
3432 return sizeof(struct enum_product);
3433}
3434
3435static const rb_data_type_t enum_product_data_type = {
3436 "product",
3437 {
3438 enum_product_mark_and_move,
3439 enum_product_free,
3440 enum_product_memsize,
3441 enum_product_mark_and_move,
3442 },
3443 0, 0, RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED
3444};
3445
3446static struct enum_product *
3447enum_product_ptr(VALUE obj)
3448{
3449 struct enum_product *ptr;
3450
3451 TypedData_Get_Struct(obj, struct enum_product, &enum_product_data_type, ptr);
3452 if (!ptr || UNDEF_P(ptr->enums)) {
3453 rb_raise(rb_eArgError, "uninitialized product");
3454 }
3455 return ptr;
3456}
3457
3458/* :nodoc: */
3459static VALUE
3460enum_product_allocate(VALUE klass)
3461{
3462 struct enum_product *ptr;
3463 VALUE obj;
3464
3465 obj = TypedData_Make_Struct(klass, struct enum_product, &enum_product_data_type, ptr);
3466 ptr->enums = Qundef;
3467
3468 return obj;
3469}
3470
3471/*
3472 * call-seq:
3473 * Enumerator::Product.new(*enums) -> enum
3474 *
3475 * Generates a new enumerator object that generates a Cartesian
3476 * product of given enumerable objects.
3477 *
3478 * e = Enumerator::Product.new(1..3, [4, 5])
3479 * e.to_a #=> [[1, 4], [1, 5], [2, 4], [2, 5], [3, 4], [3, 5]]
3480 * e.size #=> 6
3481 */
3482static VALUE
3483enum_product_initialize(int argc, VALUE *argv, VALUE obj)
3484{
3485 struct enum_product *ptr;
3486 VALUE enums = Qnil, options = Qnil;
3487
3488 rb_scan_args(argc, argv, "*:", &enums, &options);
3489
3490 if (!NIL_P(options) && !RHASH_EMPTY_P(options)) {
3491 rb_exc_raise(rb_keyword_error_new("unknown", rb_hash_keys(options)));
3492 }
3493
3494 rb_check_frozen(obj);
3495 TypedData_Get_Struct(obj, struct enum_product, &enum_product_data_type, ptr);
3496
3497 if (!ptr) rb_raise(rb_eArgError, "unallocated product");
3498
3499 RB_OBJ_WRITE(obj, &ptr->enums, rb_ary_freeze(enums));
3500
3501 return obj;
3502}
3503
3504/* :nodoc: */
3505static VALUE
3506enum_product_init_copy(VALUE obj, VALUE orig)
3507{
3508 struct enum_product *ptr0, *ptr1;
3509
3510 if (!OBJ_INIT_COPY(obj, orig)) return obj;
3511 ptr0 = enum_product_ptr(orig);
3512
3513 TypedData_Get_Struct(obj, struct enum_product, &enum_product_data_type, ptr1);
3514
3515 if (!ptr1) rb_raise(rb_eArgError, "unallocated product");
3516
3517 RB_OBJ_WRITE(obj, &ptr1->enums, ptr0->enums);
3518
3519 return obj;
3520}
3521
3522static VALUE
3523enum_product_total_size(VALUE enums)
3524{
3525 VALUE total = INT2FIX(1);
3526 VALUE sizes = rb_ary_hidden_new(RARRAY_LEN(enums));
3527 long i;
3528
3529 for (i = 0; i < RARRAY_LEN(enums); i++) {
3530 VALUE size = enum_size(RARRAY_AREF(enums, i));
3531 if (size == INT2FIX(0)) {
3532 rb_ary_resize(sizes, 0);
3533 return size;
3534 }
3535 rb_ary_push(sizes, size);
3536 }
3537 for (i = 0; i < RARRAY_LEN(sizes); i++) {
3538 VALUE size = RARRAY_AREF(sizes, i);
3539
3540 if (NIL_P(size) || (RB_TYPE_P(size, T_FLOAT) && isinf(NUM2DBL(size)))) {
3541 return size;
3542 }
3543 if (!RB_INTEGER_TYPE_P(size)) {
3544 return Qnil;
3545 }
3546
3547 total = rb_funcall(total, '*', 1, size);
3548 }
3549
3550 return total;
3551}
3552
3553/*
3554 * call-seq:
3555 * obj.size -> int, Float::INFINITY or nil
3556 *
3557 * Returns the total size of the enumerator product calculated by
3558 * multiplying the sizes of enumerables in the product. If any of the
3559 * enumerables reports its size as nil or Float::INFINITY, that value
3560 * is returned as the size.
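 *
 * For example:
 *
 *    Enumerator::Product.new(1..3, [4, 5]).size  #=> 6
 *    Enumerator::Product.new(1..3, []).size      #=> 0
 *    Enumerator::Product.new(1..3, 1..).size     #=> Float::INFINITY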
3561 */
3562static VALUE
3563enum_product_size(VALUE obj)
3564{
3565 return enum_product_total_size(enum_product_ptr(obj)->enums);
3566}
3567
3568static VALUE
3569enum_product_enum_size(VALUE obj, VALUE args, VALUE eobj)
3570{
3571 return enum_product_size(obj);
3572}
3573
3574 struct product_state {
3575     VALUE obj;
3576 VALUE block;
3577 int argc;
3578 VALUE *argv;
3579 int index;
3580};
3581
3582static VALUE product_each(VALUE, struct product_state *);
3583
3584static VALUE
3585product_each_i(RB_BLOCK_CALL_FUNC_ARGLIST(value, state))
3586{
3587 struct product_state *pstate = (struct product_state *)state;
3588 pstate->argv[pstate->index++] = value;
3589
3590 VALUE val = product_each(pstate->obj, pstate);
3591 pstate->index--;
3592 return val;
3593}
3594
3595static VALUE
3596product_each(VALUE obj, struct product_state *pstate)
3597{
3598 struct enum_product *ptr = enum_product_ptr(obj);
3599 VALUE enums = ptr->enums;
3600
3601 if (pstate->index < pstate->argc) {
3602 VALUE eobj = RARRAY_AREF(enums, pstate->index);
3603
3604 rb_block_call(eobj, id_each_entry, 0, NULL, product_each_i, (VALUE)pstate);
3605 }
3606 else {
3607 rb_funcall(pstate->block, id_call, 1, rb_ary_new_from_values(pstate->argc, pstate->argv));
3608 }
3609
3610 return obj;
3611}
3612
3613static VALUE
3614enum_product_run(VALUE obj, VALUE block)
3615{
3616 struct enum_product *ptr = enum_product_ptr(obj);
3617 int argc = RARRAY_LENINT(ptr->enums);
3618 struct product_state state = {
3619 .obj = obj,
3620 .block = block,
3621 .index = 0,
3622 .argc = argc,
3623 .argv = ALLOCA_N(VALUE, argc),
3624 };
3625
3626 return product_each(obj, &state);
3627}
3628
3629/*
3630 * call-seq:
3631 * obj.each { |...| ... } -> obj
3632 * obj.each -> enumerator
3633 *
3634 * Iterates over the elements of the first enumerable by calling the
3635 * "each_entry" method on it with the given arguments, then proceeds
3636 * to the following enumerables in sequence until all of the
3637 * enumerables are exhausted.
3638 *
3639 * If no block is given, returns an enumerator. Otherwise, returns self.
3640 */
3641static VALUE
3642enum_product_each(VALUE obj)
3643{
3644 RETURN_SIZED_ENUMERATOR(obj, 0, 0, enum_product_enum_size);
3645
3646 return enum_product_run(obj, rb_block_proc());
3647}
3648
3649/*
3650 * call-seq:
3651 * obj.rewind -> obj
3652 *
3653 * Rewinds the product enumerator by calling the "rewind" method on
3654 * each enumerable in reverse order. Each call is performed only if
3655 * the enumerable responds to the method.
3656 */
3657static VALUE
3658enum_product_rewind(VALUE obj)
3659{
3660 struct enum_product *ptr = enum_product_ptr(obj);
3661 VALUE enums = ptr->enums;
3662 long i;
3663
3664 for (i = 0; i < RARRAY_LEN(enums); i++) {
3665 rb_check_funcall(RARRAY_AREF(enums, i), id_rewind, 0, 0);
3666 }
3667
3668 return obj;
3669}
3670
3671static VALUE
3672inspect_enum_product(VALUE obj, VALUE dummy, int recur)
3673{
3674 VALUE klass = rb_obj_class(obj);
3675 struct enum_product *ptr;
3676
3677 TypedData_Get_Struct(obj, struct enum_product, &enum_product_data_type, ptr);
3678
3679 if (!ptr || UNDEF_P(ptr->enums)) {
3680 return rb_sprintf("#<%"PRIsVALUE": uninitialized>", rb_class_path(klass));
3681 }
3682
3683 if (recur) {
3684 return rb_sprintf("#<%"PRIsVALUE": ...>", rb_class_path(klass));
3685 }
3686
3687 return rb_sprintf("#<%"PRIsVALUE": %+"PRIsVALUE">", rb_class_path(klass), ptr->enums);
3688}
3689
3690/*
3691 * call-seq:
3692 * obj.inspect -> string
3693 *
3694 * Returns a printable version of the product enumerator.
3695 */
3696static VALUE
3697enum_product_inspect(VALUE obj)
3698{
3699 return rb_exec_recursive(inspect_enum_product, obj, 0);
3700}
3701
3702/*
3703 * call-seq:
3704 * Enumerator.product(*enums) -> enumerator
3705 *   Enumerator.product(*enums) { |elts| ... } -> nil
3706 *
3707 * Generates a new enumerator object that generates a Cartesian
3708 * product of given enumerable objects. This is equivalent to
3709 * Enumerator::Product.new.
3710 *
3711 * e = Enumerator.product(1..3, [4, 5])
3712 * e.to_a #=> [[1, 4], [1, 5], [2, 4], [2, 5], [3, 4], [3, 5]]
3713 * e.size #=> 6
3714 *
3715 * When a block is given, calls the block with each N-element array
3716 * generated and returns +nil+.
3717 */
3718static VALUE
3719enumerator_s_product(int argc, VALUE *argv, VALUE klass)
3720{
3721 VALUE enums = Qnil, options = Qnil, block = Qnil;
3722
3723 rb_scan_args(argc, argv, "*:&", &enums, &options, &block);
3724
3725 if (!NIL_P(options) && !RHASH_EMPTY_P(options)) {
3726 rb_exc_raise(rb_keyword_error_new("unknown", rb_hash_keys(options)));
3727 }
3728
3729 VALUE obj = enum_product_initialize(argc, argv, enum_product_allocate(rb_cEnumProduct));
3730
3731 if (!NIL_P(block)) {
3732 enum_product_run(obj, block);
3733 return Qnil;
3734 }
3735
3736 return obj;
3737}
3738
3739 struct arith_seq {
3740     struct enumerator enumerator;
3741 VALUE begin;
3742 VALUE end;
3743 VALUE step;
3744 bool exclude_end;
3745};
3746
3747RUBY_REFERENCES(arith_seq_refs) = {
3748 RUBY_REF_EDGE(struct enumerator, obj),
3749 RUBY_REF_EDGE(struct enumerator, args),
3750 RUBY_REF_EDGE(struct enumerator, fib),
3751 RUBY_REF_EDGE(struct enumerator, dst),
3752 RUBY_REF_EDGE(struct enumerator, lookahead),
3753 RUBY_REF_EDGE(struct enumerator, feedvalue),
3754 RUBY_REF_EDGE(struct enumerator, stop_exc),
3755 RUBY_REF_EDGE(struct enumerator, size),
3756 RUBY_REF_EDGE(struct enumerator, procs),
3757
3758 RUBY_REF_EDGE(struct arith_seq, begin),
3759 RUBY_REF_EDGE(struct arith_seq, end),
3760 RUBY_REF_EDGE(struct arith_seq, step),
3761 RUBY_REF_END
3762};
3763
3764static const rb_data_type_t arith_seq_data_type = {
3765 "arithmetic_sequence",
3766 {
3767 RUBY_REFS_LIST_PTR(arith_seq_refs),
3768        RUBY_TYPED_DEFAULT_FREE,
3769        NULL, // Nothing allocated externally, so don't need a memsize function
3770 NULL,
3771 },
3772 .parent = &enumerator_data_type,
3773 .flags = RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED | RUBY_TYPED_DECL_MARKING | RUBY_TYPED_EMBEDDABLE
3774};
3775
3776static VALUE
3777arith_seq_allocate(VALUE klass)
3778{
3779 struct arith_seq *ptr;
3780 VALUE enum_obj;
3781
3782 enum_obj = TypedData_Make_Struct(klass, struct arith_seq, &arith_seq_data_type, ptr);
3783 ptr->enumerator.obj = Qundef;
3784
3785 return enum_obj;
3786}
3787
3788/*
3789 * Document-class: Enumerator::ArithmeticSequence
3790 *
3791 * Enumerator::ArithmeticSequence is a subclass of Enumerator
3792 * that represents a sequence of numbers with a common difference.
3793 * Instances of this class can be generated by the Range#step and Numeric#step
3794 * methods.
3795 *
3796 * The class can be used for slicing Array (see Array#slice) or custom
3797 * collections.
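 *
 * For example:
 *
 *    aseq = (1..10).step(3)  #=> ((1..10).step(3))
 *    aseq.class              #=> Enumerator::ArithmeticSequence
 *    aseq.to_a               #=> [1, 4, 7, 10]
 *
 *    # slicing an Array with an arithmetic sequence
 *    [0, 1, 2, 3, 4, 5][(0..).step(2)]  #=> [0, 2, 4]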
3798 */
3799
3800VALUE
3801rb_arith_seq_new(VALUE obj, VALUE meth, int argc, VALUE const *argv,
3802 rb_enumerator_size_func *size_fn,
3803 VALUE beg, VALUE end, VALUE step, int excl)
3804{
3805 VALUE aseq = enumerator_init(arith_seq_allocate(rb_cArithSeq),
3806 obj, meth, argc, argv, size_fn, Qnil, rb_keyword_given_p());
3807 struct arith_seq *ptr;
3808 TypedData_Get_Struct(aseq, struct arith_seq, &enumerator_data_type, ptr);
3809
3810 RB_OBJ_WRITE(aseq, &ptr->begin, beg);
3811 RB_OBJ_WRITE(aseq, &ptr->end, end);
3812 RB_OBJ_WRITE(aseq, &ptr->step, step);
3813 ptr->exclude_end = excl;
3814
3815 return aseq;
3816}
3817
3818/*
3819 * call-seq: aseq.begin -> num or nil
3820 *
3821 * Returns the number that defines the first element of this arithmetic
3822 * sequence.
3823 */
3824static inline VALUE
3825arith_seq_begin(VALUE self)
3826{
3827 struct arith_seq *ptr;
3828 TypedData_Get_Struct(self, struct arith_seq, &enumerator_data_type, ptr);
3829 return ptr->begin;
3830}
3831
3832/*
3833 * call-seq: aseq.end -> num or nil
3834 *
3835 * Returns the number that defines the end of this arithmetic sequence.
3836 */
3837static inline VALUE
3838arith_seq_end(VALUE self)
3839{
3840 struct arith_seq *ptr;
3841 TypedData_Get_Struct(self, struct arith_seq, &enumerator_data_type, ptr);
3842 return ptr->end;
3843}
3844
3845/*
3846 * call-seq: aseq.step -> num
3847 *
3848 * Returns the number that defines the common difference between
3849 * two adjacent elements in this arithmetic sequence.
3850 */
3851static inline VALUE
3852arith_seq_step(VALUE self)
3853{
3854 struct arith_seq *ptr;
3855 TypedData_Get_Struct(self, struct arith_seq, &enumerator_data_type, ptr);
3856 return ptr->step;
3857}
3858
3859/*
3860 * call-seq: aseq.exclude_end? -> true or false
3861 *
3862 * Returns <code>true</code> if this arithmetic sequence excludes its end value.
3863 */
3864static inline VALUE
3865arith_seq_exclude_end(VALUE self)
3866{
3867 struct arith_seq *ptr;
3868 TypedData_Get_Struct(self, struct arith_seq, &enumerator_data_type, ptr);
3869 return RBOOL(ptr->exclude_end);
3870}
3871
3872static inline int
3873arith_seq_exclude_end_p(VALUE self)
3874{
3875 struct arith_seq *ptr;
3876 TypedData_Get_Struct(self, struct arith_seq, &enumerator_data_type, ptr);
3877 return ptr->exclude_end;
3878}
3879
3880int
3881rb_arithmetic_sequence_extract(VALUE obj, rb_arithmetic_sequence_components_t *component)
3882{
3883 if (rb_obj_is_kind_of(obj, rb_cArithSeq)) {
3884 component->begin = arith_seq_begin(obj);
3885 component->end = arith_seq_end(obj);
3886 component->step = arith_seq_step(obj);
3887 component->exclude_end = arith_seq_exclude_end_p(obj);
3888 return 1;
3889 }
3890 else if (rb_range_values(obj, &component->begin, &component->end, &component->exclude_end)) {
3891 component->step = INT2FIX(1);
3892 return 1;
3893 }
3894
3895 return 0;
3896}
3897
3898VALUE
3899rb_arithmetic_sequence_beg_len_step(VALUE obj, long *begp, long *lenp, long *stepp, long len, int err)
3900{
3901 RBIMPL_NONNULL_ARG(begp);
3902 RBIMPL_NONNULL_ARG(lenp);
3903 RBIMPL_NONNULL_ARG(stepp);
3904
3905    rb_arithmetic_sequence_components_t aseq;
3906    if (!rb_arithmetic_sequence_extract(obj, &aseq)) {
3907 return Qfalse;
3908 }
3909
3910 long step = NIL_P(aseq.step) ? 1 : NUM2LONG(aseq.step);
3911 *stepp = step;
3912
3913 if (step < 0) {
3914 if (aseq.exclude_end && !NIL_P(aseq.end)) {
3915 /* Handle exclusion before range reversal */
3916 aseq.end = LONG2NUM(NUM2LONG(aseq.end) + 1);
3917
3918 /* Don't exclude the previous beginning */
3919 aseq.exclude_end = 0;
3920 }
3921 VALUE tmp = aseq.begin;
3922 aseq.begin = aseq.end;
3923 aseq.end = tmp;
3924 }
3925
3926 if (err == 0 && (step < -1 || step > 1)) {
3927 if (rb_range_component_beg_len(aseq.begin, aseq.end, aseq.exclude_end, begp, lenp, len, 1) == Qtrue) {
3928 if (*begp > len)
3929 goto out_of_range;
3930 if (*lenp > len)
3931 goto out_of_range;
3932 return Qtrue;
3933 }
3934 }
3935 else {
3936 return rb_range_component_beg_len(aseq.begin, aseq.end, aseq.exclude_end, begp, lenp, len, err);
3937 }
3938
3939 out_of_range:
3940 rb_raise(rb_eRangeError, "%+"PRIsVALUE" out of range", obj);
3941 return Qnil;
3942}
3943
3944static VALUE
3945arith_seq_take(VALUE self, VALUE num)
3946{
3947 VALUE b, e, s, ary;
3948 long n;
3949 int x;
3950
3951 n = NUM2LONG(num);
3952 if (n < 0) {
3953 rb_raise(rb_eArgError, "attempt to take negative size");
3954 }
3955 if (n == 0) {
3956 return rb_ary_new_capa(0);
3957 }
3958
3959 b = arith_seq_begin(self);
3960 e = arith_seq_end(self);
3961 s = arith_seq_step(self);
3962 x = arith_seq_exclude_end_p(self);
3963
3964 if (FIXNUM_P(b) && NIL_P(e) && FIXNUM_P(s)) {
3965 long i = FIX2LONG(b), unit = FIX2LONG(s);
3966 ary = rb_ary_new_capa(n);
3967 while (n > 0 && FIXABLE(i)) {
3968 rb_ary_push(ary, LONG2FIX(i));
3969            i += unit; // FIXABLE + FIXABLE never overflows
3970 --n;
3971 }
3972 if (n > 0) {
3973 b = LONG2NUM(i);
3974 while (n > 0) {
3975 rb_ary_push(ary, b);
3976 b = rb_big_plus(b, s);
3977 --n;
3978 }
3979 }
3980 return ary;
3981 }
3982 else if (FIXNUM_P(b) && FIXNUM_P(e) && FIXNUM_P(s)) {
3983 long i = FIX2LONG(b);
3984 long end = FIX2LONG(e);
3985 long unit = FIX2LONG(s);
3986 long len;
3987
3988 if (unit >= 0) {
3989 if (!x) end += 1;
3990
3991 len = end - i;
3992 if (len < 0) len = 0;
3993 ary = rb_ary_new_capa((n < len) ? n : len);
3994 while (n > 0 && i < end) {
3995 rb_ary_push(ary, LONG2FIX(i));
3996 if (i + unit < i) break;
3997 i += unit;
3998 --n;
3999 }
4000 }
4001 else {
4002 if (!x) end -= 1;
4003
4004 len = i - end;
4005 if (len < 0) len = 0;
4006 ary = rb_ary_new_capa((n < len) ? n : len);
4007 while (n > 0 && i > end) {
4008 rb_ary_push(ary, LONG2FIX(i));
4009 if (i + unit > i) break;
4010 i += unit;
4011 --n;
4012 }
4013 }
4014 return ary;
4015 }
4016 else if (RB_FLOAT_TYPE_P(b) || RB_FLOAT_TYPE_P(e) || RB_FLOAT_TYPE_P(s)) {
4017 /* generate values like ruby_float_step */
4018
4019 double unit = NUM2DBL(s);
4020 double beg = NUM2DBL(b);
4021 double end = NIL_P(e) ? (unit < 0 ? -1 : 1)*HUGE_VAL : NUM2DBL(e);
4022 double len = ruby_float_step_size(beg, end, unit, x);
4023 long i;
4024
4025 if (n > len)
4026 n = (long)len;
4027
4028 if (isinf(unit)) {
4029 if (len > 0) {
4030 ary = rb_ary_new_capa(1);
4031 rb_ary_push(ary, DBL2NUM(beg));
4032 }
4033 else {
4034 ary = rb_ary_new_capa(0);
4035 }
4036 }
4037 else if (unit == 0) {
4038 VALUE val = DBL2NUM(beg);
4039 ary = rb_ary_new_capa(n);
4040 for (i = 0; i < len; ++i) {
4041 rb_ary_push(ary, val);
4042 }
4043 }
4044 else {
4045 ary = rb_ary_new_capa(n);
4046 for (i = 0; i < n; ++i) {
4047 double d = i*unit+beg;
4048 if (unit >= 0 ? end < d : d < end) d = end;
4049 rb_ary_push(ary, DBL2NUM(d));
4050 }
4051 }
4052
4053 return ary;
4054 }
4055
4056 {
4057 VALUE argv[1];
4058 argv[0] = num;
4059 return rb_call_super(1, argv);
4060 }
4061}
4062
4063/*
4064 * call-seq:
4065 * aseq.first -> num or nil
4066 * aseq.first(n) -> an_array
4067 *
4068 * Returns the first number in this arithmetic sequence,
4069 * or an array of the first +n+ elements.
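 *
 * For example:
 *
 *    (1..10).step(3).first     #=> 1
 *    (1..10).step(3).first(2)  #=> [1, 4]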
4070 */
4071static VALUE
4072arith_seq_first(int argc, VALUE *argv, VALUE self)
4073{
4074 VALUE b, e, s;
4075
4076 rb_check_arity(argc, 0, 1);
4077
4078 b = arith_seq_begin(self);
4079 e = arith_seq_end(self);
4080 s = arith_seq_step(self);
4081 if (argc == 0) {
4082 if (NIL_P(b)) {
4083 return Qnil;
4084 }
4085 if (!NIL_P(e)) {
4086 VALUE zero = INT2FIX(0);
4087 int r = rb_cmpint(rb_num_coerce_cmp(s, zero, idCmp), s, zero);
4088 if (r > 0 && RTEST(rb_funcall(b, '>', 1, e))) {
4089 return Qnil;
4090 }
4091 if (r < 0 && RTEST(rb_funcall(b, '<', 1, e))) {
4092 return Qnil;
4093 }
4094 }
4095 return b;
4096 }
4097
4098 return arith_seq_take(self, argv[0]);
4099}
4100
4101static inline VALUE
4102num_plus(VALUE a, VALUE b)
4103{
4104 if (RB_INTEGER_TYPE_P(a)) {
4105 return rb_int_plus(a, b);
4106 }
4107 else if (RB_FLOAT_TYPE_P(a)) {
4108 return rb_float_plus(a, b);
4109 }
4110 else if (RB_TYPE_P(a, T_RATIONAL)) {
4111 return rb_rational_plus(a, b);
4112 }
4113 else {
4114 return rb_funcallv(a, '+', 1, &b);
4115 }
4116}
4117
4118static inline VALUE
4119num_minus(VALUE a, VALUE b)
4120{
4121 if (RB_INTEGER_TYPE_P(a)) {
4122 return rb_int_minus(a, b);
4123 }
4124 else if (RB_FLOAT_TYPE_P(a)) {
4125 return rb_float_minus(a, b);
4126 }
4127 else if (RB_TYPE_P(a, T_RATIONAL)) {
4128 return rb_rational_minus(a, b);
4129 }
4130 else {
4131 return rb_funcallv(a, '-', 1, &b);
4132 }
4133}
4134
4135static inline VALUE
4136num_mul(VALUE a, VALUE b)
4137{
4138 if (RB_INTEGER_TYPE_P(a)) {
4139 return rb_int_mul(a, b);
4140 }
4141 else if (RB_FLOAT_TYPE_P(a)) {
4142 return rb_float_mul(a, b);
4143 }
4144 else if (RB_TYPE_P(a, T_RATIONAL)) {
4145 return rb_rational_mul(a, b);
4146 }
4147 else {
4148 return rb_funcallv(a, '*', 1, &b);
4149 }
4150}
4151
4152static inline VALUE
4153num_idiv(VALUE a, VALUE b)
4154{
4155 VALUE q;
4156 if (RB_INTEGER_TYPE_P(a)) {
4157 q = rb_int_idiv(a, b);
4158 }
4159 else if (RB_FLOAT_TYPE_P(a)) {
4160 q = rb_float_div(a, b);
4161 }
4162 else if (RB_TYPE_P(a, T_RATIONAL)) {
4163 q = rb_rational_div(a, b);
4164 }
4165 else {
4166 q = rb_funcallv(a, idDiv, 1, &b);
4167 }
4168
4169 if (RB_INTEGER_TYPE_P(q)) {
4170 return q;
4171 }
4172 else if (RB_FLOAT_TYPE_P(q)) {
4173 return rb_float_floor(q, 0);
4174 }
4175 else if (RB_TYPE_P(q, T_RATIONAL)) {
4176 return rb_rational_floor(q, 0);
4177 }
4178 else {
4179 return rb_funcall(q, rb_intern("floor"), 0);
4180 }
4181}
4182
4183/*
4184 * call-seq:
4185 * aseq.last -> num or nil
4186 * aseq.last(n) -> an_array
4187 *
4188 * Returns the last number in this arithmetic sequence,
4189 * or an array of the last +n+ elements.
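 *
 * For example:
 *
 *    (1..10).step(3).last      #=> 10
 *    (1..10).step(3).last(2)   #=> [7, 10]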
4190 */
4191static VALUE
4192arith_seq_last(int argc, VALUE *argv, VALUE self)
4193{
4194 VALUE b, e, s, len_1, len, last, nv, ary;
4195 int last_is_adjusted;
4196 long n;
4197
4198 e = arith_seq_end(self);
4199 if (NIL_P(e)) {
4200 rb_raise(rb_eRangeError,
4201 "cannot get the last element of endless arithmetic sequence");
4202 }
4203
4204 b = arith_seq_begin(self);
4205 s = arith_seq_step(self);
4206
4207 len_1 = num_idiv(num_minus(e, b), s);
4208 if (rb_num_negative_int_p(len_1)) {
4209 if (argc == 0) {
4210 return Qnil;
4211 }
4212 return rb_ary_new_capa(0);
4213 }
4214
4215 last = num_plus(b, num_mul(s, len_1));
4216 if ((last_is_adjusted = arith_seq_exclude_end_p(self) && rb_equal(last, e))) {
4217 last = num_minus(last, s);
4218 }
4219
4220 if (argc == 0) {
4221 return last;
4222 }
4223
4224 if (last_is_adjusted) {
4225 len = len_1;
4226 }
4227 else {
4228 len = rb_int_plus(len_1, INT2FIX(1));
4229 }
4230
4231 rb_scan_args(argc, argv, "1", &nv);
4232 if (!RB_INTEGER_TYPE_P(nv)) {
4233 nv = rb_to_int(nv);
4234 }
4235 if (RTEST(rb_int_gt(nv, len))) {
4236 nv = len;
4237 }
4238 n = NUM2LONG(nv);
4239 if (n < 0) {
4240 rb_raise(rb_eArgError, "negative array size");
4241 }
4242
4243 ary = rb_ary_new_capa(n);
4244 b = rb_int_minus(last, rb_int_mul(s, nv));
4245 while (n) {
4246 b = rb_int_plus(b, s);
4247 rb_ary_push(ary, b);
4248 --n;
4249 }
4250
4251 return ary;
4252}
4253
4254/*
4255 * call-seq:
4256 * aseq.inspect -> string
4257 *
4258 * Convert this arithmetic sequence to a printable form.
4259 */
4260static VALUE
4261arith_seq_inspect(VALUE self)
4262{
4263 struct enumerator *e;
4264 VALUE eobj, str, eargs;
4265 int range_p;
4266
4267 TypedData_Get_Struct(self, struct enumerator, &enumerator_data_type, e);
4268
4269 eobj = rb_attr_get(self, id_receiver);
4270 if (NIL_P(eobj)) {
4271 eobj = e->obj;
4272 }
4273
4274 range_p = RTEST(rb_obj_is_kind_of(eobj, rb_cRange));
4275 str = rb_sprintf("(%s%"PRIsVALUE"%s.", range_p ? "(" : "", eobj, range_p ? ")" : "");
4276
4277 rb_str_buf_append(str, rb_id2str(e->meth));
4278
4279 eargs = rb_attr_get(eobj, id_arguments);
4280 if (NIL_P(eargs)) {
4281 eargs = e->args;
4282 }
4283 if (eargs != Qfalse) {
4284 long argc = RARRAY_LEN(eargs);
4285 const VALUE *argv = RARRAY_CONST_PTR(eargs); /* WB: no new reference */
4286
4287 if (argc > 0) {
4288 VALUE kwds = Qnil;
4289
4290 rb_str_buf_cat2(str, "(");
4291
4292 if (RB_TYPE_P(argv[argc-1], T_HASH)) {
4293 int all_key = TRUE;
4294 rb_hash_foreach(argv[argc-1], key_symbol_p, (VALUE)&all_key);
4295 if (all_key) kwds = argv[--argc];
4296 }
4297
4298 while (argc--) {
4299 VALUE arg = *argv++;
4300
4301 rb_str_append(str, rb_inspect(arg));
4302 rb_str_buf_cat2(str, ", ");
4303 }
4304 if (!NIL_P(kwds)) {
4305 rb_hash_foreach(kwds, kwd_append, str);
4306 }
4307 rb_str_set_len(str, RSTRING_LEN(str)-2); /* drop the last ", " */
4308 rb_str_buf_cat2(str, ")");
4309 }
4310 }
4311
4312 rb_str_buf_cat2(str, ")");
4313
4314 return str;
4315}
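/* Illustrative usage of ArithmeticSequence#inspect (editorial example, not
 * in the original source; output assumes the formatting implemented above):
 *
 *   (1..10).step(2).inspect  #=> "((1..10).step(2))"
 *   1.step(10, 2).inspect    #=> "(1.step(10, 2))"
 */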
4316
4317/*
4318 * call-seq:
4319 * aseq == obj -> true or false
4320 *
4321 * Returns <code>true</code> only if +obj+ is an Enumerator::ArithmeticSequence
4322 * and has equivalent begin, end, step, and exclude_end? settings.
4323 */
4324static VALUE
4325arith_seq_eq(VALUE self, VALUE other)
4326{
4327 if (!RTEST(rb_obj_is_kind_of(other, rb_cArithSeq))) {
4328 return Qfalse;
4329 }
4330
4331 if (!rb_equal(arith_seq_begin(self), arith_seq_begin(other))) {
4332 return Qfalse;
4333 }
4334
4335 if (!rb_equal(arith_seq_end(self), arith_seq_end(other))) {
4336 return Qfalse;
4337 }
4338
4339 if (!rb_equal(arith_seq_step(self), arith_seq_step(other))) {
4340 return Qfalse;
4341 }
4342
4343 if (arith_seq_exclude_end_p(self) != arith_seq_exclude_end_p(other)) {
4344 return Qfalse;
4345 }
4346
4347 return Qtrue;
4348}
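/* Illustrative usage of ArithmeticSequence#== (editorial example, not in
 * the original source):
 *
 *   (1..10).step(2) == (1..10).step(2)   #=> true
 *   (1..10).step(2) == (1...10).step(2)  #=> false  (exclude_end? differs)
 */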
4349
4350/*
4351 * call-seq:
4352 * aseq.hash -> integer
4353 *
4354 * Compute a hash-value for this arithmetic sequence.
4355 * Two arithmetic sequences with the same begin, end, step, and exclude_end?
4356 * values will generate the same hash-value.
4357 *
4358 * See also Object#hash.
4359 */
4360static VALUE
4361arith_seq_hash(VALUE self)
4362{
4363 st_index_t hash;
4364 VALUE v;
4365
4366 hash = rb_hash_start(arith_seq_exclude_end_p(self));
4367 v = rb_hash(arith_seq_begin(self));
4368 hash = rb_hash_uint(hash, NUM2LONG(v));
4369 v = rb_hash(arith_seq_end(self));
4370 hash = rb_hash_uint(hash, NUM2LONG(v));
4371 v = rb_hash(arith_seq_step(self));
4372 hash = rb_hash_uint(hash, NUM2LONG(v));
4373 hash = rb_hash_end(hash);
4374
4375 return ST2FIX(hash);
4376}
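/* Illustrative usage of ArithmeticSequence#hash (editorial example, not in
 * the original source): sequences that compare equal hash identically.
 *
 *   (1..10).step(2).hash == (1..10).step(2).hash  #=> true
 */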
4377
4378#define NUM_GE(x, y) RTEST(rb_num_coerce_relop((x), (y), idGE))
4379
4380struct arith_seq_gen {
4381 VALUE current;
4382 VALUE end;
4383 VALUE step;
4384 int excl;
4385};
4386
4387/*
4388 * call-seq:
4389 * aseq.each {|i| block } -> aseq
4390 * aseq.each -> aseq
4391 */
4392static VALUE
4393arith_seq_each(VALUE self)
4394{
4395 VALUE c, e, s, len_1, last;
4396 int x;
4397
4398 if (!rb_block_given_p()) return self;
4399
4400 c = arith_seq_begin(self);
4401 e = arith_seq_end(self);
4402 s = arith_seq_step(self);
4403 x = arith_seq_exclude_end_p(self);
4404
4405 if (!RB_TYPE_P(s, T_COMPLEX) && ruby_float_step(c, e, s, x, TRUE)) {
4406 return self;
4407 }
4408
4409 if (NIL_P(e)) {
4410 while (1) {
4411 rb_yield(c);
4412 c = rb_int_plus(c, s);
4413 }
4414
4415 return self;
4416 }
4417
4418 if (rb_equal(s, INT2FIX(0))) {
4419 while (1) {
4420 rb_yield(c);
4421 }
4422
4423 return self;
4424 }
4425
4426 len_1 = num_idiv(num_minus(e, c), s);
4427 last = num_plus(c, num_mul(s, len_1));
4428 if (x && rb_equal(last, e)) {
4429 last = num_minus(last, s);
4430 }
4431
4432 if (rb_num_negative_int_p(s)) {
4433 while (NUM_GE(c, last)) {
4434 rb_yield(c);
4435 c = num_plus(c, s);
4436 }
4437 }
4438 else {
4439 while (NUM_GE(last, c)) {
4440 rb_yield(c);
4441 c = num_plus(c, s);
4442 }
4443 }
4444
4445 return self;
4446}
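/* Illustrative usage of ArithmeticSequence#each (editorial example, not in
 * the original source):
 *
 *   (1..10).step(3).each { |i| print i, " " }  # prints: 1 4 7 10
 *   (1.0..2.0).step(0.5).to_a                  #=> [1.0, 1.5, 2.0]
 */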
4447
4448/*
4449 * call-seq:
4450 * aseq.size -> num or nil
4451 *
4452 * Returns the number of elements in this arithmetic sequence if it is a finite
4453 * sequence. Otherwise, returns <code>nil</code>.
4454 */
4455static VALUE
4456arith_seq_size(VALUE self)
4457{
4458 VALUE b, e, s, len_1, len, last;
4459 int x;
4460
4461 b = arith_seq_begin(self);
4462 e = arith_seq_end(self);
4463 s = arith_seq_step(self);
4464 x = arith_seq_exclude_end_p(self);
4465
4466 if (RB_FLOAT_TYPE_P(b) || RB_FLOAT_TYPE_P(e) || RB_FLOAT_TYPE_P(s)) {
4467 double ee, n;
4468
4469 if (NIL_P(e)) {
4470 if (rb_num_negative_int_p(s)) {
4471 ee = -HUGE_VAL;
4472 }
4473 else {
4474 ee = HUGE_VAL;
4475 }
4476 }
4477 else {
4478 ee = NUM2DBL(e);
4479 }
4480
4481 n = ruby_float_step_size(NUM2DBL(b), ee, NUM2DBL(s), x);
4482 if (isinf(n)) return DBL2NUM(n);
4483 if (POSFIXABLE(n)) return LONG2FIX((long)n);
4484 return rb_dbl2big(n);
4485 }
4486
4487 if (NIL_P(e)) {
4488 return DBL2NUM(HUGE_VAL);
4489 }
4490
4491 if (!rb_obj_is_kind_of(s, rb_cNumeric)) {
4492 s = rb_to_int(s);
4493 }
4494
4495 if (rb_equal(s, INT2FIX(0))) {
4496 return DBL2NUM(HUGE_VAL);
4497 }
4498
4499 len_1 = rb_int_idiv(rb_int_minus(e, b), s);
4500 if (rb_num_negative_int_p(len_1)) {
4501 return INT2FIX(0);
4502 }
4503
4504 last = rb_int_plus(b, rb_int_mul(s, len_1));
4505 if (x && rb_equal(last, e)) {
4506 len = len_1;
4507 }
4508 else {
4509 len = rb_int_plus(len_1, INT2FIX(1));
4510 }
4511
4512 return len;
4513}
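/* Illustrative usage of ArithmeticSequence#size (editorial example, not in
 * the original source):
 *
 *   (1..10).step(3).size   #=> 4
 *   (1...10).step(3).size  #=> 3
 *   (1..).step(2).size     #=> Infinity
 */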
4514
4515#define sym(name) ID2SYM(rb_intern_const(name))
4516void
4517InitVM_Enumerator(void)
4518{
4519 ID id_private = rb_intern_const("private");
4520
4521 rb_define_method(rb_mKernel, "to_enum", obj_to_enum, -1);
4522 rb_define_method(rb_mKernel, "enum_for", obj_to_enum, -1);
4523
4524 rb_cEnumerator = rb_define_class("Enumerator", rb_cObject);
4525 rb_include_module(rb_cEnumerator, rb_mEnumerable);
4526
4527 rb_define_alloc_func(rb_cEnumerator, enumerator_allocate);
4528 rb_define_method(rb_cEnumerator, "initialize", enumerator_initialize, -1);
4529 rb_define_method(rb_cEnumerator, "initialize_copy", enumerator_init_copy, 1);
4530 rb_define_method(rb_cEnumerator, "each", enumerator_each, -1);
4531 rb_define_method(rb_cEnumerator, "each_with_index", enumerator_each_with_index, 0);
4532 rb_define_method(rb_cEnumerator, "each_with_object", enumerator_with_object, 1);
4533 rb_define_method(rb_cEnumerator, "with_index", enumerator_with_index, -1);
4534 rb_define_method(rb_cEnumerator, "with_object", enumerator_with_object, 1);
4535 rb_define_method(rb_cEnumerator, "next_values", enumerator_next_values, 0);
4536 rb_define_method(rb_cEnumerator, "peek_values", enumerator_peek_values_m, 0);
4537 rb_define_method(rb_cEnumerator, "next", enumerator_next, 0);
4538 rb_define_method(rb_cEnumerator, "peek", enumerator_peek, 0);
4539 rb_define_method(rb_cEnumerator, "feed", enumerator_feed, 1);
4540 rb_define_method(rb_cEnumerator, "rewind", enumerator_rewind, 0);
4541 rb_define_method(rb_cEnumerator, "inspect", enumerator_inspect, 0);
4542 rb_define_method(rb_cEnumerator, "size", enumerator_size, 0);
4543 rb_define_method(rb_cEnumerator, "to_set", enumerator_to_set, -1);
4544 rb_define_method(rb_cEnumerator, "+", enumerator_plus, 1);
4545 rb_define_method(rb_mEnumerable, "chain", enum_chain, -1);
4546
4547 /* Lazy */
4548 rb_cLazy = rb_define_class_under(rb_cEnumerator, "Lazy", rb_cEnumerator);
4549 rb_define_method(rb_mEnumerable, "lazy", enumerable_lazy, 0);
4550
4551 rb_define_alias(rb_cLazy, "_enumerable_map", "map");
4552 rb_define_alias(rb_cLazy, "_enumerable_collect", "collect");
4553 rb_define_alias(rb_cLazy, "_enumerable_flat_map", "flat_map");
4554 rb_define_alias(rb_cLazy, "_enumerable_collect_concat", "collect_concat");
4555 rb_define_alias(rb_cLazy, "_enumerable_select", "select");
4556 rb_define_alias(rb_cLazy, "_enumerable_find_all", "find_all");
4557 rb_define_alias(rb_cLazy, "_enumerable_filter", "filter");
4558 rb_define_alias(rb_cLazy, "_enumerable_filter_map", "filter_map");
4559 rb_define_alias(rb_cLazy, "_enumerable_reject", "reject");
4560 rb_define_alias(rb_cLazy, "_enumerable_grep", "grep");
4561 rb_define_alias(rb_cLazy, "_enumerable_grep_v", "grep_v");
4562 rb_define_alias(rb_cLazy, "_enumerable_zip", "zip");
4563 rb_define_alias(rb_cLazy, "_enumerable_take", "take");
4564 rb_define_alias(rb_cLazy, "_enumerable_take_while", "take_while");
4565 rb_define_alias(rb_cLazy, "_enumerable_drop", "drop");
4566 rb_define_alias(rb_cLazy, "_enumerable_drop_while", "drop_while");
4567 rb_define_alias(rb_cLazy, "_enumerable_uniq", "uniq");
4568 rb_define_private_method(rb_cLazy, "_enumerable_with_index", enumerator_with_index, -1);
4569
4570 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_map"));
4571 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_collect"));
4572 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_flat_map"));
4573 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_collect_concat"));
4574 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_select"));
4575 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_find_all"));
4576 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_filter"));
4577 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_filter_map"));
4578 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_reject"));
4579 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_grep"));
4580 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_grep_v"));
4581 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_zip"));
4582 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_take"));
4583 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_take_while"));
4584 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_drop"));
4585 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_drop_while"));
4586 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_uniq"));
4587
4588 rb_define_method(rb_cLazy, "initialize", lazy_initialize, -1);
4589 rb_define_method(rb_cLazy, "to_enum", lazy_to_enum, -1);
4590 rb_define_method(rb_cLazy, "enum_for", lazy_to_enum, -1);
4591 rb_define_method(rb_cLazy, "eager", lazy_eager, 0);
4592 rb_define_method(rb_cLazy, "map", lazy_map, 0);
4593 rb_define_method(rb_cLazy, "collect", lazy_map, 0);
4594 rb_define_method(rb_cLazy, "flat_map", lazy_flat_map, 0);
4595 rb_define_method(rb_cLazy, "collect_concat", lazy_flat_map, 0);
4596 rb_define_method(rb_cLazy, "select", lazy_select, 0);
4597 rb_define_method(rb_cLazy, "find_all", lazy_select, 0);
4598 rb_define_method(rb_cLazy, "filter", lazy_select, 0);
4599 rb_define_method(rb_cLazy, "filter_map", lazy_filter_map, 0);
4600 rb_define_method(rb_cLazy, "reject", lazy_reject, 0);
4601 rb_define_method(rb_cLazy, "grep", lazy_grep, 1);
4602 rb_define_method(rb_cLazy, "grep_v", lazy_grep_v, 1);
4603 rb_define_method(rb_cLazy, "zip", lazy_zip, -1);
4604 rb_define_method(rb_cLazy, "take", lazy_take, 1);
4605 rb_define_method(rb_cLazy, "take_while", lazy_take_while, 0);
4606 rb_define_method(rb_cLazy, "drop", lazy_drop, 1);
4607 rb_define_method(rb_cLazy, "drop_while", lazy_drop_while, 0);
4608 rb_define_method(rb_cLazy, "lazy", lazy_lazy, 0);
4609 rb_define_method(rb_cLazy, "chunk", lazy_super, -1);
4610 rb_define_method(rb_cLazy, "slice_before", lazy_super, -1);
4611 rb_define_method(rb_cLazy, "slice_after", lazy_super, -1);
4612 rb_define_method(rb_cLazy, "slice_when", lazy_super, -1);
4613 rb_define_method(rb_cLazy, "chunk_while", lazy_super, -1);
4614 rb_define_method(rb_cLazy, "uniq", lazy_uniq, 0);
4615 rb_define_method(rb_cLazy, "compact", lazy_compact, 0);
4616 rb_define_method(rb_cLazy, "with_index", lazy_with_index, -1);
4617
4618 lazy_use_super_method = rb_hash_new_with_size(18);
4619 rb_hash_aset(lazy_use_super_method, sym("map"), sym("_enumerable_map"));
4620 rb_hash_aset(lazy_use_super_method, sym("collect"), sym("_enumerable_collect"));
4621 rb_hash_aset(lazy_use_super_method, sym("flat_map"), sym("_enumerable_flat_map"));
4622 rb_hash_aset(lazy_use_super_method, sym("collect_concat"), sym("_enumerable_collect_concat"));
4623 rb_hash_aset(lazy_use_super_method, sym("select"), sym("_enumerable_select"));
4624 rb_hash_aset(lazy_use_super_method, sym("find_all"), sym("_enumerable_find_all"));
4625 rb_hash_aset(lazy_use_super_method, sym("filter"), sym("_enumerable_filter"));
4626 rb_hash_aset(lazy_use_super_method, sym("filter_map"), sym("_enumerable_filter_map"));
4627 rb_hash_aset(lazy_use_super_method, sym("reject"), sym("_enumerable_reject"));
4628 rb_hash_aset(lazy_use_super_method, sym("grep"), sym("_enumerable_grep"));
4629 rb_hash_aset(lazy_use_super_method, sym("grep_v"), sym("_enumerable_grep_v"));
4630 rb_hash_aset(lazy_use_super_method, sym("zip"), sym("_enumerable_zip"));
4631 rb_hash_aset(lazy_use_super_method, sym("take"), sym("_enumerable_take"));
4632 rb_hash_aset(lazy_use_super_method, sym("take_while"), sym("_enumerable_take_while"));
4633 rb_hash_aset(lazy_use_super_method, sym("drop"), sym("_enumerable_drop"));
4634 rb_hash_aset(lazy_use_super_method, sym("drop_while"), sym("_enumerable_drop_while"));
4635 rb_hash_aset(lazy_use_super_method, sym("uniq"), sym("_enumerable_uniq"));
4636 rb_hash_aset(lazy_use_super_method, sym("with_index"), sym("_enumerable_with_index"));
4637 rb_obj_freeze(lazy_use_super_method);
4638 rb_vm_register_global_object(lazy_use_super_method);
4639
4640#if 0 /* for RDoc */
4641 rb_define_method(rb_cLazy, "to_a", lazy_to_a, 0);
4642 rb_define_method(rb_cLazy, "chunk", lazy_chunk, 0);
4643 rb_define_method(rb_cLazy, "chunk_while", lazy_chunk_while, 0);
4644 rb_define_method(rb_cLazy, "slice_after", lazy_slice_after, 0);
4645 rb_define_method(rb_cLazy, "slice_before", lazy_slice_before, 0);
4646 rb_define_method(rb_cLazy, "slice_when", lazy_slice_when, 0);
4647#endif
4648 rb_define_alias(rb_cLazy, "force", "to_a");
4649
4650 rb_eStopIteration = rb_define_class("StopIteration", rb_eIndexError);
4651 rb_define_method(rb_eStopIteration, "result", stop_result, 0);
4652
4653 /* Generator */
4654 rb_cGenerator = rb_define_class_under(rb_cEnumerator, "Generator", rb_cObject);
4655 rb_include_module(rb_cGenerator, rb_mEnumerable);
4656 rb_define_alloc_func(rb_cGenerator, generator_allocate);
4657 rb_define_method(rb_cGenerator, "initialize", generator_initialize, -1);
4658 rb_define_method(rb_cGenerator, "initialize_copy", generator_init_copy, 1);
4659 rb_define_method(rb_cGenerator, "each", generator_each, -1);
4660
4661 /* Yielder */
4662 rb_cYielder = rb_define_class_under(rb_cEnumerator, "Yielder", rb_cObject);
4663 rb_define_alloc_func(rb_cYielder, yielder_allocate);
4664 rb_define_method(rb_cYielder, "initialize", yielder_initialize, 0);
4665 rb_define_method(rb_cYielder, "yield", yielder_yield, -2);
4666 rb_define_method(rb_cYielder, "<<", yielder_yield_push, 1);
4667 rb_define_method(rb_cYielder, "to_proc", yielder_to_proc, 0);
4668
4669 /* Producer */
4670 rb_cEnumProducer = rb_define_class_under(rb_cEnumerator, "Producer", rb_cObject);
4671 rb_define_alloc_func(rb_cEnumProducer, producer_allocate);
4672 rb_define_method(rb_cEnumProducer, "each", producer_each, 0);
4673 rb_define_singleton_method(rb_cEnumerator, "produce", enumerator_s_produce, -1);
4674
4675 /* Chain */
4676 rb_cEnumChain = rb_define_class_under(rb_cEnumerator, "Chain", rb_cEnumerator);
4677 rb_define_alloc_func(rb_cEnumChain, enum_chain_allocate);
4678 rb_define_method(rb_cEnumChain, "initialize", enum_chain_initialize, -2);
4679 rb_define_method(rb_cEnumChain, "initialize_copy", enum_chain_init_copy, 1);
4680 rb_define_method(rb_cEnumChain, "each", enum_chain_each, -1);
4681 rb_define_method(rb_cEnumChain, "size", enum_chain_size, 0);
4682 rb_define_method(rb_cEnumChain, "rewind", enum_chain_rewind, 0);
4683 rb_define_method(rb_cEnumChain, "inspect", enum_chain_inspect, 0);
4684 rb_undef_method(rb_cEnumChain, "feed");
4685 rb_undef_method(rb_cEnumChain, "next");
4686 rb_undef_method(rb_cEnumChain, "next_values");
4687 rb_undef_method(rb_cEnumChain, "peek");
4688 rb_undef_method(rb_cEnumChain, "peek_values");
4689
4690 /* Product */
4691 rb_cEnumProduct = rb_define_class_under(rb_cEnumerator, "Product", rb_cEnumerator);
4692 rb_define_alloc_func(rb_cEnumProduct, enum_product_allocate);
4693 rb_define_method(rb_cEnumProduct, "initialize", enum_product_initialize, -1);
4694 rb_define_method(rb_cEnumProduct, "initialize_copy", enum_product_init_copy, 1);
4695 rb_define_method(rb_cEnumProduct, "each", enum_product_each, 0);
4696 rb_define_method(rb_cEnumProduct, "size", enum_product_size, 0);
4697 rb_define_method(rb_cEnumProduct, "rewind", enum_product_rewind, 0);
4698 rb_define_method(rb_cEnumProduct, "inspect", enum_product_inspect, 0);
4699 rb_undef_method(rb_cEnumProduct, "feed");
4700 rb_undef_method(rb_cEnumProduct, "next");
4701 rb_undef_method(rb_cEnumProduct, "next_values");
4702 rb_undef_method(rb_cEnumProduct, "peek");
4703 rb_undef_method(rb_cEnumProduct, "peek_values");
4704 rb_define_singleton_method(rb_cEnumerator, "product", enumerator_s_product, -1);
4705
4706 /* ArithmeticSequence */
4707 rb_cArithSeq = rb_define_class_under(rb_cEnumerator, "ArithmeticSequence", rb_cEnumerator);
4708 rb_undef_alloc_func(rb_cArithSeq);
4709 rb_undef_method(CLASS_OF(rb_cArithSeq), "new");
4710 rb_define_method(rb_cArithSeq, "begin", arith_seq_begin, 0);
4711 rb_define_method(rb_cArithSeq, "end", arith_seq_end, 0);
4712 rb_define_method(rb_cArithSeq, "exclude_end?", arith_seq_exclude_end, 0);
4713 rb_define_method(rb_cArithSeq, "step", arith_seq_step, 0);
4714 rb_define_method(rb_cArithSeq, "first", arith_seq_first, -1);
4715 rb_define_method(rb_cArithSeq, "last", arith_seq_last, -1);
4716 rb_define_method(rb_cArithSeq, "inspect", arith_seq_inspect, 0);
4717 rb_define_method(rb_cArithSeq, "==", arith_seq_eq, 1);
4718 rb_define_method(rb_cArithSeq, "===", arith_seq_eq, 1);
4719 rb_define_method(rb_cArithSeq, "eql?", arith_seq_eq, 1);
4720 rb_define_method(rb_cArithSeq, "hash", arith_seq_hash, 0);
4721 rb_define_method(rb_cArithSeq, "each", arith_seq_each, 0);
4722 rb_define_method(rb_cArithSeq, "size", arith_seq_size, 0);
4723
4724 rb_provide("enumerator.so"); /* for backward compatibility */
4725}
4726#undef sym
4727
4728void
4729Init_Enumerator(void)
4730{
4731 id_rewind = rb_intern_const("rewind");
4732 id_next = rb_intern_const("next");
4733 id_result = rb_intern_const("result");
4734 id_receiver = rb_intern_const("receiver");
4735 id_arguments = rb_intern_const("arguments");
4736 id_memo = rb_intern_const("memo");
4737 id_method = rb_intern_const("method");
4738 id_force = rb_intern_const("force");
4739 id_to_enum = rb_intern_const("to_enum");
4740 id_each_entry = rb_intern_const("each_entry");
4741 sym_each = ID2SYM(id_each);
4742 sym_yield = ID2SYM(rb_intern_const("yield"));
4743
4744 InitVM(Enumerator);
4745}