Ruby 3.5.0dev (2025-02-19 revision 27ba268b75bbe461460b31426e377b42d4935f70)
enumerator.c (27ba268b75bbe461460b31426e377b42d4935f70)
1/************************************************
2
3 enumerator.c - provides Enumerator class
4
5 $Author$
6
7 Copyright (C) 2001-2003 Akinori MUSHA
8
9 $Idaemons: /home/cvs/rb/enumerator/enumerator.c,v 1.1.1.1 2001/07/15 10:12:48 knu Exp $
10 $RoughId: enumerator.c,v 1.6 2003/07/27 11:03:24 nobu Exp $
11 $Id$
12
13************************************************/
14
15#include "ruby/internal/config.h"
16
17#ifdef HAVE_FLOAT_H
18#include <float.h>
19#endif
20
21#include "id.h"
22#include "internal.h"
23#include "internal/class.h"
24#include "internal/enumerator.h"
25#include "internal/error.h"
26#include "internal/hash.h"
27#include "internal/imemo.h"
28#include "internal/numeric.h"
29#include "internal/range.h"
30#include "internal/rational.h"
31#include "ruby/ruby.h"
32
33/*
34 * Document-class: Enumerator
35 *
36 * A class which allows both internal and external iteration.
37 *
38 * An Enumerator can be created by the following methods.
39 * - Object#to_enum
40 * - Object#enum_for
41 * - Enumerator.new
42 *
43 * Most methods have two forms: a block form where the contents
44 * are evaluated for each item in the enumeration, and a non-block form
45 * which returns a new Enumerator wrapping the iteration.
46 *
47 * enumerator = %w(one two three).each
48 * puts enumerator.class # => Enumerator
49 *
50 * enumerator.each_with_object("foo") do |item, obj|
51 * puts "#{obj}: #{item}"
52 * end
53 *
54 * # foo: one
55 * # foo: two
56 * # foo: three
57 *
58 * enum_with_obj = enumerator.each_with_object("foo")
59 * puts enum_with_obj.class # => Enumerator
60 *
61 * enum_with_obj.each do |item, obj|
62 * puts "#{obj}: #{item}"
63 * end
64 *
65 * # foo: one
66 * # foo: two
67 * # foo: three
68 *
69 * This allows you to chain Enumerators together. For example, you
70 * can map a list's elements to strings containing the index
71 * and the element via:
72 *
73 * puts %w[foo bar baz].map.with_index { |w, i| "#{i}:#{w}" }
74 * # => ["0:foo", "1:bar", "2:baz"]
75 *
76 * == External Iteration
77 *
78 * An Enumerator can also be used as an external iterator.
79 * For example, Enumerator#next returns the next value of the iterator
80 * or raises StopIteration if the Enumerator is at the end.
81 *
82 * e = [1,2,3].each # returns an enumerator object.
83 * puts e.next # => 1
84 * puts e.next # => 2
85 * puts e.next # => 3
86 * puts e.next # raises StopIteration
87 *
88 * +next+, +next_values+, +peek+, and +peek_values+ are the only methods
89 * which use external iteration (and Array#zip(Enumerable-not-Array) which uses +next+ internally).
90 *
91 * These methods do not affect other internal enumeration methods,
92 * unless the underlying iteration method itself has side effects, e.g. IO#each_line.
93 *
94 * FrozenError will be raised if these methods are called against a frozen enumerator.
95 * Since +rewind+ and +feed+ also change state for external iteration,
96 * these methods may raise FrozenError too.
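 *
 * For example:
 *
 * e = [1, 2, 3].each.freeze
 * e.next # raises FrozenError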
97 *
98 * External iteration differs *significantly* from internal iteration
99 * due to using a Fiber:
100 * - The Fiber adds some overhead compared to internal enumeration.
101 * - The stacktrace will only include the stack from the Enumerator, not above.
102 * - Fiber-local variables are *not* inherited inside the Enumerator Fiber,
103 * which instead starts with no Fiber-local variables.
104 * - Fiber storage variables *are* inherited and are designed
105 * to handle Enumerator Fibers. Assigning to a Fiber storage variable
106 * only affects the current Fiber, so if you want to change state
107 * in the caller Fiber of the Enumerator Fiber, you need to use an
108 * extra indirection (e.g., use some object in the Fiber storage
109 * variable and mutate some ivar of it).
110 *
111 * Concretely:
112 *
113 * Thread.current[:fiber_local] = 1
114 * Fiber[:storage_var] = 1
115 * e = Enumerator.new do |y|
116 * p Thread.current[:fiber_local] # for external iteration: nil, for internal iteration: 1
117 * p Fiber[:storage_var] # => 1, inherited
118 * Fiber[:storage_var] += 1
119 * y << 42
120 * end
121 *
122 * p e.next # => 42
123 * p Fiber[:storage_var] # => 1 (it ran in a different Fiber)
124 *
125 * e.each { p _1 }
126 * p Fiber[:storage_var] # => 2 (it ran in the same Fiber/"stack" as the current Fiber)
127 *
128 * == Convert External Iteration to Internal Iteration
129 *
130 * You can use an external iterator to implement an internal iterator as follows:
131 *
132 * def ext_each(e)
133 * while true
134 * begin
135 * vs = e.next_values
136 * rescue StopIteration
137 * return $!.result
138 * end
139 * y = yield(*vs)
140 * e.feed y
141 * end
142 * end
143 *
144 * o = Object.new
145 *
146 * def o.each
147 * puts yield
148 * puts yield(1)
149 * puts yield(1, 2)
150 * 3
151 * end
152 *
153 * # use o.each as an internal iterator directly.
154 * puts o.each {|*x| puts x; [:b, *x] }
155 * # => [], [:b], [1], [:b, 1], [1, 2], [:b, 1, 2], 3
156 *
157 * # convert o.each to an external iterator for
158 * # implementing an internal iterator.
159 * puts ext_each(o.to_enum) {|*x| puts x; [:b, *x] }
160 * # => [], [:b], [1], [:b, 1], [1, 2], [:b, 1, 2], 3
161 *
162 */
163VALUE rb_cEnumerator;
164static VALUE rb_cLazy;
165static ID id_rewind, id_new, id_to_enum, id_each_entry;
166static ID id_next, id_result, id_receiver, id_arguments, id_memo, id_method, id_force;
167static ID id_begin, id_end, id_step, id_exclude_end;
168static VALUE sym_each, sym_cycle, sym_yield;
169
170static VALUE lazy_use_super_method;
171
172extern ID ruby_static_id_cause;
173
174#define id_call idCall
175#define id_cause ruby_static_id_cause
176#define id_each idEach
177#define id_eqq idEqq
178#define id_initialize idInitialize
179#define id_size idSize
180
181VALUE rb_eStopIteration;
182
183struct enumerator {
184 VALUE obj;
185 ID meth;
186 VALUE args;
187 VALUE fib;
188 VALUE dst;
189 VALUE lookahead;
190 VALUE feedvalue;
191 VALUE stop_exc;
192 VALUE size;
193 VALUE procs;
194 rb_enumerator_size_func *size_fn;
195 int kw_splat;
196};
197
198RUBY_REFERENCES(enumerator_refs) = {
199 RUBY_REF_EDGE(struct enumerator, obj),
200 RUBY_REF_EDGE(struct enumerator, args),
201 RUBY_REF_EDGE(struct enumerator, fib),
202 RUBY_REF_EDGE(struct enumerator, dst),
203 RUBY_REF_EDGE(struct enumerator, lookahead),
204 RUBY_REF_EDGE(struct enumerator, feedvalue),
205 RUBY_REF_EDGE(struct enumerator, stop_exc),
206 RUBY_REF_EDGE(struct enumerator, size),
207 RUBY_REF_EDGE(struct enumerator, procs),
208 RUBY_REF_END
209};
210
211static VALUE rb_cGenerator, rb_cYielder, rb_cEnumProducer;
212
213struct generator {
214 VALUE proc;
215 VALUE obj;
216};
217
218struct yielder {
219 VALUE proc;
220};
221
222struct producer {
223 VALUE init;
224 VALUE proc;
225};
226
227typedef struct MEMO *lazyenum_proc_func(VALUE, struct MEMO *, VALUE, long);
228typedef VALUE lazyenum_size_func(VALUE, VALUE);
229typedef int lazyenum_precheck_func(VALUE proc_entry);
230typedef struct {
231 lazyenum_proc_func *proc;
232 lazyenum_size_func *size;
233 lazyenum_precheck_func *precheck;
234} lazyenum_funcs;
235
236struct proc_entry {
237 VALUE proc;
238 VALUE memo;
239 const lazyenum_funcs *fn;
240};
241
242static VALUE generator_allocate(VALUE klass);
243static VALUE generator_init(VALUE obj, VALUE proc);
244
245static VALUE rb_cEnumChain;
246
247struct enum_chain {
248 VALUE enums;
249 long pos;
250};
251
252static VALUE rb_cEnumProduct;
253
254struct enum_product {
255 VALUE enums;
256};
257
258VALUE rb_cArithSeq;
259
260static const rb_data_type_t enumerator_data_type = {
261 "enumerator",
262 {
263 RUBY_REFS_LIST_PTR(enumerator_refs),
264 RUBY_TYPED_DEFAULT_FREE,
265 NULL, // Nothing allocated externally, so don't need a memsize function
266 NULL,
267 },
268 0, NULL, RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED | RUBY_TYPED_DECL_MARKING | RUBY_TYPED_EMBEDDABLE
269};
270
271static struct enumerator *
272enumerator_ptr(VALUE obj)
273{
274 struct enumerator *ptr;
275
276 TypedData_Get_Struct(obj, struct enumerator, &enumerator_data_type, ptr);
277 if (!ptr || UNDEF_P(ptr->obj)) {
278 rb_raise(rb_eArgError, "uninitialized enumerator");
279 }
280 return ptr;
281}
282
283static void
284proc_entry_mark(void *p)
285{
286 struct proc_entry *ptr = p;
287 rb_gc_mark_movable(ptr->proc);
288 rb_gc_mark_movable(ptr->memo);
289}
290
291static void
292proc_entry_compact(void *p)
293{
294 struct proc_entry *ptr = p;
295 ptr->proc = rb_gc_location(ptr->proc);
296 ptr->memo = rb_gc_location(ptr->memo);
297}
298
299static const rb_data_type_t proc_entry_data_type = {
300 "proc_entry",
301 {
302 proc_entry_mark,
303 RUBY_TYPED_DEFAULT_FREE,
304 NULL, // Nothing allocated externally, so don't need a memsize function
305 proc_entry_compact,
306 },
307 0, 0, RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED | RUBY_TYPED_EMBEDDABLE
308};
309
310static struct proc_entry *
311proc_entry_ptr(VALUE proc_entry)
312{
313 struct proc_entry *ptr;
314
315 TypedData_Get_Struct(proc_entry, struct proc_entry, &proc_entry_data_type, ptr);
316
317 return ptr;
318}
319
320/*
321 * call-seq:
322 * obj.to_enum(method = :each, *args) -> enum
323 * obj.enum_for(method = :each, *args) -> enum
324 * obj.to_enum(method = :each, *args) {|*args| block} -> enum
325 * obj.enum_for(method = :each, *args){|*args| block} -> enum
326 *
327 * Creates a new Enumerator which will enumerate by calling +method+ on
328 * +obj+, passing +args+ if any. Whatever is _yielded_ by +method+ becomes
329 * the values of the enumerator.
330 *
331 * If a block is given, it will be used to calculate the size of
332 * the enumerator without the need to iterate it (see Enumerator#size).
333 *
334 * === Examples
335 *
336 * str = "xyz"
337 *
338 * enum = str.enum_for(:each_byte)
339 * enum.each { |b| puts b }
340 * # => 120
341 * # => 121
342 * # => 122
343 *
344 * # protect an array from being modified by some_method
345 * a = [1, 2, 3]
346 * some_method(a.to_enum)
347 *
348 * # String#split in block form is more memory-efficient:
349 * very_large_string.split("|") { |chunk| return chunk if chunk.include?('DATE') }
350 * # This could be rewritten more idiomatically with to_enum:
351 * very_large_string.to_enum(:split, "|").lazy.grep(/DATE/).first
352 *
353 * It is typical to call to_enum when defining methods for
354 * a generic Enumerable, in case no block is passed.
355 *
356 * Here is such an example, with parameter passing and a sizing block:
357 *
358 * module Enumerable
359 * # a generic method to repeat the values of any enumerable
360 * def repeat(n)
361 * raise ArgumentError, "#{n} is negative!" if n < 0
362 * unless block_given?
363 * return to_enum(__method__, n) do # __method__ is :repeat here
364 * sz = size # Call size and multiply by n...
365 * sz * n if sz # but return nil if size itself is nil
366 * end
367 * end
368 * each do |*val|
369 * n.times { yield *val }
370 * end
371 * end
372 * end
373 *
374 * %i[hello world].repeat(2) { |w| puts w }
375 * # => Prints 'hello', 'hello', 'world', 'world'
376 * enum = (1..14).repeat(3)
377 * # => returns an Enumerator when called without a block
378 * enum.first(4) # => [1, 1, 1, 2]
379 * enum.size # => 42
380 */
381static VALUE
382obj_to_enum(int argc, VALUE *argv, VALUE obj)
383{
384 VALUE enumerator, meth = sym_each;
385
386 if (argc > 0) {
387 --argc;
388 meth = *argv++;
389 }
390 enumerator = rb_enumeratorize_with_size(obj, meth, argc, argv, 0);
391 if (rb_block_given_p()) {
392 RB_OBJ_WRITE(enumerator, &enumerator_ptr(enumerator)->size, rb_block_proc());
393 }
394 return enumerator;
395}
396
397static VALUE
398enumerator_allocate(VALUE klass)
399{
400 struct enumerator *ptr;
401 VALUE enum_obj;
402
403 enum_obj = TypedData_Make_Struct(klass, struct enumerator, &enumerator_data_type, ptr);
404 ptr->obj = Qundef;
405
406 return enum_obj;
407}
408
409static VALUE
410enumerator_init(VALUE enum_obj, VALUE obj, VALUE meth, int argc, const VALUE *argv, rb_enumerator_size_func *size_fn, VALUE size, int kw_splat)
411{
412 struct enumerator *ptr;
413
414 rb_check_frozen(enum_obj);
415 TypedData_Get_Struct(enum_obj, struct enumerator, &enumerator_data_type, ptr);
416
417 if (!ptr) {
418 rb_raise(rb_eArgError, "unallocated enumerator");
419 }
420
421 RB_OBJ_WRITE(enum_obj, &ptr->obj, obj);
422 ptr->meth = rb_to_id(meth);
423 if (argc) RB_OBJ_WRITE(enum_obj, &ptr->args, rb_ary_new4(argc, argv));
424 ptr->fib = 0;
425 ptr->dst = Qnil;
426 ptr->lookahead = Qundef;
427 ptr->feedvalue = Qundef;
428 ptr->stop_exc = Qfalse;
429 RB_OBJ_WRITE(enum_obj, &ptr->size, size);
430 ptr->size_fn = size_fn;
431 ptr->kw_splat = kw_splat;
432
433 return enum_obj;
434}
435
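/* Normalize a user-supplied size value: nil, callables, and Float::INFINITY are
 * kept as-is; anything else is coerced to an Integer. */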
436static VALUE
437convert_to_feasible_size_value(VALUE obj)
438{
439 if (NIL_P(obj)) {
440 return obj;
441 }
442 else if (rb_respond_to(obj, id_call)) {
443 return obj;
444 }
445 else if (RB_FLOAT_TYPE_P(obj) && RFLOAT_VALUE(obj) == HUGE_VAL) {
446 return obj;
447 }
448 else {
449 return rb_to_int(obj);
450 }
451}
452
453/*
454 * call-seq:
455 * Enumerator.new(size = nil) { |yielder| ... }
456 *
457 * Creates a new Enumerator object, which can be used as an
458 * Enumerable.
459 *
460 * Iteration is defined by the given block, in
461 * which a "yielder" object, given as block parameter, can be used to
462 * yield a value by calling the +yield+ method (aliased as <code><<</code>):
463 *
464 * fib = Enumerator.new do |y|
465 * a = b = 1
466 * loop do
467 * y << a
468 * a, b = b, a + b
469 * end
470 * end
471 *
472 * fib.take(10) # => [1, 1, 2, 3, 5, 8, 13, 21, 34, 55]
473 *
474 * The optional parameter can be used to specify how to calculate the size
475 * in a lazy fashion (see Enumerator#size). It can either be a value or
476 * a callable object.
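 *
 * For example, a size given here is what Enumerator#size reports without iterating:
 *
 * squares = Enumerator.new(3) { |y| 3.times { |i| y << i * i } }
 * squares.size # => 3
 * squares.to_a # => [0, 1, 4]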
477 */
478static VALUE
479enumerator_initialize(int argc, VALUE *argv, VALUE obj)
480{
481 VALUE iter = rb_block_proc();
482 VALUE recv = generator_init(generator_allocate(rb_cGenerator), iter);
483 VALUE arg0 = rb_check_arity(argc, 0, 1) ? argv[0] : Qnil;
484 VALUE size = convert_to_feasible_size_value(arg0);
485
486 return enumerator_init(obj, recv, sym_each, 0, 0, 0, size, false);
487}
488
489/* :nodoc: */
490static VALUE
491enumerator_init_copy(VALUE obj, VALUE orig)
492{
493 struct enumerator *ptr0, *ptr1;
494
495 if (!OBJ_INIT_COPY(obj, orig)) return obj;
496 ptr0 = enumerator_ptr(orig);
497 if (ptr0->fib) {
498 /* Fibers cannot be copied */
499 rb_raise(rb_eTypeError, "can't copy execution context");
500 }
501
502 TypedData_Get_Struct(obj, struct enumerator, &enumerator_data_type, ptr1);
503
504 if (!ptr1) {
505 rb_raise(rb_eArgError, "unallocated enumerator");
506 }
507
508 RB_OBJ_WRITE(obj, &ptr1->obj, ptr0->obj);
509 ptr1->meth = ptr0->meth;
510 RB_OBJ_WRITE(obj, &ptr1->args, ptr0->args);
511 ptr1->fib = 0;
512 ptr1->lookahead = Qundef;
513 ptr1->feedvalue = Qundef;
514 RB_OBJ_WRITE(obj, &ptr1->size, ptr0->size);
515 ptr1->size_fn = ptr0->size_fn;
516
517 return obj;
518}
519
520/*
521 * For backwards compatibility; use rb_enumeratorize_with_size
522 */
523VALUE
524rb_enumeratorize(VALUE obj, VALUE meth, int argc, const VALUE *argv)
525{
526 return rb_enumeratorize_with_size(obj, meth, argc, argv, 0);
527}
528
529static VALUE lazy_to_enum_i(VALUE self, VALUE meth, int argc, const VALUE *argv, rb_enumerator_size_func *size_fn, int kw_splat);
530static int lazy_precheck(VALUE procs);
531
532VALUE
533rb_enumeratorize_with_size_kw(VALUE obj, VALUE meth, int argc, const VALUE *argv, rb_enumerator_size_func *size_fn, int kw_splat)
534{
535 VALUE base_class = rb_cEnumerator;
536
537 if (RTEST(rb_obj_is_kind_of(obj, rb_cLazy))) {
538 base_class = rb_cLazy;
539 }
540 else if (RTEST(rb_obj_is_kind_of(obj, rb_cEnumChain))) {
541 obj = enumerator_init(enumerator_allocate(rb_cEnumerator), obj, sym_each, 0, 0, 0, Qnil, false);
542 }
543
544 return enumerator_init(enumerator_allocate(base_class),
545 obj, meth, argc, argv, size_fn, Qnil, kw_splat);
546}
547
548VALUE
549rb_enumeratorize_with_size(VALUE obj, VALUE meth, int argc, const VALUE *argv, rb_enumerator_size_func *size_fn)
550{
551 return rb_enumeratorize_with_size_kw(obj, meth, argc, argv, size_fn, rb_keyword_given_p());
552}
553
554static VALUE
555enumerator_block_call(VALUE obj, rb_block_call_func *func, VALUE arg)
556{
557 int argc = 0;
558 const VALUE *argv = 0;
559 const struct enumerator *e = enumerator_ptr(obj);
560 ID meth = e->meth;
561
562 VALUE args = e->args;
563 if (args) {
564 argc = RARRAY_LENINT(args);
565 argv = RARRAY_CONST_PTR(args);
566 }
567
568 VALUE ret = rb_block_call_kw(e->obj, meth, argc, argv, func, arg, e->kw_splat);
569
570 RB_GC_GUARD(args);
571
572 return ret;
573}
574
575/*
576 * call-seq:
577 * enum.each { |elm| block } -> obj
578 * enum.each -> enum
579 * enum.each(*appending_args) { |elm| block } -> obj
580 * enum.each(*appending_args) -> an_enumerator
581 *
582 * Iterates over the block according to how this Enumerator was constructed.
583 * If no block and no arguments are given, returns self.
584 *
585 * === Examples
586 *
587 * "Hello, world!".scan(/\w+/) #=> ["Hello", "world"]
588 * "Hello, world!".to_enum(:scan, /\w+/).to_a #=> ["Hello", "world"]
589 * "Hello, world!".to_enum(:scan).each(/\w+/).to_a #=> ["Hello", "world"]
590 *
591 * obj = Object.new
592 *
593 * def obj.each_arg(a, b=:b, *rest)
594 * yield a
595 * yield b
596 * yield rest
597 * :method_returned
598 * end
599 *
600 * enum = obj.to_enum :each_arg, :a, :x
601 *
602 * enum.each.to_a #=> [:a, :x, []]
603 * enum.each.equal?(enum) #=> true
604 * enum.each { |elm| elm } #=> :method_returned
605 *
606 * enum.each(:y, :z).to_a #=> [:a, :x, [:y, :z]]
607 * enum.each(:y, :z).equal?(enum) #=> false
608 * enum.each(:y, :z) { |elm| elm } #=> :method_returned
609 *
610 */
611static VALUE
612enumerator_each(int argc, VALUE *argv, VALUE obj)
613{
614 struct enumerator *e = enumerator_ptr(obj);
615
616 if (argc > 0) {
617 VALUE args = (e = enumerator_ptr(obj = rb_obj_dup(obj)))->args;
618 if (args) {
619#if SIZEOF_INT < SIZEOF_LONG
620 /* check int range overflow */
621 rb_long2int(RARRAY_LEN(args) + argc);
622#endif
623 args = rb_ary_dup(args);
624 rb_ary_cat(args, argv, argc);
625 }
626 else {
627 args = rb_ary_new4(argc, argv);
628 }
629 RB_OBJ_WRITE(obj, &e->args, args);
630 e->size = Qnil;
631 e->size_fn = 0;
632 }
633 if (!rb_block_given_p()) return obj;
634
635 if (!lazy_precheck(e->procs)) return Qnil;
636
637 return enumerator_block_call(obj, 0, obj);
638}
639
640static VALUE
641enumerator_with_index_i(RB_BLOCK_CALL_FUNC_ARGLIST(val, m))
642{
643 struct MEMO *memo = (struct MEMO *)m;
644 VALUE idx = memo->v1;
645 MEMO_V1_SET(memo, rb_int_succ(idx));
646
647 if (argc <= 1)
648 return rb_yield_values(2, val, idx);
649
650 return rb_yield_values(2, rb_ary_new4(argc, argv), idx);
651}
652
653static VALUE
654enumerator_size(VALUE obj);
655
656static VALUE
657enumerator_enum_size(VALUE obj, VALUE args, VALUE eobj)
658{
659 return enumerator_size(obj);
660}
661
662/*
663 * call-seq:
664 * e.with_index(offset = 0) {|(*args), idx| ... }
665 * e.with_index(offset = 0)
666 *
667 * Iterates the given block for each element with an index, which
668 * starts from +offset+. If no block is given, returns a new Enumerator
669 * that includes the index, starting from +offset+.
670 *
671 * +offset+:: the starting index to use
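 *
 * For example:
 *
 * %w[foo bar baz].each.with_index(1).to_a
 * # => [["foo", 1], ["bar", 2], ["baz", 3]]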
672 *
673 */
674static VALUE
675enumerator_with_index(int argc, VALUE *argv, VALUE obj)
676{
677 VALUE memo;
678
679 rb_check_arity(argc, 0, 1);
680 RETURN_SIZED_ENUMERATOR(obj, argc, argv, enumerator_enum_size);
681 memo = (!argc || NIL_P(memo = argv[0])) ? INT2FIX(0) : rb_to_int(memo);
682 return enumerator_block_call(obj, enumerator_with_index_i, (VALUE)MEMO_NEW(memo, 0, 0));
683}
684
685/*
686 * call-seq:
687 * e.each_with_index {|(*args), idx| ... }
688 * e.each_with_index
689 *
690 * Same as Enumerator#with_index(0), i.e. there is no starting offset.
691 *
692 * If no block is given, a new Enumerator is returned that includes the index.
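 *
 * For example:
 *
 * %w[a b c].each.each_with_index.to_a
 * # => [["a", 0], ["b", 1], ["c", 2]]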
693 *
694 */
695static VALUE
696enumerator_each_with_index(VALUE obj)
697{
698 return enumerator_with_index(0, NULL, obj);
699}
700
701static VALUE
702enumerator_with_object_i(RB_BLOCK_CALL_FUNC_ARGLIST(val, memo))
703{
704 if (argc <= 1)
705 return rb_yield_values(2, val, memo);
706
707 return rb_yield_values(2, rb_ary_new4(argc, argv), memo);
708}
709
710/*
711 * call-seq:
712 * e.each_with_object(obj) {|(*args), obj| ... }
713 * e.each_with_object(obj)
714 * e.with_object(obj) {|(*args), obj| ... }
715 * e.with_object(obj)
716 *
717 * Iterates the given block for each element with an arbitrary object, +obj+,
718 * and returns +obj+.
719 *
720 * If no block is given, returns a new Enumerator.
721 *
722 * === Example
723 *
724 * to_three = Enumerator.new do |y|
725 * 3.times do |x|
726 * y << x
727 * end
728 * end
729 *
730 * to_three_with_string = to_three.with_object("foo")
731 * to_three_with_string.each do |x,string|
732 * puts "#{string}: #{x}"
733 * end
734 *
735 * # => foo: 0
736 * # => foo: 1
737 * # => foo: 2
738 */
739static VALUE
740enumerator_with_object(VALUE obj, VALUE memo)
741{
742 RETURN_SIZED_ENUMERATOR(obj, 1, &memo, enumerator_enum_size);
743 enumerator_block_call(obj, enumerator_with_object_i, memo);
744
745 return memo;
746}
747
748static VALUE
749next_ii(RB_BLOCK_CALL_FUNC_ARGLIST(i, obj))
750{
751 struct enumerator *e = enumerator_ptr(obj);
752 VALUE feedvalue = Qnil;
753 VALUE args = rb_ary_new4(argc, argv);
754 rb_fiber_yield(1, &args);
755 if (!UNDEF_P(e->feedvalue)) {
756 feedvalue = e->feedvalue;
757 e->feedvalue = Qundef;
758 }
759 return feedvalue;
760}
761
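/* Fiber body for external iteration: runs the underlying #each, handing each set
 * of yielded values back to the caller through next_ii/rb_fiber_yield, and finally
 * stores a StopIteration that carries the return value of #each. */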
762static VALUE
763next_i(RB_BLOCK_CALL_FUNC_ARGLIST(_, obj))
764{
765 struct enumerator *e = enumerator_ptr(obj);
766 VALUE nil = Qnil;
767 VALUE result;
768
769 result = rb_block_call(obj, id_each, 0, 0, next_ii, obj);
770 RB_OBJ_WRITE(obj, &e->stop_exc, rb_exc_new2(rb_eStopIteration, "iteration reached an end"));
771 rb_ivar_set(e->stop_exc, id_result, result);
772 return rb_fiber_yield(1, &nil);
773}
774
775static void
776next_init(VALUE obj, struct enumerator *e)
777{
778 VALUE curr = rb_fiber_current();
779 RB_OBJ_WRITE(obj, &e->dst, curr);
780 RB_OBJ_WRITE(obj, &e->fib, rb_fiber_new(next_i, obj));
781 e->lookahead = Qundef;
782}
783
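/* Fetch the next set of yielded values by resuming the enumerator's fiber,
 * (re)creating the fiber if needed. If iteration has already ended, raise a
 * fresh StopIteration chained to the stored one so StopIteration#result is
 * preserved. */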
784static VALUE
785get_next_values(VALUE obj, struct enumerator *e)
786{
787 VALUE curr, vs;
788
789 if (e->stop_exc) {
790 VALUE exc = e->stop_exc;
791 VALUE result = rb_attr_get(exc, id_result);
792 VALUE mesg = rb_attr_get(exc, idMesg);
793 if (!NIL_P(mesg)) mesg = rb_str_dup(mesg);
794 VALUE stop_exc = rb_exc_new_str(rb_eStopIteration, mesg);
795 rb_ivar_set(stop_exc, id_cause, exc);
796 rb_ivar_set(stop_exc, id_result, result);
797 rb_exc_raise(stop_exc);
798 }
799
800 curr = rb_fiber_current();
801
802 if (!e->fib || !rb_fiber_alive_p(e->fib)) {
803 next_init(obj, e);
804 }
805
806 vs = rb_fiber_resume(e->fib, 1, &curr);
807 if (e->stop_exc) {
808 e->fib = 0;
809 e->dst = Qnil;
810 e->lookahead = Qundef;
811 e->feedvalue = Qundef;
812 rb_exc_raise(e->stop_exc);
813 }
814 return vs;
815}
816
817/*
818 * call-seq:
819 * e.next_values -> array
820 *
821 * Returns the next object in the enumerator as an array, and moves the
822 * internal position forward. When the position reaches the end,
823 * StopIteration is raised.
824 *
825 * See class-level notes about external iterators.
826 *
827 * This method can be used to distinguish <code>yield</code> and <code>yield
828 * nil</code>.
829 *
830 * === Example
831 *
832 * o = Object.new
833 * def o.each
834 * yield
835 * yield 1
836 * yield 1, 2
837 * yield nil
838 * yield [1, 2]
839 * end
840 * e = o.to_enum
841 * p e.next_values
842 * p e.next_values
843 * p e.next_values
844 * p e.next_values
845 * p e.next_values
846 * e = o.to_enum
847 * p e.next
848 * p e.next
849 * p e.next
850 * p e.next
851 * p e.next
852 *
853 * ## yield args next_values next
854 * # yield [] nil
855 * # yield 1 [1] 1
856 * # yield 1, 2 [1, 2] [1, 2]
857 * # yield nil [nil] nil
858 * # yield [1, 2] [[1, 2]] [1, 2]
859 *
860 */
861
862static VALUE
863enumerator_next_values(VALUE obj)
864{
865 struct enumerator *e = enumerator_ptr(obj);
866 VALUE vs;
867
868 rb_check_frozen(obj);
869
870 if (!UNDEF_P(e->lookahead)) {
871 vs = e->lookahead;
872 e->lookahead = Qundef;
873 return vs;
874 }
875
876 return get_next_values(obj, e);
877}
878
879static VALUE
880ary2sv(VALUE args, int dup)
881{
882 if (!RB_TYPE_P(args, T_ARRAY))
883 return args;
884
885 switch (RARRAY_LEN(args)) {
886 case 0:
887 return Qnil;
888
889 case 1:
890 return RARRAY_AREF(args, 0);
891
892 default:
893 if (dup)
894 return rb_ary_dup(args);
895 return args;
896 }
897}
898
899/*
900 * call-seq:
901 * e.next -> object
902 *
903 * Returns the next object in the enumerator, and moves the internal position
904 * forward. When the position reaches the end, StopIteration is raised.
905 *
906 * === Example
907 *
908 * a = [1,2,3]
909 * e = a.to_enum
910 * p e.next #=> 1
911 * p e.next #=> 2
912 * p e.next #=> 3
913 * p e.next #raises StopIteration
914 *
915 * See class-level notes about external iterators.
916 *
917 */
918
919static VALUE
920enumerator_next(VALUE obj)
921{
922 VALUE vs = enumerator_next_values(obj);
923 return ary2sv(vs, 0);
924}
925
926static VALUE
927enumerator_peek_values(VALUE obj)
928{
929 struct enumerator *e = enumerator_ptr(obj);
930
931 rb_check_frozen(obj);
932
933 if (UNDEF_P(e->lookahead)) {
934 RB_OBJ_WRITE(obj, &e->lookahead, get_next_values(obj, e));
935 }
936
937 return e->lookahead;
938}
939
940/*
941 * call-seq:
942 * e.peek_values -> array
943 *
944 * Returns the next object as an array, similar to Enumerator#next_values, but
945 * doesn't move the internal position forward. If the position is already at
946 * the end, StopIteration is raised.
947 *
948 * See class-level notes about external iterators.
949 *
950 * === Example
951 *
952 * o = Object.new
953 * def o.each
954 * yield
955 * yield 1
956 * yield 1, 2
957 * end
958 * e = o.to_enum
959 * p e.peek_values #=> []
960 * e.next
961 * p e.peek_values #=> [1]
962 * p e.peek_values #=> [1]
963 * e.next
964 * p e.peek_values #=> [1, 2]
965 * e.next
966 * p e.peek_values # raises StopIteration
967 *
968 */
969
970static VALUE
971enumerator_peek_values_m(VALUE obj)
972{
973 return rb_ary_dup(enumerator_peek_values(obj));
974}
975
976/*
977 * call-seq:
978 * e.peek -> object
979 *
980 * Returns the next object in the enumerator, but doesn't move the internal
981 * position forward. If the position is already at the end, StopIteration
982 * is raised.
983 *
984 * See class-level notes about external iterators.
985 *
986 * === Example
987 *
988 * a = [1,2,3]
989 * e = a.to_enum
990 * p e.next #=> 1
991 * p e.peek #=> 2
992 * p e.peek #=> 2
993 * p e.peek #=> 2
994 * p e.next #=> 2
995 * p e.next #=> 3
996 * p e.peek #raises StopIteration
997 *
998 */
999
1000static VALUE
1001enumerator_peek(VALUE obj)
1002{
1003 VALUE vs = enumerator_peek_values(obj);
1004 return ary2sv(vs, 1);
1005}
1006
1007/*
1008 * call-seq:
1009 * e.feed obj -> nil
1010 *
1011 * Sets the value to be returned by the next yield inside +e+.
1012 *
1013 * If the value is not set, the yield returns nil.
1014 *
1015 * This value is cleared after being yielded.
1016 *
1017 * # Array#map passes the array's elements to "yield" and collects the
1018 * # results of "yield" as an array.
1019 * # The following example shows that "next" returns the passed elements and
1020 * # values passed to "feed" are collected as an array which can be
1021 * # obtained by StopIteration#result.
1022 * e = [1,2,3].map
1023 * p e.next #=> 1
1024 * e.feed "a"
1025 * p e.next #=> 2
1026 * e.feed "b"
1027 * p e.next #=> 3
1028 * e.feed "c"
1029 * begin
1030 * e.next
1031 * rescue StopIteration
1032 * p $!.result #=> ["a", "b", "c"]
1033 * end
1034 *
1035 * o = Object.new
1036 * def o.each
1037 * x = yield # (2) blocks
1038 * p x # (5) => "foo"
1039 * x = yield # (6) blocks
1040 * p x # (8) => nil
1041 * x = yield # (9) blocks
1042 * p x # not reached w/o another e.next
1043 * end
1044 *
1045 * e = o.to_enum
1046 * e.next # (1)
1047 * e.feed "foo" # (3)
1048 * e.next # (4)
1049 * e.next # (7)
1050 * # (10)
1051 */
1052
1053static VALUE
1054enumerator_feed(VALUE obj, VALUE v)
1055{
1056 struct enumerator *e = enumerator_ptr(obj);
1057
1058 rb_check_frozen(obj);
1059
1060 if (!UNDEF_P(e->feedvalue)) {
1061 rb_raise(rb_eTypeError, "feed value already set");
1062 }
1063 RB_OBJ_WRITE(obj, &e->feedvalue, v);
1064
1065 return Qnil;
1066}
1067
1068/*
1069 * call-seq:
1070 * e.rewind -> e
1071 *
1072 * Rewinds the enumeration sequence to the beginning.
1073 *
1074 * If the enclosed object responds to a "rewind" method, it is called.
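 *
 * For example:
 *
 * e = [1, 2, 3].each
 * e.next # => 1
 * e.next # => 2
 * e.rewind
 * e.next # => 1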
1075 */
1076
1077static VALUE
1078enumerator_rewind(VALUE obj)
1079{
1080 struct enumerator *e = enumerator_ptr(obj);
1081
1082 rb_check_frozen(obj);
1083
1084 rb_check_funcall(e->obj, id_rewind, 0, 0);
1085
1086 e->fib = 0;
1087 e->dst = Qnil;
1088 e->lookahead = Qundef;
1089 e->feedvalue = Qundef;
1090 e->stop_exc = Qfalse;
1091 return obj;
1092}
1093
1094static struct generator *generator_ptr(VALUE obj);
1095static VALUE append_method(VALUE obj, VALUE str, ID default_method, VALUE default_args);
1096
1097static VALUE
1098inspect_enumerator(VALUE obj, VALUE dummy, int recur)
1099{
1100 struct enumerator *e;
1101 VALUE eobj, str, cname;
1102
1103 TypedData_Get_Struct(obj, struct enumerator, &enumerator_data_type, e);
1104
1105 cname = rb_obj_class(obj);
1106
1107 if (!e || UNDEF_P(e->obj)) {
1108 return rb_sprintf("#<%"PRIsVALUE": uninitialized>", rb_class_path(cname));
1109 }
1110
1111 if (recur) {
1112 str = rb_sprintf("#<%"PRIsVALUE": ...>", rb_class_path(cname));
1113 return str;
1114 }
1115
1116 if (e->procs) {
1117 long i;
1118
1119 eobj = generator_ptr(e->obj)->obj;
1120 /* In the case of a proc-chained enumerator, traverse all proc entries manually */
1121 if (rb_obj_class(eobj) == cname) {
1122 str = rb_inspect(eobj);
1123 }
1124 else {
1125 str = rb_sprintf("#<%"PRIsVALUE": %+"PRIsVALUE">", rb_class_path(cname), eobj);
1126 }
1127 for (i = 0; i < RARRAY_LEN(e->procs); i++) {
1128 str = rb_sprintf("#<%"PRIsVALUE": %"PRIsVALUE, cname, str);
1129 append_method(RARRAY_AREF(e->procs, i), str, e->meth, e->args);
1130 rb_str_buf_cat2(str, ">");
1131 }
1132 return str;
1133 }
1134
1135 eobj = rb_attr_get(obj, id_receiver);
1136 if (NIL_P(eobj)) {
1137 eobj = e->obj;
1138 }
1139
1140 /* (1..100).each_cons(2) => "#<Enumerator: 1..100:each_cons(2)>" */
1141 str = rb_sprintf("#<%"PRIsVALUE": %+"PRIsVALUE, rb_class_path(cname), eobj);
1142 append_method(obj, str, e->meth, e->args);
1143
1144 rb_str_buf_cat2(str, ">");
1145
1146 return str;
1147}
1148
1149static int
1150key_symbol_p(VALUE key, VALUE val, VALUE arg)
1151{
1152 if (SYMBOL_P(key)) return ST_CONTINUE;
1153 *(int *)arg = FALSE;
1154 return ST_STOP;
1155}
1156
1157static int
1158kwd_append(VALUE key, VALUE val, VALUE str)
1159{
1160 if (!SYMBOL_P(key)) rb_raise(rb_eRuntimeError, "non-symbol key inserted");
1161 rb_str_catf(str, "% "PRIsVALUE": %"PRIsVALUE", ", key, val);
1162 return ST_CONTINUE;
1163}
1164
1165static VALUE
1166append_method(VALUE obj, VALUE str, ID default_method, VALUE default_args)
1167{
1168 VALUE method, eargs;
1169
1170 method = rb_attr_get(obj, id_method);
1171 if (method != Qfalse) {
1172 if (!NIL_P(method)) {
1173 Check_Type(method, T_SYMBOL);
1174 method = rb_sym2str(method);
1175 }
1176 else {
1177 method = rb_id2str(default_method);
1178 }
1179 rb_str_buf_cat2(str, ":");
1180 rb_str_buf_append(str, method);
1181 }
1182
1183 eargs = rb_attr_get(obj, id_arguments);
1184 if (NIL_P(eargs)) {
1185 eargs = default_args;
1186 }
1187 if (eargs != Qfalse) {
1188 long argc = RARRAY_LEN(eargs);
1189 const VALUE *argv = RARRAY_CONST_PTR(eargs); /* WB: no new reference */
1190
1191 if (argc > 0) {
1192 VALUE kwds = Qnil;
1193
1194 rb_str_buf_cat2(str, "(");
1195
1196 if (RB_TYPE_P(argv[argc-1], T_HASH) && !RHASH_EMPTY_P(argv[argc-1])) {
1197 int all_key = TRUE;
1198 rb_hash_foreach(argv[argc-1], key_symbol_p, (VALUE)&all_key);
1199 if (all_key) kwds = argv[--argc];
1200 }
1201
1202 while (argc--) {
1203 VALUE arg = *argv++;
1204
1205 rb_str_append(str, rb_inspect(arg));
1206 rb_str_buf_cat2(str, ", ");
1207 }
1208 if (!NIL_P(kwds)) {
1209 rb_hash_foreach(kwds, kwd_append, str);
1210 }
1211 rb_str_set_len(str, RSTRING_LEN(str)-2);
1212 rb_str_buf_cat2(str, ")");
1213 }
1214 }
1215
1216 return str;
1217}
1218
1219/*
1220 * call-seq:
1221 * e.inspect -> string
1222 *
1223 * Creates a printable version of <i>e</i>.
1224 */
1225
1226static VALUE
1227enumerator_inspect(VALUE obj)
1228{
1229 return rb_exec_recursive(inspect_enumerator, obj, 0);
1230}
1231
1232/*
1233 * call-seq:
1234 * e.size -> int, Float::INFINITY or nil
1235 *
1236 * Returns the size of the enumerator, or +nil+ if it can't be calculated lazily.
1237 *
1238 * (1..100).to_a.permutation(4).size # => 94109400
1239 * loop.size # => Float::INFINITY
1240 * (1..100).drop_while.size # => nil
1241 */
1242
1243static VALUE
1244enumerator_size(VALUE obj)
1245{
1246 struct enumerator *e = enumerator_ptr(obj);
1247 int argc = 0;
1248 const VALUE *argv = NULL;
1249 VALUE size;
1250
1251 if (e->procs) {
1252 struct generator *g = generator_ptr(e->obj);
1253 VALUE receiver = rb_check_funcall(g->obj, id_size, 0, 0);
1254 long i = 0;
1255
1256 for (i = 0; i < RARRAY_LEN(e->procs); i++) {
1257 VALUE proc = RARRAY_AREF(e->procs, i);
1258 struct proc_entry *entry = proc_entry_ptr(proc);
1259 lazyenum_size_func *size_fn = entry->fn->size;
1260 if (!size_fn) {
1261 return Qnil;
1262 }
1263 receiver = (*size_fn)(proc, receiver);
1264 }
1265 return receiver;
1266 }
1267
1268 if (e->size_fn) {
1269 return (*e->size_fn)(e->obj, e->args, obj);
1270 }
1271 if (e->args) {
1272 argc = (int)RARRAY_LEN(e->args);
1273 argv = RARRAY_CONST_PTR(e->args);
1274 }
1275 size = rb_check_funcall_kw(e->size, id_call, argc, argv, e->kw_splat);
1276 if (!UNDEF_P(size)) return size;
1277 return e->size;
1278}
1279
1280/*
1281 * Yielder
1282 */
1283static void
1284yielder_mark(void *p)
1285{
1286 struct yielder *ptr = p;
1287 rb_gc_mark_movable(ptr->proc);
1288}
1289
1290static void
1291yielder_compact(void *p)
1292{
1293 struct yielder *ptr = p;
1294 ptr->proc = rb_gc_location(ptr->proc);
1295}
1296
1297static const rb_data_type_t yielder_data_type = {
1298 "yielder",
1299 {
1300 yielder_mark,
1301 RUBY_TYPED_DEFAULT_FREE,
1302 NULL,
1303 yielder_compact,
1304 },
1305 0, 0, RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED | RUBY_TYPED_EMBEDDABLE
1306};
1307
1308static struct yielder *
1309yielder_ptr(VALUE obj)
1310{
1311 struct yielder *ptr;
1312
1313 TypedData_Get_Struct(obj, struct yielder, &yielder_data_type, ptr);
1314 if (!ptr || UNDEF_P(ptr->proc)) {
1315 rb_raise(rb_eArgError, "uninitialized yielder");
1316 }
1317 return ptr;
1318}
1319
1320/* :nodoc: */
1321static VALUE
1322yielder_allocate(VALUE klass)
1323{
1324 struct yielder *ptr;
1325 VALUE obj;
1326
1327 obj = TypedData_Make_Struct(klass, struct yielder, &yielder_data_type, ptr);
1328 ptr->proc = Qundef;
1329
1330 return obj;
1331}
1332
1333static VALUE
1334yielder_init(VALUE obj, VALUE proc)
1335{
1336 struct yielder *ptr;
1337
1338 TypedData_Get_Struct(obj, struct yielder, &yielder_data_type, ptr);
1339
1340 if (!ptr) {
1341 rb_raise(rb_eArgError, "unallocated yielder");
1342 }
1343
1344 RB_OBJ_WRITE(obj, &ptr->proc, proc);
1345
1346 return obj;
1347}
1348
1349/* :nodoc: */
1350static VALUE
1351yielder_initialize(VALUE obj)
1352{
1353 rb_need_block();
1354
1355 return yielder_init(obj, rb_block_proc());
1356}
1357
1358/* :nodoc: */
1359static VALUE
1360yielder_yield(VALUE obj, VALUE args)
1361{
1362 struct yielder *ptr = yielder_ptr(obj);
1363
1364 return rb_proc_call_kw(ptr->proc, args, RB_PASS_CALLED_KEYWORDS);
1365}
1366
1367/* :nodoc: */
1368static VALUE
1369yielder_yield_push(VALUE obj, VALUE arg)
1370{
1371 struct yielder *ptr = yielder_ptr(obj);
1372
1373 rb_proc_call_with_block(ptr->proc, 1, &arg, Qnil);
1374
1375 return obj;
1376}
1377
1378/*
1379 * Returns a Proc object that takes arguments and yields them.
1380 *
1381 * This method is implemented so that a Yielder object can be directly
1382 * passed to another method as a block argument.
1383 *
1384 * enum = Enumerator.new { |y|
1385 * Dir.glob("*.rb") { |file|
1386 * File.open(file) { |f| f.each_line(&y) }
1387 * }
1388 * }
1389 */
1390static VALUE
1391yielder_to_proc(VALUE obj)
1392{
1393 VALUE method = rb_obj_method(obj, sym_yield);
1394
1395 return rb_funcall(method, idTo_proc, 0);
1396}
1397
1398static VALUE
1399yielder_yield_i(RB_BLOCK_CALL_FUNC_ARGLIST(obj, memo))
1400{
1401 return rb_yield_values_kw(argc, argv, RB_PASS_CALLED_KEYWORDS);
1402}
1403
1404static VALUE
1405yielder_new(void)
1406{
1407 return yielder_init(yielder_allocate(rb_cYielder), rb_proc_new(yielder_yield_i, 0));
1408}
1409
1410/*
1411 * Generator
1412 */
1413static void
1414generator_mark(void *p)
1415{
1416 struct generator *ptr = p;
1417 rb_gc_mark_movable(ptr->proc);
1418 rb_gc_mark_movable(ptr->obj);
1419}
1420
1421static void
1422generator_compact(void *p)
1423{
1424 struct generator *ptr = p;
1425 ptr->proc = rb_gc_location(ptr->proc);
1426 ptr->obj = rb_gc_location(ptr->obj);
1427}
1428
1429static const rb_data_type_t generator_data_type = {
1430 "generator",
1431 {
1432 generator_mark,
1433 RUBY_TYPED_DEFAULT_FREE,
1434 NULL,
1435 generator_compact,
1436 },
1437 0, 0, RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED | RUBY_TYPED_EMBEDDABLE
1438};
1439
1440static struct generator *
1441generator_ptr(VALUE obj)
1442{
1443 struct generator *ptr;
1444
1445 TypedData_Get_Struct(obj, struct generator, &generator_data_type, ptr);
1446 if (!ptr || UNDEF_P(ptr->proc)) {
1447 rb_raise(rb_eArgError, "uninitialized generator");
1448 }
1449 return ptr;
1450}
1451
1452/* :nodoc: */
1453static VALUE
1454generator_allocate(VALUE klass)
1455{
1456 struct generator *ptr;
1457 VALUE obj;
1458
1459 obj = TypedData_Make_Struct(klass, struct generator, &generator_data_type, ptr);
1460 ptr->proc = Qundef;
1461
1462 return obj;
1463}
1464
1465static VALUE
1466generator_init(VALUE obj, VALUE proc)
1467{
1468 struct generator *ptr;
1469
1470 rb_check_frozen(obj);
1471 TypedData_Get_Struct(obj, struct generator, &generator_data_type, ptr);
1472
1473 if (!ptr) {
1474 rb_raise(rb_eArgError, "unallocated generator");
1475 }
1476
1477 RB_OBJ_WRITE(obj, &ptr->proc, proc);
1478
1479 return obj;
1480}
1481
1482/* :nodoc: */
1483static VALUE
1484generator_initialize(int argc, VALUE *argv, VALUE obj)
1485{
1486 VALUE proc;
1487
1488 if (argc == 0) {
1489 rb_need_block();
1490
1491 proc = rb_block_proc();
1492 }
1493 else {
1494 rb_scan_args(argc, argv, "1", &proc);
1495
1496 if (!rb_obj_is_proc(proc))
1497 rb_raise(rb_eTypeError,
1498 "wrong argument type %"PRIsVALUE" (expected Proc)",
1499 rb_obj_class(proc));
1500
1501 if (rb_block_given_p()) {
1502 rb_warn("given block not used");
1503 }
1504 }
1505
1506 return generator_init(obj, proc);
1507}
1508
1509/* :nodoc: */
1510static VALUE
1511generator_init_copy(VALUE obj, VALUE orig)
1512{
1513 struct generator *ptr0, *ptr1;
1514
1515 if (!OBJ_INIT_COPY(obj, orig)) return obj;
1516
1517 ptr0 = generator_ptr(orig);
1518
1519 TypedData_Get_Struct(obj, struct generator, &generator_data_type, ptr1);
1520
1521 if (!ptr1) {
1522 rb_raise(rb_eArgError, "unallocated generator");
1523 }
1524
1525 RB_OBJ_WRITE(obj, &ptr1->proc, ptr0->proc);
1526
1527 return obj;
1528}
1529
1530/* :nodoc: */
1531static VALUE
1532generator_each(int argc, VALUE *argv, VALUE obj)
1533{
1534 struct generator *ptr = generator_ptr(obj);
1535 VALUE args = rb_ary_new2(argc + 1);
1536
1537 rb_ary_push(args, yielder_new());
1538 if (argc > 0) {
1539 rb_ary_cat(args, argv, argc);
1540 }
1541
1542 return rb_proc_call_kw(ptr->proc, args, RB_PASS_CALLED_KEYWORDS);
1543}
1544
1545/* Lazy Enumerator methods */
1546static VALUE
1547enum_size(VALUE self)
1548{
1549 VALUE r = rb_check_funcall(self, id_size, 0, 0);
1550 return UNDEF_P(r) ? Qnil : r;
1551}
1552
1553static VALUE
1554lazyenum_size(VALUE self, VALUE args, VALUE eobj)
1555{
1556 return enum_size(self);
1557}
1558
1559#define lazy_receiver_size lazy_map_size
1560
1561static VALUE
1562lazy_init_iterator(RB_BLOCK_CALL_FUNC_ARGLIST(val, m))
1563{
1564 VALUE result;
1565 if (argc == 1) {
1566 VALUE args[2];
1567 args[0] = m;
1568 args[1] = val;
1569 result = rb_yield_values2(2, args);
1570 }
1571 else {
1572 VALUE args;
1573 int len = rb_long2int((long)argc + 1);
1574 VALUE *nargv = ALLOCV_N(VALUE, args, len);
1575
1576 nargv[0] = m;
1577 if (argc > 0) {
1578 MEMCPY(nargv + 1, argv, VALUE, argc);
1579 }
1580 result = rb_yield_values2(len, nargv);
1581 ALLOCV_END(args);
1582 }
1583 if (UNDEF_P(result)) rb_iter_break();
1584 return Qnil;
1585}
1586
1587static VALUE
1588lazy_init_block_i(RB_BLOCK_CALL_FUNC_ARGLIST(val, m))
1589{
1590 rb_block_call(m, id_each, argc-1, argv+1, lazy_init_iterator, val);
1591 return Qnil;
1592}
1593
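/* Each lazy evaluation step threads a struct MEMO through the proc chain:
 * v1 holds the [yielder, procs] pair, v2 (memo_value) the current value, and
 * u3.state (memo_flags) records the BREAK and PACKED bits defined below. */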
1594#define memo_value v2
1595#define memo_flags u3.state
1596#define LAZY_MEMO_BREAK 1
1597#define LAZY_MEMO_PACKED 2
1598#define LAZY_MEMO_BREAK_P(memo) ((memo)->memo_flags & LAZY_MEMO_BREAK)
1599#define LAZY_MEMO_PACKED_P(memo) ((memo)->memo_flags & LAZY_MEMO_PACKED)
1600#define LAZY_MEMO_SET_BREAK(memo) ((memo)->memo_flags |= LAZY_MEMO_BREAK)
1601#define LAZY_MEMO_RESET_BREAK(memo) ((memo)->memo_flags &= ~LAZY_MEMO_BREAK)
1602#define LAZY_MEMO_SET_VALUE(memo, value) MEMO_V2_SET(memo, value)
1603#define LAZY_MEMO_SET_PACKED(memo) ((memo)->memo_flags |= LAZY_MEMO_PACKED)
1604#define LAZY_MEMO_RESET_PACKED(memo) ((memo)->memo_flags &= ~LAZY_MEMO_PACKED)
1605
1606static VALUE lazy_yielder_result(struct MEMO *result, VALUE yielder, VALUE procs_array, VALUE memos, long i);
1607
1608static VALUE
1609lazy_init_yielder(RB_BLOCK_CALL_FUNC_ARGLIST(_, m))
1610{
1611 VALUE yielder = RARRAY_AREF(m, 0);
1612 VALUE procs_array = RARRAY_AREF(m, 1);
1613 VALUE memos = rb_attr_get(yielder, id_memo);
1614 struct MEMO *result;
1615
1616 result = MEMO_NEW(m, rb_enum_values_pack(argc, argv),
1617 argc > 1 ? LAZY_MEMO_PACKED : 0);
1618 return lazy_yielder_result(result, yielder, procs_array, memos, 0);
1619}
1620
1621static VALUE
1622lazy_yielder_yield(struct MEMO *result, long memo_index, int argc, const VALUE *argv)
1623{
1624 VALUE m = result->v1;
1625 VALUE yielder = RARRAY_AREF(m, 0);
1626 VALUE procs_array = RARRAY_AREF(m, 1);
1627 VALUE memos = rb_attr_get(yielder, id_memo);
1628 LAZY_MEMO_SET_VALUE(result, rb_enum_values_pack(argc, argv));
1629 if (argc > 1)
1630 LAZY_MEMO_SET_PACKED(result);
1631 else
1632 LAZY_MEMO_RESET_PACKED(result);
1633 return lazy_yielder_result(result, yielder, procs_array, memos, memo_index);
1634}
1635
1636static VALUE
1637lazy_yielder_result(struct MEMO *result, VALUE yielder, VALUE procs_array, VALUE memos, long i)
1638{
1639 int cont = 1;
1640
1641 for (; i < RARRAY_LEN(procs_array); i++) {
1642 VALUE proc = RARRAY_AREF(procs_array, i);
1643 struct proc_entry *entry = proc_entry_ptr(proc);
1644 if (!(*entry->fn->proc)(proc, result, memos, i)) {
1645 cont = 0;
1646 break;
1647 }
1648 }
1649
1650 if (cont) {
1651 rb_funcall2(yielder, idLTLT, 1, &(result->memo_value));
1652 }
1653 if (LAZY_MEMO_BREAK_P(result)) {
1654 rb_iter_break();
1655 }
1656 return result->memo_value;
1657}
1658
1659static VALUE
1660lazy_init_block(RB_BLOCK_CALL_FUNC_ARGLIST(val, m))
1661{
1662 VALUE procs = RARRAY_AREF(m, 1);
1663
1664 rb_ivar_set(val, id_memo, rb_ary_new2(RARRAY_LEN(procs)));
1665 rb_block_call(RARRAY_AREF(m, 0), id_each, 0, 0,
1666 lazy_init_yielder, rb_ary_new3(2, val, procs));
1667 return Qnil;
1668}
1669
1670static VALUE
1671lazy_generator_init(VALUE enumerator, VALUE procs)
1672{
1673 VALUE generator;
1674 VALUE obj;
1675 struct generator *gen_ptr;
1676 struct enumerator *e = enumerator_ptr(enumerator);
1677
1678 if (RARRAY_LEN(procs) > 0) {
1679 struct generator *old_gen_ptr = generator_ptr(e->obj);
1680 obj = old_gen_ptr->obj;
1681 }
1682 else {
1683 obj = enumerator;
1684 }
1685
1686 generator = generator_allocate(rb_cGenerator);
1687
1688 rb_block_call(generator, id_initialize, 0, 0,
1689 lazy_init_block, rb_ary_new3(2, obj, procs));
1690
1691 gen_ptr = generator_ptr(generator);
1692 RB_OBJ_WRITE(generator, &gen_ptr->obj, obj);
1693
1694 return generator;
1695}
1696
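/* Run each chained step's precheck hook, last step first; if any returns
 * false, enumeration of the whole chain can be skipped. */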
1697static int
1698lazy_precheck(VALUE procs)
1699{
1700 if (RTEST(procs)) {
1701 long num_procs = RARRAY_LEN(procs), i = num_procs;
1702 while (i-- > 0) {
1703 VALUE proc = RARRAY_AREF(procs, i);
1704 struct proc_entry *entry = proc_entry_ptr(proc);
1705 lazyenum_precheck_func *precheck = entry->fn->precheck;
1706 if (precheck && !precheck(proc)) return FALSE;
1707 }
1708 }
1709
1710 return TRUE;
1711}
1712
1713/*
1714 * Document-class: Enumerator::Lazy
1715 *
1716 * Enumerator::Lazy is a special type of Enumerator that allows constructing
1717 * chains of operations without evaluating them immediately, and evaluating
1718 * values on an as-needed basis. In order to do so, it redefines most of the
1719 * Enumerable methods so that they just construct another lazy enumerator.
1720 *
1721 * Enumerator::Lazy can be constructed from any Enumerable with the
1722 * Enumerable#lazy method.
1723 *
1724 * lazy = (1..Float::INFINITY).lazy.select(&:odd?).drop(10).take_while { |i| i < 30 }
1725 * # => #<Enumerator::Lazy: #<Enumerator::Lazy: #<Enumerator::Lazy: #<Enumerator::Lazy: 1..Infinity>:select>:drop(10)>:take_while>
1726 *
1727 * The real enumeration is performed when any non-redefined Enumerable method
1728 * is called, like Enumerable#first or Enumerable#to_a (the latter is aliased
1729 * as #force for more semantic code):
1730 *
1731 * lazy.first(2)
1732 * #=> [21, 23]
1733 *
1734 * lazy.force
1735 * #=> [21, 23, 25, 27, 29]
1736 *
1737 * Note that most Enumerable methods that can be called with or without
1738 * a block will always require a block on Enumerator::Lazy:
1739 *
1740 * [1, 2, 3].map #=> #<Enumerator: [1, 2, 3]:map>
1741 * [1, 2, 3].lazy.map # ArgumentError: tried to call lazy map without a block
1742 *
1743 * This class allows idiomatic calculations on long or infinite sequences, as well
1744 * as chaining of calculations without constructing intermediate arrays.
1745 *
1746 * Example for working with a slowly calculated sequence:
1747 *
1748 * require 'open-uri'
1749 *
1750 * # This will fetch all URLs before selecting
1751 * # necessary data
1752 * URLS.map { |u| JSON.parse(URI.open(u).read) }
1753 * .select { |data| data.key?('stats') }
1754 * .first(5)
1755 *
1756 * # This will fetch URLs one by one, only until
1757 * # there is enough data to satisfy the condition
1758 * URLS.lazy.map { |u| JSON.parse(URI.open(u).read) }
1759 * .select { |data| data.key?('stats') }
1760 * .first(5)
1761 *
1762 * Ending a chain with ".eager" generates a non-lazy enumerator, which
1763 * is suitable for returning or passing to another method that expects
1764 * a normal enumerator.
1765 *
1766 * def active_items
1767 * groups
1768 * .lazy
1769 * .flat_map(&:items)
1770 * .reject(&:disabled)
1771 * .eager
1772 * end
1773 *
1774 * # This works lazily; if a checked item is found, it stops
1775 * # iteration and does not look into remaining groups.
1776 * first_checked = active_items.find(&:checked)
1777 *
1778 * # This returns an array of items like a normal enumerator does.
1779 * all_checked = active_items.select(&:checked)
1780 *
1781 */
1782
1783/*
1784 * call-seq:
1785 * Lazy.new(obj, size=nil) { |yielder, *values| block }
1786 *
1787 * Creates a new Lazy enumerator. When the enumerator is actually enumerated
1788 * (e.g. by calling #force), +obj+ will be enumerated and each value passed
1789 * to the given block. The block can yield values back using +yielder+.
1790 * For example, to create a "filter+map" enumerator:
1791 *
1792 * def filter_map(sequence)
1793 * Lazy.new(sequence) do |yielder, *values|
1794 * result = yield *values
1795 * yielder << result if result
1796 * end
1797 * end
1798 *
1799 * filter_map(1..Float::INFINITY) {|i| i*i if i.even?}.first(5)
1800 * #=> [4, 16, 36, 64, 100]
1801 */
1802static VALUE
1803lazy_initialize(int argc, VALUE *argv, VALUE self)
1804{
1805 VALUE obj, size = Qnil;
1806 VALUE generator;
1807
1808 rb_check_arity(argc, 1, 2);
1809 if (!rb_block_given_p()) {
1810 rb_raise(rb_eArgError, "tried to call lazy new without a block");
1811 }
1812 obj = argv[0];
1813 if (argc > 1) {
1814 size = argv[1];
1815 }
1816 generator = generator_allocate(rb_cGenerator);
1817 rb_block_call(generator, id_initialize, 0, 0, lazy_init_block_i, obj);
1818 enumerator_init(self, generator, sym_each, 0, 0, 0, size, 0);
1819 rb_ivar_set(self, id_receiver, obj);
1820
1821 return self;
1822}
1823
1824#if 0 /* for RDoc */
1825/*
1826 * call-seq:
1827 * lazy.to_a -> array
1828 * lazy.force -> array
1829 *
1830 * Expands the +lazy+ enumerator to an array.
1831 * See Enumerable#to_a.
1832 */
1833static VALUE
1834lazy_to_a(VALUE self)
1835{
1836}
1837#endif
1838
1839static void
1840lazy_set_args(VALUE lazy, VALUE args)
1841{
1842 ID id = rb_frame_this_func();
1843 rb_ivar_set(lazy, id_method, ID2SYM(id));
1844 if (NIL_P(args)) {
1845 /* Qfalse indicates that the arguments are empty */
1846 rb_ivar_set(lazy, id_arguments, Qfalse);
1847 }
1848 else {
1849 rb_ivar_set(lazy, id_arguments, args);
1850 }
1851}
1852
1853#if 0
1854static VALUE
1855lazy_set_method(VALUE lazy, VALUE args, rb_enumerator_size_func *size_fn)
1856{
1857 struct enumerator *e = enumerator_ptr(lazy);
1858 lazy_set_args(lazy, args);
1859 e->size_fn = size_fn;
1860 return lazy;
1861}
1862#endif
1863
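/* Build a new Enumerator::Lazy that appends one more proc_entry (holding the
 * step's block, args, and callbacks) to the receiver's proc chain, backed by a
 * regenerated generator over the original source object. */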
1864static VALUE
1865lazy_add_method(VALUE obj, int argc, VALUE *argv, VALUE args, VALUE memo,
1866 const lazyenum_funcs *fn)
1867{
1868 struct enumerator *new_e;
1869 VALUE new_obj;
1870 VALUE new_generator;
1871 VALUE new_procs;
1872 struct enumerator *e = enumerator_ptr(obj);
1873 struct proc_entry *entry;
1874 VALUE entry_obj = TypedData_Make_Struct(rb_cObject, struct proc_entry,
1875 &proc_entry_data_type, entry);
1876 if (rb_block_given_p()) {
1877 RB_OBJ_WRITE(entry_obj, &entry->proc, rb_block_proc());
1878 }
1879 entry->fn = fn;
1880 RB_OBJ_WRITE(entry_obj, &entry->memo, args);
1881
1882 lazy_set_args(entry_obj, memo);
1883
1884 new_procs = RTEST(e->procs) ? rb_ary_dup(e->procs) : rb_ary_new();
1885 new_generator = lazy_generator_init(obj, new_procs);
1886 rb_ary_push(new_procs, entry_obj);
1887
1888 new_obj = enumerator_init_copy(enumerator_allocate(rb_cLazy), obj);
1889 new_e = RTYPEDDATA_GET_DATA(new_obj);
1890 RB_OBJ_WRITE(new_obj, &new_e->obj, new_generator);
1891 RB_OBJ_WRITE(new_obj, &new_e->procs, new_procs);
1892
1893 if (argc > 0) {
1894 new_e->meth = rb_to_id(*argv++);
1895 --argc;
1896 }
1897 else {
1898 new_e->meth = id_each;
1899 }
1900
1901 RB_OBJ_WRITE(new_obj, &new_e->args, rb_ary_new4(argc, argv));
1902
1903 return new_obj;
1904}
1905
1906/*
1907 * call-seq:
1908 * e.lazy -> lazy_enumerator
1909 *
1910 * Returns an Enumerator::Lazy, which redefines most Enumerable
1911 * methods to postpone enumeration and enumerate values only on an
1912 * as-needed basis.
1913 *
1914 * === Example
1915 *
1916 * The following program finds pythagorean triples:
1917 *
1918 * def pythagorean_triples
1919 * (1..Float::INFINITY).lazy.flat_map {|z|
1920 * (1..z).flat_map {|x|
1921 * (x..z).select {|y|
1922 * x**2 + y**2 == z**2
1923 * }.map {|y|
1924 * [x, y, z]
1925 * }
1926 * }
1927 * }
1928 * end
1929 * # show first ten pythagorean triples
1930 * p pythagorean_triples.take(10).force # take is lazy, so force is needed
1931 * p pythagorean_triples.first(10) # first is eager
1932 * # show pythagorean triples less than 100
1933 * p pythagorean_triples.take_while { |*, z| z < 100 }.force
1934 */
1935static VALUE
1936enumerable_lazy(VALUE obj)
1937{
1938 VALUE result = lazy_to_enum_i(obj, sym_each, 0, 0, lazyenum_size, rb_keyword_given_p());
1939 /* Qfalse indicates that the Enumerator::Lazy has no method name */
1940 rb_ivar_set(result, id_method, Qfalse);
1941 return result;
1942}
1943
1944static VALUE
1945lazy_to_enum_i(VALUE obj, VALUE meth, int argc, const VALUE *argv, rb_enumerator_size_func *size_fn, int kw_splat)
1946{
1947 return enumerator_init(enumerator_allocate(rb_cLazy),
1948 obj, meth, argc, argv, size_fn, Qnil, kw_splat);
1949}
1950
1951/*
1952 * call-seq:
1953 * lzy.to_enum(method = :each, *args) -> lazy_enum
1954 * lzy.enum_for(method = :each, *args) -> lazy_enum
1955 * lzy.to_enum(method = :each, *args) {|*args| block } -> lazy_enum
1956 * lzy.enum_for(method = :each, *args) {|*args| block } -> lazy_enum
1957 *
1958 * Similar to Object#to_enum, except it returns a lazy enumerator.
1959 * This makes it easy to define Enumerable methods that will
1960 * naturally remain lazy if called from a lazy enumerator.
1961 *
1962 * For example, continuing from the example in Object#to_enum:
1963 *
1964 * # See Object#to_enum for the definition of repeat
1965 * r = 1..Float::INFINITY
1966 * r.repeat(2).first(5) # => [1, 1, 2, 2, 3]
1967 * r.repeat(2).class # => Enumerator
1968 * r.repeat(2).map{|n| n ** 2}.first(5) # => endless loop!
1969 * # works naturally on lazy enumerator:
1970 * r.lazy.repeat(2).class # => Enumerator::Lazy
1971 * r.lazy.repeat(2).map{|n| n ** 2}.first(5) # => [1, 1, 4, 4, 9]
1972 */
1973
1974static VALUE
1975lazy_to_enum(int argc, VALUE *argv, VALUE self)
1976{
1977 VALUE lazy, meth = sym_each, super_meth;
1978
1979 if (argc > 0) {
1980 --argc;
1981 meth = *argv++;
1982 }
1983 if (RTEST((super_meth = rb_hash_aref(lazy_use_super_method, meth)))) {
1984 meth = super_meth;
1985 }
1986 lazy = lazy_to_enum_i(self, meth, argc, argv, 0, rb_keyword_given_p());
1987 if (rb_block_given_p()) {
1988 RB_OBJ_WRITE(lazy, &enumerator_ptr(lazy)->size, rb_block_proc());
1989 }
1990 return lazy;
1991}
1992
1993static VALUE
1994lazy_eager_size(VALUE self, VALUE args, VALUE eobj)
1995{
1996 return enum_size(self);
1997}
1998
1999/*
2000 * call-seq:
2001 * lzy.eager -> enum
2002 *
2003 * Returns a non-lazy Enumerator converted from the lazy enumerator.
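 *
 * For example:
 *
 * lazy = (1..Float::INFINITY).lazy.select(&:odd?)
 * lazy.eager.class # => Enumerator
 * lazy.eager.first(3) # => [1, 3, 5]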
2004 */
2005
2006static VALUE
2007lazy_eager(VALUE self)
2008{
2009 return enumerator_init(enumerator_allocate(rb_cEnumerator),
2010 self, sym_each, 0, 0, lazy_eager_size, Qnil, 0);
2011}
2012
2013static VALUE
2014lazyenum_yield(VALUE proc_entry, struct MEMO *result)
2015{
2016 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2017 return rb_proc_call_with_block(entry->proc, 1, &result->memo_value, Qnil);
2018}
2019
2020static VALUE
2021lazyenum_yield_values(VALUE proc_entry, struct MEMO *result)
2022{
2023 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2024 int argc = 1;
2025 const VALUE *argv = &result->memo_value;
2026 if (LAZY_MEMO_PACKED_P(result)) {
2027 const VALUE args = *argv;
2028 argc = RARRAY_LENINT(args);
2029 argv = RARRAY_CONST_PTR(args);
2030 }
2031 return rb_proc_call_with_block(entry->proc, argc, argv, Qnil);
2032}
2033
2034static struct MEMO *
2035lazy_map_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2036{
2037 VALUE value = lazyenum_yield_values(proc_entry, result);
2038 LAZY_MEMO_SET_VALUE(result, value);
2039 LAZY_MEMO_RESET_PACKED(result);
2040 return result;
2041}
2042
2043static VALUE
2044lazy_map_size(VALUE entry, VALUE receiver)
2045{
2046 return receiver;
2047}
2048
2049static const lazyenum_funcs lazy_map_funcs = {
2050 lazy_map_proc, lazy_map_size,
2051};
2052
2053/*
2054 * call-seq:
2055 * lazy.collect { |obj| block } -> lazy_enumerator
2056 * lazy.map { |obj| block } -> lazy_enumerator
2057 *
2058 * Like Enumerable#map, but chains operation to be lazy-evaluated.
2059 *
2060 * (1..Float::INFINITY).lazy.map {|i| i**2 }
2061 * #=> #<Enumerator::Lazy: #<Enumerator::Lazy: 1..Infinity>:map>
2062 * (1..Float::INFINITY).lazy.map {|i| i**2 }.first(3)
2063 * #=> [1, 4, 9]
2064 */
2065
2066static VALUE
2067lazy_map(VALUE obj)
2068{
2069 if (!rb_block_given_p()) {
2070 rb_raise(rb_eArgError, "tried to call lazy map without a block");
2071 }
2072
2073 return lazy_add_method(obj, 0, 0, Qnil, Qnil, &lazy_map_funcs);
2074}
2075
2076struct flat_map_i_arg {
2077 struct MEMO *result;
2078 long index;
2079};
2080
2081static VALUE
2082lazy_flat_map_i(RB_BLOCK_CALL_FUNC_ARGLIST(i, y))
2083{
2084 struct flat_map_i_arg *arg = (struct flat_map_i_arg *)y;
2085
2086 return lazy_yielder_yield(arg->result, arg->index, argc, argv);
2087}
2088
2089static struct MEMO *
2090lazy_flat_map_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2091{
2092 VALUE value = lazyenum_yield_values(proc_entry, result);
2093 VALUE ary = 0;
2094 const long proc_index = memo_index + 1;
2095 int break_p = LAZY_MEMO_BREAK_P(result);
2096
2097 if (RB_TYPE_P(value, T_ARRAY)) {
2098 ary = value;
2099 }
2100 else if (rb_respond_to(value, id_force) && rb_respond_to(value, id_each)) {
2101 struct flat_map_i_arg arg = {.result = result, .index = proc_index};
2102 LAZY_MEMO_RESET_BREAK(result);
2103 rb_block_call(value, id_each, 0, 0, lazy_flat_map_i, (VALUE)&arg);
2104 if (break_p) LAZY_MEMO_SET_BREAK(result);
2105 return 0;
2106 }
2107
2108 if (ary || !NIL_P(ary = rb_check_array_type(value))) {
2109 long i;
2110 LAZY_MEMO_RESET_BREAK(result);
2111 for (i = 0; i + 1 < RARRAY_LEN(ary); i++) {
2112 const VALUE argv = RARRAY_AREF(ary, i);
2113 lazy_yielder_yield(result, proc_index, 1, &argv);
2114 }
2115 if (break_p) LAZY_MEMO_SET_BREAK(result);
2116 if (i >= RARRAY_LEN(ary)) return 0;
2117 value = RARRAY_AREF(ary, i);
2118 }
2119 LAZY_MEMO_SET_VALUE(result, value);
2120 LAZY_MEMO_RESET_PACKED(result);
2121 return result;
2122}
2123
2124static const lazyenum_funcs lazy_flat_map_funcs = {
2125 lazy_flat_map_proc, 0,
2126};
2127
2128/*
2129 * call-seq:
2130 * lazy.collect_concat { |obj| block } -> a_lazy_enumerator
2131 * lazy.flat_map { |obj| block } -> a_lazy_enumerator
2132 *
2133 * Returns a new lazy enumerator with the concatenated results of running
2134 * +block+ once for every element in the lazy enumerator.
2135 *
2136 * ["foo", "bar"].lazy.flat_map {|i| i.each_char.lazy}.force
2137 * #=> ["f", "o", "o", "b", "a", "r"]
2138 *
2139 * A value +x+ returned by +block+ is decomposed if either of
2140 * the following conditions is true:
2141 *
2142 * * +x+ responds to both each and force, which means that
2143 * +x+ is a lazy enumerator.
2144 * * +x+ is an array or responds to to_ary.
2145 *
2146 * Otherwise, +x+ is contained as-is in the return value.
2147 *
2148 * [{a:1}, {b:2}].lazy.flat_map {|i| i}.force
2149 * #=> [{:a=>1}, {:b=>2}]
2150 */
2151static VALUE
2152lazy_flat_map(VALUE obj)
2153{
2154 if (!rb_block_given_p()) {
2155 rb_raise(rb_eArgError, "tried to call lazy flat_map without a block");
2156 }
2157
2158 return lazy_add_method(obj, 0, 0, Qnil, Qnil, &lazy_flat_map_funcs);
2159}
2160
2161static struct MEMO *
2162lazy_select_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2163{
2164 VALUE chain = lazyenum_yield(proc_entry, result);
2165 if (!RTEST(chain)) return 0;
2166 return result;
2167}
2168
2169static const lazyenum_funcs lazy_select_funcs = {
2170 lazy_select_proc, 0,
2171};
2172
2173/*
2174 * call-seq:
2175 * lazy.find_all { |obj| block } -> lazy_enumerator
2176 * lazy.select { |obj| block } -> lazy_enumerator
2177 * lazy.filter { |obj| block } -> lazy_enumerator
2178 *
2179 * Like Enumerable#select, but chains operation to be lazy-evaluated.
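 *
 *  For example, only the requested elements should be evaluated:
 *
 *     (1..Float::INFINITY).lazy.select { |i| i % 3 == 0 }.first(3)
 *     #=> [3, 6, 9]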
2180 */
2181static VALUE
2182lazy_select(VALUE obj)
2183{
2184 if (!rb_block_given_p()) {
2185 rb_raise(rb_eArgError, "tried to call lazy select without a block");
2186 }
2187
2188 return lazy_add_method(obj, 0, 0, Qnil, Qnil, &lazy_select_funcs);
2189}
2190
2191static struct MEMO *
2192lazy_filter_map_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2193{
2194 VALUE value = lazyenum_yield_values(proc_entry, result);
2195 if (!RTEST(value)) return 0;
2196 LAZY_MEMO_SET_VALUE(result, value);
2197 LAZY_MEMO_RESET_PACKED(result);
2198 return result;
2199}
2200
2201static const lazyenum_funcs lazy_filter_map_funcs = {
2202 lazy_filter_map_proc, 0,
2203};
2204
2205/*
2206 * call-seq:
2207 * lazy.filter_map { |obj| block } -> lazy_enumerator
2208 *
2209 * Like Enumerable#filter_map, but chains operation to be lazy-evaluated.
2210 *
2211 * (1..).lazy.filter_map { |i| i * 2 if i.even? }.first(5)
2212 * #=> [4, 8, 12, 16, 20]
2213 */
2214
2215static VALUE
2216lazy_filter_map(VALUE obj)
2217{
2218 if (!rb_block_given_p()) {
2219 rb_raise(rb_eArgError, "tried to call lazy filter_map without a block");
2220 }
2221
2222 return lazy_add_method(obj, 0, 0, Qnil, Qnil, &lazy_filter_map_funcs);
2223}
2224
2225static struct MEMO *
2226lazy_reject_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2227{
2228 VALUE chain = lazyenum_yield(proc_entry, result);
2229 if (RTEST(chain)) return 0;
2230 return result;
2231}
2232
2233static const lazyenum_funcs lazy_reject_funcs = {
2234 lazy_reject_proc, 0,
2235};
2236
2237/*
2238 * call-seq:
2239 * lazy.reject { |obj| block } -> lazy_enumerator
2240 *
2241 * Like Enumerable#reject, but chains operation to be lazy-evaluated.
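 *
 *  For example:
 *
 *     (1..Float::INFINITY).lazy.reject(&:even?).first(3)
 *     #=> [1, 3, 5]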
2242 */
2243
2244static VALUE
2245lazy_reject(VALUE obj)
2246{
2247 if (!rb_block_given_p()) {
2248 rb_raise(rb_eArgError, "tried to call lazy reject without a block");
2249 }
2250
2251 return lazy_add_method(obj, 0, 0, Qnil, Qnil, &lazy_reject_funcs);
2252}
2253
2254static struct MEMO *
2255lazy_grep_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2256{
2257 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2258 VALUE chain = rb_funcall(entry->memo, id_eqq, 1, result->memo_value);
2259 if (!RTEST(chain)) return 0;
2260 return result;
2261}
2262
2263static struct MEMO *
2264lazy_grep_iter_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2265{
2266 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2267 VALUE value, chain = rb_funcall(entry->memo, id_eqq, 1, result->memo_value);
2268
2269 if (!RTEST(chain)) return 0;
2270 value = rb_proc_call_with_block(entry->proc, 1, &(result->memo_value), Qnil);
2271 LAZY_MEMO_SET_VALUE(result, value);
2272 LAZY_MEMO_RESET_PACKED(result);
2273
2274 return result;
2275}
2276
2277static const lazyenum_funcs lazy_grep_iter_funcs = {
2278 lazy_grep_iter_proc, 0,
2279};
2280
2281static const lazyenum_funcs lazy_grep_funcs = {
2282 lazy_grep_proc, 0,
2283};
2284
2285/*
2286 * call-seq:
2287 * lazy.grep(pattern) -> lazy_enumerator
2288 * lazy.grep(pattern) { |obj| block } -> lazy_enumerator
2289 *
2290 * Like Enumerable#grep, but chains operation to be lazy-evaluated.
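 *
 *  For instance, both forms are expected to stay lazy:
 *
 *     (1..Float::INFINITY).lazy.grep(10..20).first(3)
 *     #=> [10, 11, 12]
 *     (1..Float::INFINITY).lazy.grep(10..20) { |i| i * 10 }.first(3)
 *     #=> [100, 110, 120]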
2291 */
2292
2293static VALUE
2294lazy_grep(VALUE obj, VALUE pattern)
2295{
2296 const lazyenum_funcs *const funcs = rb_block_given_p() ?
2297 &lazy_grep_iter_funcs : &lazy_grep_funcs;
2298 return lazy_add_method(obj, 0, 0, pattern, rb_ary_new3(1, pattern), funcs);
2299}
2300
2301static struct MEMO *
2302lazy_grep_v_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2303{
2304 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2305 VALUE chain = rb_funcall(entry->memo, id_eqq, 1, result->memo_value);
2306 if (RTEST(chain)) return 0;
2307 return result;
2308}
2309
2310static struct MEMO *
2311lazy_grep_v_iter_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2312{
2313 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2314 VALUE value, chain = rb_funcall(entry->memo, id_eqq, 1, result->memo_value);
2315
2316 if (RTEST(chain)) return 0;
2317 value = rb_proc_call_with_block(entry->proc, 1, &(result->memo_value), Qnil);
2318 LAZY_MEMO_SET_VALUE(result, value);
2319 LAZY_MEMO_RESET_PACKED(result);
2320
2321 return result;
2322}
2323
2324static const lazyenum_funcs lazy_grep_v_iter_funcs = {
2325 lazy_grep_v_iter_proc, 0,
2326};
2327
2328static const lazyenum_funcs lazy_grep_v_funcs = {
2329 lazy_grep_v_proc, 0,
2330};
2331
2332/*
2333 * call-seq:
2334 * lazy.grep_v(pattern) -> lazy_enumerator
2335 * lazy.grep_v(pattern) { |obj| block } -> lazy_enumerator
2336 *
2337 * Like Enumerable#grep_v, but chains operation to be lazy-evaluated.
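 *
 *  For example:
 *
 *     (1..Float::INFINITY).lazy.grep_v(2..5).first(4)
 *     #=> [1, 6, 7, 8]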
2338 */
2339
2340static VALUE
2341lazy_grep_v(VALUE obj, VALUE pattern)
2342{
2343 const lazyenum_funcs *const funcs = rb_block_given_p() ?
2344 &lazy_grep_v_iter_funcs : &lazy_grep_v_funcs;
2345 return lazy_add_method(obj, 0, 0, pattern, rb_ary_new3(1, pattern), funcs);
2346}
2347
2348static VALUE
2349call_next(VALUE obj)
2350{
2351 return rb_funcall(obj, id_next, 0);
2352}
2353
2354static VALUE
2355next_stopped(VALUE obj, VALUE _)
2356{
2357 return Qnil;
2358}
2359
2360static struct MEMO *
2361lazy_zip_arrays_func(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2362{
2363 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2364 VALUE ary, arrays = entry->memo;
2365 VALUE memo = rb_ary_entry(memos, memo_index);
2366 long i, count = NIL_P(memo) ? 0 : NUM2LONG(memo);
2367
2368 ary = rb_ary_new2(RARRAY_LEN(arrays) + 1);
2369 rb_ary_push(ary, result->memo_value);
2370 for (i = 0; i < RARRAY_LEN(arrays); i++) {
2371 rb_ary_push(ary, rb_ary_entry(RARRAY_AREF(arrays, i), count));
2372 }
2373 LAZY_MEMO_SET_VALUE(result, ary);
2374 rb_ary_store(memos, memo_index, LONG2NUM(++count));
2375 return result;
2376}
2377
2378static struct MEMO *
2379lazy_zip_func(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2380{
2381 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2382 VALUE arg = rb_ary_entry(memos, memo_index);
2383 VALUE zip_args = entry->memo;
2384 VALUE ary, v;
2385 long i;
2386
2387 if (NIL_P(arg)) {
2388 arg = rb_ary_new2(RARRAY_LEN(zip_args));
2389 for (i = 0; i < RARRAY_LEN(zip_args); i++) {
2390 rb_ary_push(arg, rb_funcall(RARRAY_AREF(zip_args, i), id_to_enum, 0));
2391 }
2392 rb_ary_store(memos, memo_index, arg);
2393 }
2394
2395 ary = rb_ary_new2(RARRAY_LEN(arg) + 1);
2396 rb_ary_push(ary, result->memo_value);
2397 for (i = 0; i < RARRAY_LEN(arg); i++) {
2398        v = rb_rescue2(call_next, RARRAY_AREF(arg, i), next_stopped, 0,
2399                       rb_eStopIteration, (VALUE)0);
2400        rb_ary_push(ary, v);
2401 }
2402 LAZY_MEMO_SET_VALUE(result, ary);
2403 return result;
2404}
2405
2406static const lazyenum_funcs lazy_zip_funcs[] = {
2407 {lazy_zip_func, lazy_receiver_size,},
2408 {lazy_zip_arrays_func, lazy_receiver_size,},
2409};
2410
2411/*
2412 * call-seq:
2413 * lazy.zip(arg, ...) -> lazy_enumerator
2414 * lazy.zip(arg, ...) { |arr| block } -> nil
2415 *
2416 * Like Enumerable#zip, but chains operation to be lazy-evaluated.
2417 * However, if a block is given to zip, values are enumerated immediately.
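 *
 *  For example, without a block the result should stay lazy:
 *
 *     (1..Float::INFINITY).lazy.zip(%w[a b c]).first(2)
 *     #=> [[1, "a"], [2, "b"]]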
2418 */
2419static VALUE
2420lazy_zip(int argc, VALUE *argv, VALUE obj)
2421{
2422 VALUE ary, v;
2423 long i;
2424 const lazyenum_funcs *funcs = &lazy_zip_funcs[1];
2425
2426 if (rb_block_given_p()) {
2427 return rb_call_super(argc, argv);
2428 }
2429
2430 ary = rb_ary_new2(argc);
2431 for (i = 0; i < argc; i++) {
2432 v = rb_check_array_type(argv[i]);
2433 if (NIL_P(v)) {
2434 for (; i < argc; i++) {
2435 if (!rb_respond_to(argv[i], id_each)) {
2436 rb_raise(rb_eTypeError, "wrong argument type %"PRIsVALUE" (must respond to :each)",
2437 rb_obj_class(argv[i]));
2438 }
2439 }
2440 ary = rb_ary_new4(argc, argv);
2441 funcs = &lazy_zip_funcs[0];
2442 break;
2443 }
2444 rb_ary_push(ary, v);
2445 }
2446
2447 return lazy_add_method(obj, 0, 0, ary, ary, funcs);
2448}
2449
2450static struct MEMO *
2451lazy_take_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2452{
2453 long remain;
2454 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2455 VALUE memo = rb_ary_entry(memos, memo_index);
2456
2457 if (NIL_P(memo)) {
2458 memo = entry->memo;
2459 }
2460
2461 remain = NUM2LONG(memo);
2462 if (--remain == 0) LAZY_MEMO_SET_BREAK(result);
2463 rb_ary_store(memos, memo_index, LONG2NUM(remain));
2464 return result;
2465}
2466
2467static VALUE
2468lazy_take_size(VALUE entry, VALUE receiver)
2469{
2470 long len = NUM2LONG(RARRAY_AREF(rb_ivar_get(entry, id_arguments), 0));
2471 if (NIL_P(receiver) || (FIXNUM_P(receiver) && FIX2LONG(receiver) < len))
2472 return receiver;
2473 return LONG2NUM(len);
2474}
2475
2476static int
2477lazy_take_precheck(VALUE proc_entry)
2478{
2479 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2480 return entry->memo != INT2FIX(0);
2481}
2482
2483static const lazyenum_funcs lazy_take_funcs = {
2484 lazy_take_proc, lazy_take_size, lazy_take_precheck,
2485};
2486
2487/*
2488 * call-seq:
2489 * lazy.take(n) -> lazy_enumerator
2490 *
2491 * Like Enumerable#take, but chains operation to be lazy-evaluated.
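 *
 *  For example:
 *
 *     (1..Float::INFINITY).lazy.take(3).force
 *     #=> [1, 2, 3]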
2492 */
2493
2494static VALUE
2495lazy_take(VALUE obj, VALUE n)
2496{
2497 long len = NUM2LONG(n);
2498
2499 if (len < 0) {
2500 rb_raise(rb_eArgError, "attempt to take negative size");
2501 }
2502
2503 n = LONG2NUM(len); /* no more conversion */
2504
2505 return lazy_add_method(obj, 0, 0, n, rb_ary_new3(1, n), &lazy_take_funcs);
2506}
2507
2508static struct MEMO *
2509lazy_take_while_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2510{
2511 VALUE take = lazyenum_yield_values(proc_entry, result);
2512 if (!RTEST(take)) {
2513 LAZY_MEMO_SET_BREAK(result);
2514 return 0;
2515 }
2516 return result;
2517}
2518
2519static const lazyenum_funcs lazy_take_while_funcs = {
2520 lazy_take_while_proc, 0,
2521};
2522
2523/*
2524 * call-seq:
2525 * lazy.take_while { |obj| block } -> lazy_enumerator
2526 *
2527 * Like Enumerable#take_while, but chains operation to be lazy-evaluated.
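 *
 *  For example:
 *
 *     (1..Float::INFINITY).lazy.take_while { |i| i < 4 }.force
 *     #=> [1, 2, 3]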
2528 */
2529
2530static VALUE
2531lazy_take_while(VALUE obj)
2532{
2533 if (!rb_block_given_p()) {
2534 rb_raise(rb_eArgError, "tried to call lazy take_while without a block");
2535 }
2536
2537 return lazy_add_method(obj, 0, 0, Qnil, Qnil, &lazy_take_while_funcs);
2538}
2539
2540static VALUE
2541lazy_drop_size(VALUE proc_entry, VALUE receiver)
2542{
2543 long len = NUM2LONG(RARRAY_AREF(rb_ivar_get(proc_entry, id_arguments), 0));
2544 if (NIL_P(receiver))
2545 return receiver;
2546 if (FIXNUM_P(receiver)) {
2547 len = FIX2LONG(receiver) - len;
2548 return LONG2FIX(len < 0 ? 0 : len);
2549 }
2550 return rb_funcall(receiver, '-', 1, LONG2NUM(len));
2551}
2552
2553static struct MEMO *
2554lazy_drop_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2555{
2556 long remain;
2557 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2558 VALUE memo = rb_ary_entry(memos, memo_index);
2559
2560 if (NIL_P(memo)) {
2561 memo = entry->memo;
2562 }
2563 remain = NUM2LONG(memo);
2564 if (remain > 0) {
2565 --remain;
2566 rb_ary_store(memos, memo_index, LONG2NUM(remain));
2567 return 0;
2568 }
2569
2570 return result;
2571}
2572
2573static const lazyenum_funcs lazy_drop_funcs = {
2574 lazy_drop_proc, lazy_drop_size,
2575};
2576
2577/*
2578 * call-seq:
2579 * lazy.drop(n) -> lazy_enumerator
2580 *
2581 * Like Enumerable#drop, but chains operation to be lazy-evaluated.
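 *
 *  For example:
 *
 *     (1..Float::INFINITY).lazy.drop(3).first(3)
 *     #=> [4, 5, 6]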
2582 */
2583
2584static VALUE
2585lazy_drop(VALUE obj, VALUE n)
2586{
2587 long len = NUM2LONG(n);
2588 VALUE argv[2];
2589 argv[0] = sym_each;
2590 argv[1] = n;
2591
2592 if (len < 0) {
2593 rb_raise(rb_eArgError, "attempt to drop negative size");
2594 }
2595
2596 return lazy_add_method(obj, 2, argv, n, rb_ary_new3(1, n), &lazy_drop_funcs);
2597}
2598
2599static struct MEMO *
2600lazy_drop_while_proc(VALUE proc_entry, struct MEMO* result, VALUE memos, long memo_index)
2601{
2602 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2603 VALUE memo = rb_ary_entry(memos, memo_index);
2604
2605 if (NIL_P(memo)) {
2606 memo = entry->memo;
2607 }
2608
2609 if (!RTEST(memo)) {
2610 VALUE drop = lazyenum_yield_values(proc_entry, result);
2611 if (RTEST(drop)) return 0;
2612 rb_ary_store(memos, memo_index, Qtrue);
2613 }
2614 return result;
2615}
2616
2617static const lazyenum_funcs lazy_drop_while_funcs = {
2618 lazy_drop_while_proc, 0,
2619};
2620
2621/*
2622 * call-seq:
2623 * lazy.drop_while { |obj| block } -> lazy_enumerator
2624 *
2625 * Like Enumerable#drop_while, but chains operation to be lazy-evaluated.
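 *
 *  For example:
 *
 *     (1..Float::INFINITY).lazy.drop_while { |i| i < 4 }.first(3)
 *     #=> [4, 5, 6]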
2626 */
2627
2628static VALUE
2629lazy_drop_while(VALUE obj)
2630{
2631 if (!rb_block_given_p()) {
2632 rb_raise(rb_eArgError, "tried to call lazy drop_while without a block");
2633 }
2634
2635 return lazy_add_method(obj, 0, 0, Qfalse, Qnil, &lazy_drop_while_funcs);
2636}
2637
2638static int
2639lazy_uniq_check(VALUE chain, VALUE memos, long memo_index)
2640{
2641 VALUE hash = rb_ary_entry(memos, memo_index);
2642
2643 if (NIL_P(hash)) {
2644 hash = rb_obj_hide(rb_hash_new());
2645 rb_ary_store(memos, memo_index, hash);
2646 }
2647
2648 return rb_hash_add_new_element(hash, chain, Qfalse);
2649}
2650
2651static struct MEMO *
2652lazy_uniq_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2653{
2654 if (lazy_uniq_check(result->memo_value, memos, memo_index)) return 0;
2655 return result;
2656}
2657
2658static struct MEMO *
2659lazy_uniq_iter_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2660{
2661 VALUE chain = lazyenum_yield(proc_entry, result);
2662
2663 if (lazy_uniq_check(chain, memos, memo_index)) return 0;
2664 return result;
2665}
2666
2667static const lazyenum_funcs lazy_uniq_iter_funcs = {
2668 lazy_uniq_iter_proc, 0,
2669};
2670
2671static const lazyenum_funcs lazy_uniq_funcs = {
2672 lazy_uniq_proc, 0,
2673};
2674
2675/*
2676 * call-seq:
2677 * lazy.uniq -> lazy_enumerator
2678 * lazy.uniq { |item| block } -> lazy_enumerator
2679 *
2680 * Like Enumerable#uniq, but chains operation to be lazy-evaluated.
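 *
 *  For example, uniqueness may be judged by the block's result:
 *
 *     (1..Float::INFINITY).lazy.uniq { |i| i % 3 }.first(3)
 *     #=> [1, 2, 3]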
2681 */
2682
2683static VALUE
2684lazy_uniq(VALUE obj)
2685{
2686 const lazyenum_funcs *const funcs =
2687 rb_block_given_p() ? &lazy_uniq_iter_funcs : &lazy_uniq_funcs;
2688 return lazy_add_method(obj, 0, 0, Qnil, Qnil, funcs);
2689}
2690
2691static struct MEMO *
2692lazy_compact_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2693{
2694 if (NIL_P(result->memo_value)) return 0;
2695 return result;
2696}
2697
2698static const lazyenum_funcs lazy_compact_funcs = {
2699 lazy_compact_proc, 0,
2700};
2701
2702/*
2703 * call-seq:
2704 * lazy.compact -> lazy_enumerator
2705 *
2706 * Like Enumerable#compact, but chains operation to be lazy-evaluated.
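 *
 *  For example:
 *
 *     [1, nil, 2, nil, 3].lazy.compact.force
 *     #=> [1, 2, 3]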
2707 */
2708
2709static VALUE
2710lazy_compact(VALUE obj)
2711{
2712 return lazy_add_method(obj, 0, 0, Qnil, Qnil, &lazy_compact_funcs);
2713}
2714
2715static struct MEMO *
2716lazy_with_index_proc(VALUE proc_entry, struct MEMO* result, VALUE memos, long memo_index)
2717{
2718 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2719 VALUE memo = rb_ary_entry(memos, memo_index);
2720 VALUE argv[2];
2721
2722 if (NIL_P(memo)) {
2723 memo = entry->memo;
2724 }
2725
2726 argv[0] = result->memo_value;
2727 argv[1] = memo;
2728 if (entry->proc) {
2729 rb_proc_call_with_block(entry->proc, 2, argv, Qnil);
2730 LAZY_MEMO_RESET_PACKED(result);
2731 }
2732 else {
2733 LAZY_MEMO_SET_VALUE(result, rb_ary_new_from_values(2, argv));
2734 LAZY_MEMO_SET_PACKED(result);
2735 }
2736 rb_ary_store(memos, memo_index, LONG2NUM(NUM2LONG(memo) + 1));
2737 return result;
2738}
2739
2740static VALUE
2741lazy_with_index_size(VALUE proc, VALUE receiver)
2742{
2743 return receiver;
2744}
2745
2746static const lazyenum_funcs lazy_with_index_funcs = {
2747 lazy_with_index_proc, lazy_with_index_size,
2748};
2749
2750/*
2751 * call-seq:
2752 * lazy.with_index(offset = 0) {|(*args), idx| block }
2753 * lazy.with_index(offset = 0)
2754 *
2755 * If a block is given, the block is called with each element and its
2756 * index, starting from +offset+, and the method returns a lazy
2757 * enumerator that yields the same values as the receiver
2758 * (without the index).
2759 *
2760 * If a block is not given, returns a new lazy enumerator that
2761 * includes the index, starting from +offset+.
2762 *
2763 * +offset+:: the starting index to use
2764 *
2765 * See Enumerator#with_index.
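 *
 *  For example, without a block:
 *
 *     (10..).lazy.with_index(1).first(3)
 *     #=> [[10, 1], [11, 2], [12, 3]]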
2766 */
2767static VALUE
2768lazy_with_index(int argc, VALUE *argv, VALUE obj)
2769{
2770 VALUE memo;
2771
2772 rb_scan_args(argc, argv, "01", &memo);
2773 if (NIL_P(memo))
2774 memo = LONG2NUM(0);
2775
2776 return lazy_add_method(obj, 0, 0, memo, rb_ary_new_from_values(1, &memo), &lazy_with_index_funcs);
2777}
2778
2779#if 0 /* for RDoc */
2780
2781/*
2782 * call-seq:
2783 * lazy.chunk { |elt| ... } -> lazy_enumerator
2784 *
2785 * Like Enumerable#chunk, but chains operation to be lazy-evaluated.
2786 */
2787static VALUE
2788lazy_chunk(VALUE self)
2789{
2790}
2791
2792/*
2793 * call-seq:
2794 * lazy.chunk_while {|elt_before, elt_after| bool } -> lazy_enumerator
2795 *
2796 * Like Enumerable#chunk_while, but chains operation to be lazy-evaluated.
2797 */
2798static VALUE
2799lazy_chunk_while(VALUE self)
2800{
2801}
2802
2803/*
2804 * call-seq:
2805 * lazy.slice_after(pattern) -> lazy_enumerator
2806 * lazy.slice_after { |elt| bool } -> lazy_enumerator
2807 *
2808 * Like Enumerable#slice_after, but chains operation to be lazy-evaluated.
2809 */
2810static VALUE
2811lazy_slice_after(VALUE self)
2812{
2813}
2814
2815/*
2816 * call-seq:
2817 * lazy.slice_before(pattern) -> lazy_enumerator
2818 * lazy.slice_before { |elt| bool } -> lazy_enumerator
2819 *
2820 * Like Enumerable#slice_before, but chains operation to be lazy-evaluated.
2821 */
2822static VALUE
2823lazy_slice_before(VALUE self)
2824{
2825}
2826
2827/*
2828 * call-seq:
2829 * lazy.slice_when {|elt_before, elt_after| bool } -> lazy_enumerator
2830 *
2831 * Like Enumerable#slice_when, but chains operation to be lazy-evaluated.
2832 */
2833static VALUE
2834lazy_slice_when(VALUE self)
2835{
2836}
2837# endif
2838
2839static VALUE
2840lazy_super(int argc, VALUE *argv, VALUE lazy)
2841{
2842 return enumerable_lazy(rb_call_super(argc, argv));
2843}
2844
2845/*
2846 * call-seq:
2847 * enum.lazy -> lazy_enumerator
2848 *
2849 * Returns self.
2850 */
2851
2852static VALUE
2853lazy_lazy(VALUE obj)
2854{
2855 return obj;
2856}
2857
2858/*
2859 * Document-class: StopIteration
2860 *
2861 * Raised to stop the iteration, in particular by Enumerator#next. It is
2862 * rescued by Kernel#loop.
2863 *
2864 * loop do
2865 * puts "Hello"
2866 * raise StopIteration
2867 * puts "World"
2868 * end
2869 * puts "Done!"
2870 *
2871 * <em>produces:</em>
2872 *
2873 * Hello
2874 * Done!
2875 */
2876
2877/*
2878 * call-seq:
2879 * result -> value
2880 *
2881 * Returns the return value of the iterator.
2882 *
2883 * o = Object.new
2884 * def o.each
2885 * yield 1
2886 * yield 2
2887 * yield 3
2888 * 100
2889 * end
2890 *
2891 * e = o.to_enum
2892 *
2893 * puts e.next #=> 1
2894 * puts e.next #=> 2
2895 * puts e.next #=> 3
2896 *
2897 * begin
2898 * e.next
2899 * rescue StopIteration => ex
2900 * puts ex.result #=> 100
2901 * end
2902 *
2903 */
2904
2905static VALUE
2906stop_result(VALUE self)
2907{
2908 return rb_attr_get(self, id_result);
2909}
2910
2911/*
2912 * Producer
2913 */
2914
2915static void
2916producer_mark(void *p)
2917{
2918 struct producer *ptr = p;
2919 rb_gc_mark_movable(ptr->init);
2920 rb_gc_mark_movable(ptr->proc);
2921}
2922
2923static void
2924producer_compact(void *p)
2925{
2926 struct producer *ptr = p;
2927 ptr->init = rb_gc_location(ptr->init);
2928 ptr->proc = rb_gc_location(ptr->proc);
2929}
2930
2931#define producer_free RUBY_TYPED_DEFAULT_FREE
2932
2933static size_t
2934producer_memsize(const void *p)
2935{
2936 return sizeof(struct producer);
2937}
2938
2939static const rb_data_type_t producer_data_type = {
2940 "producer",
2941 {
2942 producer_mark,
2943 producer_free,
2944 producer_memsize,
2945 producer_compact,
2946 },
2947 0, 0, RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED | RUBY_TYPED_EMBEDDABLE
2948};
2949
2950static struct producer *
2951producer_ptr(VALUE obj)
2952{
2953 struct producer *ptr;
2954
2955 TypedData_Get_Struct(obj, struct producer, &producer_data_type, ptr);
2956 if (!ptr || UNDEF_P(ptr->proc)) {
2957 rb_raise(rb_eArgError, "uninitialized producer");
2958 }
2959 return ptr;
2960}
2961
2962/* :nodoc: */
2963static VALUE
2964producer_allocate(VALUE klass)
2965{
2966 struct producer *ptr;
2967 VALUE obj;
2968
2969 obj = TypedData_Make_Struct(klass, struct producer, &producer_data_type, ptr);
2970 ptr->init = Qundef;
2971 ptr->proc = Qundef;
2972
2973 return obj;
2974}
2975
2976static VALUE
2977producer_init(VALUE obj, VALUE init, VALUE proc)
2978{
2979 struct producer *ptr;
2980
2981 TypedData_Get_Struct(obj, struct producer, &producer_data_type, ptr);
2982
2983 if (!ptr) {
2984 rb_raise(rb_eArgError, "unallocated producer");
2985 }
2986
2987 RB_OBJ_WRITE(obj, &ptr->init, init);
2988 RB_OBJ_WRITE(obj, &ptr->proc, proc);
2989
2990 return obj;
2991}
2992
2993static VALUE
2994producer_each_stop(VALUE dummy, VALUE exc)
2995{
2996 return rb_attr_get(exc, id_result);
2997}
2998
2999NORETURN(static VALUE producer_each_i(VALUE obj));
3000
3001static VALUE
3002producer_each_i(VALUE obj)
3003{
3004 struct producer *ptr;
3005 VALUE init, proc, curr;
3006
3007 ptr = producer_ptr(obj);
3008 init = ptr->init;
3009 proc = ptr->proc;
3010
3011 if (UNDEF_P(init)) {
3012 curr = Qnil;
3013 }
3014 else {
3015 rb_yield(init);
3016 curr = init;
3017 }
3018
3019 for (;;) {
3020 curr = rb_funcall(proc, id_call, 1, curr);
3021 rb_yield(curr);
3022 }
3023
3024    UNREACHABLE_RETURN(Qnil);
3025}
3026
3027/* :nodoc: */
3028static VALUE
3029producer_each(VALUE obj)
3030{
3031 rb_need_block();
3032
3033 return rb_rescue2(producer_each_i, obj, producer_each_stop, (VALUE)0, rb_eStopIteration, (VALUE)0);
3034}
3035
3036static VALUE
3037producer_size(VALUE obj, VALUE args, VALUE eobj)
3038{
3039 return DBL2NUM(HUGE_VAL);
3040}
3041
3042/*
3043 * call-seq:
3044 * Enumerator.produce(initial = nil) { |prev| block } -> enumerator
3045 *
3046 * Creates an infinite enumerator from any block, which is called over and
3047 * over. The result of the previous iteration is passed to the next one.
3048 * If +initial+ is provided, it is passed to the first iteration, and
3049 * becomes the first element of the enumerator; if it is not provided,
3050 * the first iteration receives +nil+, and its result becomes the first
3051 * element of the enumerator.
3052 *
3053 * Raising StopIteration from the block stops the iteration.
3054 *
3055 * Enumerator.produce(1, &:succ) # => enumerator of 1, 2, 3, 4, ....
3056 *
3057 * Enumerator.produce { rand(10) } # => infinite random number sequence
3058 *
3059 * ancestors = Enumerator.produce(node) { |prev| node = prev.parent or raise StopIteration }
3060 * enclosing_section = ancestors.find { |n| n.type == :section }
3061 *
3062 * Using ::produce together with Enumerable methods like Enumerable#detect,
3063 * Enumerable#slice_after, Enumerable#take_while can provide Enumerator-based alternatives
3064 * for +while+ and +until+ cycles:
3065 *
3066 * # Find next Tuesday
3067 * require "date"
3068 * Enumerator.produce(Date.today, &:succ).detect(&:tuesday?)
3069 *
3070 * # Simple lexer:
3071 * require "strscan"
3072 * scanner = StringScanner.new("7+38/6")
3073 * PATTERN = %r{\d+|[-/+*]}
3074 * Enumerator.produce { scanner.scan(PATTERN) }.slice_after { scanner.eos? }.first
3075 * # => ["7", "+", "38", "/", "6"]
3076 */
3077static VALUE
3078enumerator_s_produce(int argc, VALUE *argv, VALUE klass)
3079{
3080 VALUE init, producer;
3081
3082 if (!rb_block_given_p()) rb_raise(rb_eArgError, "no block given");
3083
3084 if (rb_scan_args(argc, argv, "01", &init) == 0) {
3085 init = Qundef;
3086 }
3087
3088 producer = producer_init(producer_allocate(rb_cEnumProducer), init, rb_block_proc());
3089
3090 return rb_enumeratorize_with_size_kw(producer, sym_each, 0, 0, producer_size, RB_NO_KEYWORDS);
3091}
3092
3093/*
3094 * Document-class: Enumerator::Chain
3095 *
3096 * Enumerator::Chain is a subclass of Enumerator, which represents a
3097 * chain of enumerables that works as a single enumerator.
3098 *
3099 * Objects of this type can be created by Enumerable#chain and
3100 * Enumerator#+.
3101 */
3102
3103static void
3104enum_chain_mark(void *p)
3105{
3106 struct enum_chain *ptr = p;
3107 rb_gc_mark_movable(ptr->enums);
3108}
3109
3110static void
3111enum_chain_compact(void *p)
3112{
3113 struct enum_chain *ptr = p;
3114 ptr->enums = rb_gc_location(ptr->enums);
3115}
3116
3117#define enum_chain_free RUBY_TYPED_DEFAULT_FREE
3118
3119static size_t
3120enum_chain_memsize(const void *p)
3121{
3122 return sizeof(struct enum_chain);
3123}
3124
3125static const rb_data_type_t enum_chain_data_type = {
3126 "chain",
3127 {
3128 enum_chain_mark,
3129 enum_chain_free,
3130 enum_chain_memsize,
3131 enum_chain_compact,
3132 },
3133 0, 0, RUBY_TYPED_FREE_IMMEDIATELY
3134};
3135
3136static struct enum_chain *
3137enum_chain_ptr(VALUE obj)
3138{
3139 struct enum_chain *ptr;
3140
3141 TypedData_Get_Struct(obj, struct enum_chain, &enum_chain_data_type, ptr);
3142 if (!ptr || UNDEF_P(ptr->enums)) {
3143 rb_raise(rb_eArgError, "uninitialized chain");
3144 }
3145 return ptr;
3146}
3147
3148/* :nodoc: */
3149static VALUE
3150enum_chain_allocate(VALUE klass)
3151{
3152 struct enum_chain *ptr;
3153 VALUE obj;
3154
3155 obj = TypedData_Make_Struct(klass, struct enum_chain, &enum_chain_data_type, ptr);
3156 ptr->enums = Qundef;
3157 ptr->pos = -1;
3158
3159 return obj;
3160}
3161
3162/*
3163 * call-seq:
3164 * Enumerator::Chain.new(*enums) -> enum
3165 *
3166 * Generates a new enumerator object that iterates over the elements
3167 * of given enumerable objects in sequence.
3168 *
3169 * e = Enumerator::Chain.new(1..3, [4, 5])
3170 * e.to_a #=> [1, 2, 3, 4, 5]
3171 * e.size #=> 5
3172 */
3173static VALUE
3174enum_chain_initialize(VALUE obj, VALUE enums)
3175{
3176 struct enum_chain *ptr;
3177
3178 rb_check_frozen(obj);
3179 TypedData_Get_Struct(obj, struct enum_chain, &enum_chain_data_type, ptr);
3180
3181 if (!ptr) rb_raise(rb_eArgError, "unallocated chain");
3182
3183 ptr->enums = rb_ary_freeze(enums);
3184 ptr->pos = -1;
3185
3186 return obj;
3187}
3188
3189static VALUE
3190new_enum_chain(VALUE enums)
3191{
3192 long i;
3193 VALUE obj = enum_chain_initialize(enum_chain_allocate(rb_cEnumChain), enums);
3194
3195 for (i = 0; i < RARRAY_LEN(enums); i++) {
3196 if (RTEST(rb_obj_is_kind_of(RARRAY_AREF(enums, i), rb_cLazy))) {
3197 return enumerable_lazy(obj);
3198 }
3199 }
3200
3201 return obj;
3202}
3203
3204/* :nodoc: */
3205static VALUE
3206enum_chain_init_copy(VALUE obj, VALUE orig)
3207{
3208 struct enum_chain *ptr0, *ptr1;
3209
3210 if (!OBJ_INIT_COPY(obj, orig)) return obj;
3211 ptr0 = enum_chain_ptr(orig);
3212
3213 TypedData_Get_Struct(obj, struct enum_chain, &enum_chain_data_type, ptr1);
3214
3215 if (!ptr1) rb_raise(rb_eArgError, "unallocated chain");
3216
3217 ptr1->enums = ptr0->enums;
3218 ptr1->pos = ptr0->pos;
3219
3220 return obj;
3221}
3222
3223static VALUE
3224enum_chain_total_size(VALUE enums)
3225{
3226 VALUE total = INT2FIX(0);
3227 long i;
3228
3229 for (i = 0; i < RARRAY_LEN(enums); i++) {
3230 VALUE size = enum_size(RARRAY_AREF(enums, i));
3231
3232 if (NIL_P(size) || (RB_FLOAT_TYPE_P(size) && isinf(NUM2DBL(size)))) {
3233 return size;
3234 }
3235 if (!RB_INTEGER_TYPE_P(size)) {
3236 return Qnil;
3237 }
3238
3239 total = rb_funcall(total, '+', 1, size);
3240 }
3241
3242 return total;
3243}
3244
3245/*
3246 * call-seq:
3247 * obj.size -> int, Float::INFINITY or nil
3248 *
3249 * Returns the total size of the enumerator chain calculated by
3250 * summing up the size of each enumerable in the chain. If any of the
3251 * enumerables reports its size as nil or Float::INFINITY, that value
3252 * is returned as the total size.
3253 */
3254static VALUE
3255enum_chain_size(VALUE obj)
3256{
3257 return enum_chain_total_size(enum_chain_ptr(obj)->enums);
3258}
3259
3260static VALUE
3261enum_chain_enum_size(VALUE obj, VALUE args, VALUE eobj)
3262{
3263 return enum_chain_size(obj);
3264}
3265
3266static VALUE
3267enum_chain_enum_no_size(VALUE obj, VALUE args, VALUE eobj)
3268{
3269 return Qnil;
3270}
3271
3272/*
3273 * call-seq:
3274 * obj.each(*args) { |...| ... } -> obj
3275 * obj.each(*args) -> enumerator
3276 *
3277 * Iterates over the elements of the first enumerable by calling the
3278 * "each" method on it with the given arguments, then proceeds to the
3279 * following enumerables in sequence until all of the enumerables are
3280 * exhausted.
3281 *
3282 * If no block is given, returns an enumerator.
3283 */
3284static VALUE
3285enum_chain_each(int argc, VALUE *argv, VALUE obj)
3286{
3287 VALUE enums, block;
3288 struct enum_chain *objptr;
3289 long i;
3290
3291 RETURN_SIZED_ENUMERATOR(obj, argc, argv, argc > 0 ? enum_chain_enum_no_size : enum_chain_enum_size);
3292
3293 objptr = enum_chain_ptr(obj);
3294 enums = objptr->enums;
3295 block = rb_block_proc();
3296
3297 for (i = 0; i < RARRAY_LEN(enums); i++) {
3298 objptr->pos = i;
3299 rb_funcall_with_block(RARRAY_AREF(enums, i), id_each, argc, argv, block);
3300 }
3301
3302 return obj;
3303}
3304
3305/*
3306 * call-seq:
3307 * obj.rewind -> obj
3308 *
3309 * Rewinds the enumerator chain by calling the "rewind" method on each
3310 * enumerable in reverse order. Each call is performed only if the
3311 * enumerable responds to the method.
3312 */
3313static VALUE
3314enum_chain_rewind(VALUE obj)
3315{
3316 struct enum_chain *objptr = enum_chain_ptr(obj);
3317 VALUE enums = objptr->enums;
3318 long i;
3319
3320 for (i = objptr->pos; 0 <= i && i < RARRAY_LEN(enums); objptr->pos = --i) {
3321 rb_check_funcall(RARRAY_AREF(enums, i), id_rewind, 0, 0);
3322 }
3323
3324 return obj;
3325}
3326
3327static VALUE
3328inspect_enum_chain(VALUE obj, VALUE dummy, int recur)
3329{
3330 VALUE klass = rb_obj_class(obj);
3331 struct enum_chain *ptr;
3332
3333 TypedData_Get_Struct(obj, struct enum_chain, &enum_chain_data_type, ptr);
3334
3335 if (!ptr || UNDEF_P(ptr->enums)) {
3336 return rb_sprintf("#<%"PRIsVALUE": uninitialized>", rb_class_path(klass));
3337 }
3338
3339 if (recur) {
3340 return rb_sprintf("#<%"PRIsVALUE": ...>", rb_class_path(klass));
3341 }
3342
3343 return rb_sprintf("#<%"PRIsVALUE": %+"PRIsVALUE">", rb_class_path(klass), ptr->enums);
3344}
3345
3346/*
3347 * call-seq:
3348 * obj.inspect -> string
3349 *
3350 * Returns a printable version of the enumerator chain.
3351 */
3352static VALUE
3353enum_chain_inspect(VALUE obj)
3354{
3355 return rb_exec_recursive(inspect_enum_chain, obj, 0);
3356}
3357
3358/*
3359 * call-seq:
3360 * e.chain(*enums) -> enumerator
3361 *
3362 * Returns an enumerator object generated from this enumerator and
3363 * given enumerables.
3364 *
3365 * e = (1..3).chain([4, 5])
3366 * e.to_a #=> [1, 2, 3, 4, 5]
3367 */
3368static VALUE
3369enum_chain(int argc, VALUE *argv, VALUE obj)
3370{
3371 VALUE enums = rb_ary_new_from_values(1, &obj);
3372 rb_ary_cat(enums, argv, argc);
3373 return new_enum_chain(enums);
3374}
3375
3376/*
3377 * call-seq:
3378 * e + enum -> enumerator
3379 *
3380 * Returns an enumerator object generated from this enumerator and a
3381 * given enumerable.
3382 *
3383 * e = (1..3).each + [4, 5]
3384 * e.to_a #=> [1, 2, 3, 4, 5]
3385 */
3386static VALUE
3387enumerator_plus(VALUE obj, VALUE eobj)
3388{
3389 return new_enum_chain(rb_ary_new_from_args(2, obj, eobj));
3390}
3391
3392/*
3393 * Document-class: Enumerator::Product
3394 *
3395 * Enumerator::Product generates a Cartesian product of any number of
3396 * enumerable objects. Iterating over the product of enumerable
3397 * objects is roughly equivalent to nested each_entry loops where the
3398 * loop for the rightmost object is put innermost.
3399 *
3400 * innings = Enumerator::Product.new(1..9, ['top', 'bottom'])
3401 *
3402 * innings.each do |i, h|
3403 * p [i, h]
3404 * end
3405 * # [1, "top"]
3406 * # [1, "bottom"]
3407 * # [2, "top"]
3408 * # [2, "bottom"]
3409 * # [3, "top"]
3410 * # [3, "bottom"]
3411 * # ...
3412 * # [9, "top"]
3413 * # [9, "bottom"]
3414 *
3415 * The method used against each enumerable object is `each_entry`
3416 * instead of `each` so that the product of N enumerable objects
3417 * yields an array of exactly N elements in each iteration.
3418 *
3419 * When no enumerable object is given, it calls a given block once,
3420 * yielding an empty argument list.
3421 *
3422 * Objects of this type can be created by Enumerator.product.
3423 */
3424
3425static void
3426enum_product_mark(void *p)
3427{
3428 struct enum_product *ptr = p;
3429 rb_gc_mark_movable(ptr->enums);
3430}
3431
3432static void
3433enum_product_compact(void *p)
3434{
3435 struct enum_product *ptr = p;
3436 ptr->enums = rb_gc_location(ptr->enums);
3437}
3438
3439#define enum_product_free RUBY_TYPED_DEFAULT_FREE
3440
3441static size_t
3442enum_product_memsize(const void *p)
3443{
3444 return sizeof(struct enum_product);
3445}
3446
3447static const rb_data_type_t enum_product_data_type = {
3448 "product",
3449 {
3450 enum_product_mark,
3451 enum_product_free,
3452 enum_product_memsize,
3453 enum_product_compact,
3454 },
3455 0, 0, RUBY_TYPED_FREE_IMMEDIATELY
3456};
3457
3458static struct enum_product *
3459enum_product_ptr(VALUE obj)
3460{
3461 struct enum_product *ptr;
3462
3463 TypedData_Get_Struct(obj, struct enum_product, &enum_product_data_type, ptr);
3464 if (!ptr || UNDEF_P(ptr->enums)) {
3465 rb_raise(rb_eArgError, "uninitialized product");
3466 }
3467 return ptr;
3468}
3469
3470/* :nodoc: */
3471static VALUE
3472enum_product_allocate(VALUE klass)
3473{
3474 struct enum_product *ptr;
3475 VALUE obj;
3476
3477 obj = TypedData_Make_Struct(klass, struct enum_product, &enum_product_data_type, ptr);
3478 ptr->enums = Qundef;
3479
3480 return obj;
3481}
3482
3483/*
3484 * call-seq:
3485 * Enumerator::Product.new(*enums) -> enum
3486 *
3487 * Generates a new enumerator object that generates a Cartesian
3488 * product of given enumerable objects.
3489 *
3490 * e = Enumerator::Product.new(1..3, [4, 5])
3491 * e.to_a #=> [[1, 4], [1, 5], [2, 4], [2, 5], [3, 4], [3, 5]]
3492 * e.size #=> 6
3493 */
3494static VALUE
3495enum_product_initialize(int argc, VALUE *argv, VALUE obj)
3496{
3497 struct enum_product *ptr;
3498 VALUE enums = Qnil, options = Qnil;
3499
3500 rb_scan_args(argc, argv, "*:", &enums, &options);
3501
3502 if (!NIL_P(options) && !RHASH_EMPTY_P(options)) {
3503 rb_exc_raise(rb_keyword_error_new("unknown", rb_hash_keys(options)));
3504 }
3505
3506 rb_check_frozen(obj);
3507 TypedData_Get_Struct(obj, struct enum_product, &enum_product_data_type, ptr);
3508
3509 if (!ptr) rb_raise(rb_eArgError, "unallocated product");
3510
3511 ptr->enums = rb_ary_freeze(enums);
3512
3513 return obj;
3514}
3515
3516/* :nodoc: */
3517static VALUE
3518enum_product_init_copy(VALUE obj, VALUE orig)
3519{
3520 struct enum_product *ptr0, *ptr1;
3521
3522 if (!OBJ_INIT_COPY(obj, orig)) return obj;
3523 ptr0 = enum_product_ptr(orig);
3524
3525 TypedData_Get_Struct(obj, struct enum_product, &enum_product_data_type, ptr1);
3526
3527 if (!ptr1) rb_raise(rb_eArgError, "unallocated product");
3528
3529 ptr1->enums = ptr0->enums;
3530
3531 return obj;
3532}
3533
3534static VALUE
3535enum_product_total_size(VALUE enums)
3536{
3537 VALUE total = INT2FIX(1);
3538 VALUE sizes = rb_ary_hidden_new(RARRAY_LEN(enums));
3539 long i;
3540
3541 for (i = 0; i < RARRAY_LEN(enums); i++) {
3542 VALUE size = enum_size(RARRAY_AREF(enums, i));
3543 if (size == INT2FIX(0)) {
3544 rb_ary_resize(sizes, 0);
3545 return size;
3546 }
3547 rb_ary_push(sizes, size);
3548 }
3549 for (i = 0; i < RARRAY_LEN(sizes); i++) {
3550 VALUE size = RARRAY_AREF(sizes, i);
3551
3552 if (NIL_P(size) || (RB_TYPE_P(size, T_FLOAT) && isinf(NUM2DBL(size)))) {
3553 return size;
3554 }
3555 if (!RB_INTEGER_TYPE_P(size)) {
3556 return Qnil;
3557 }
3558
3559 total = rb_funcall(total, '*', 1, size);
3560 }
3561
3562 return total;
3563}
3564
3565/*
3566 * call-seq:
3567 * obj.size -> int, Float::INFINITY or nil
3568 *
3569 * Returns the total size of the enumerator product calculated by
3570 * multiplying the sizes of enumerables in the product. If any of the
3571 * enumerables reports its size as nil or Float::INFINITY, that value
3572 * is returned as the size.
3573 */
3574static VALUE
3575enum_product_size(VALUE obj)
3576{
3577 return enum_product_total_size(enum_product_ptr(obj)->enums);
3578}
3579
3580static VALUE
3581enum_product_enum_size(VALUE obj, VALUE args, VALUE eobj)
3582{
3583 return enum_product_size(obj);
3584}
3585
3586struct product_state {
3587    VALUE obj;
3588 VALUE block;
3589 int argc;
3590 VALUE *argv;
3591 int index;
3592};
3593
3594static VALUE product_each(VALUE, struct product_state *);
3595
3596static VALUE
3597product_each_i(RB_BLOCK_CALL_FUNC_ARGLIST(value, state))
3598{
3599 struct product_state *pstate = (struct product_state *)state;
3600 pstate->argv[pstate->index++] = value;
3601
3602 VALUE val = product_each(pstate->obj, pstate);
3603 pstate->index--;
3604 return val;
3605}
3606
3607static VALUE
3608product_each(VALUE obj, struct product_state *pstate)
3609{
3610 struct enum_product *ptr = enum_product_ptr(obj);
3611 VALUE enums = ptr->enums;
3612
3613 if (pstate->index < pstate->argc) {
3614 VALUE eobj = RARRAY_AREF(enums, pstate->index);
3615
3616 rb_block_call(eobj, id_each_entry, 0, NULL, product_each_i, (VALUE)pstate);
3617 }
3618 else {
3619 rb_funcall(pstate->block, id_call, 1, rb_ary_new_from_values(pstate->argc, pstate->argv));
3620 }
3621
3622 return obj;
3623}
3624
3625static VALUE
3626enum_product_run(VALUE obj, VALUE block)
3627{
3628 struct enum_product *ptr = enum_product_ptr(obj);
3629 int argc = RARRAY_LENINT(ptr->enums);
3630 struct product_state state = {
3631 .obj = obj,
3632 .block = block,
3633 .index = 0,
3634 .argc = argc,
3635 .argv = ALLOCA_N(VALUE, argc),
3636 };
3637
3638 return product_each(obj, &state);
3639}
3640
3641/*
3642 * call-seq:
3643 * obj.each { |...| ... } -> obj
3644 * obj.each -> enumerator
3645 *
3646 * Iterates over the Cartesian product of the given enumerables, calling
3647 * the "each_entry" method on each of them and nesting the iteration so
3648 * that the rightmost enumerable varies fastest. Each combination is
3649 * yielded as an array with one element per enumerable.
3650 *
3651 * If no block is given, returns an enumerator. Otherwise, returns self.
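 *
 *  For example:
 *
 *     Enumerator::Product.new(1..2, [:a, :b]).each { |x, y| p [x, y] }
 *     # prints [1, :a], [1, :b], [2, :a], [2, :b] in order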
3652 */
3653static VALUE
3654enum_product_each(VALUE obj)
3655{
3656 RETURN_SIZED_ENUMERATOR(obj, 0, 0, enum_product_enum_size);
3657
3658 return enum_product_run(obj, rb_block_proc());
3659}
3660
3661/*
3662 * call-seq:
3663 * obj.rewind -> obj
3664 *
3665 * Rewinds the product enumerator by calling the "rewind" method on
3666 * each enumerable in reverse order. Each call is performed only if
3667 * the enumerable responds to the method.
3668 */
3669static VALUE
3670enum_product_rewind(VALUE obj)
3671{
3672 struct enum_product *ptr = enum_product_ptr(obj);
3673 VALUE enums = ptr->enums;
3674 long i;
3675
3676 for (i = 0; i < RARRAY_LEN(enums); i++) {
3677 rb_check_funcall(RARRAY_AREF(enums, i), id_rewind, 0, 0);
3678 }
3679
3680 return obj;
3681}
3682
3683static VALUE
3684inspect_enum_product(VALUE obj, VALUE dummy, int recur)
3685{
3686 VALUE klass = rb_obj_class(obj);
3687 struct enum_product *ptr;
3688
3689 TypedData_Get_Struct(obj, struct enum_product, &enum_product_data_type, ptr);
3690
3691 if (!ptr || UNDEF_P(ptr->enums)) {
3692 return rb_sprintf("#<%"PRIsVALUE": uninitialized>", rb_class_path(klass));
3693 }
3694
3695 if (recur) {
3696 return rb_sprintf("#<%"PRIsVALUE": ...>", rb_class_path(klass));
3697 }
3698
3699 return rb_sprintf("#<%"PRIsVALUE": %+"PRIsVALUE">", rb_class_path(klass), ptr->enums);
3700}
3701
3702/*
3703 * call-seq:
3704 * obj.inspect -> string
3705 *
3706 * Returns a printable version of the product enumerator.
3707 */
3708static VALUE
3709enum_product_inspect(VALUE obj)
3710{
3711 return rb_exec_recursive(inspect_enum_product, obj, 0);
3712}
3713
3714/*
3715 * call-seq:
3716 * Enumerator.product(*enums) -> enumerator
3717 * Enumerator.product(*enums) { |elts| ... } -> enumerator
3718 *
3719 * Generates a new enumerator object that generates a Cartesian
3720 * product of given enumerable objects. This is equivalent to
3721 * Enumerator::Product.new.
3722 *
3723 * e = Enumerator.product(1..3, [4, 5])
3724 * e.to_a #=> [[1, 4], [1, 5], [2, 4], [2, 5], [3, 4], [3, 5]]
3725 * e.size #=> 6
3726 *
3727 * When a block is given, calls the block with each N-element array
3728 * generated and returns +nil+.
3729 */
3730static VALUE
3731enumerator_s_product(int argc, VALUE *argv, VALUE klass)
3732{
3733 VALUE enums = Qnil, options = Qnil, block = Qnil;
3734
3735 rb_scan_args(argc, argv, "*:&", &enums, &options, &block);
3736
3737 if (!NIL_P(options) && !RHASH_EMPTY_P(options)) {
3738 rb_exc_raise(rb_keyword_error_new("unknown", rb_hash_keys(options)));
3739 }
3740
3741 VALUE obj = enum_product_initialize(argc, argv, enum_product_allocate(rb_cEnumProduct));
3742
3743 if (!NIL_P(block)) {
3744 enum_product_run(obj, block);
3745 return Qnil;
3746 }
3747
3748 return obj;
3749}
3750
3751/*
3752 * Document-class: Enumerator::ArithmeticSequence
3753 *
3754 * Enumerator::ArithmeticSequence is a subclass of Enumerator,
3755 * that is a representation of sequences of numbers with common difference.
3756 * Instances of this class can be generated by the Range#step and Numeric#step
3757 * methods.
3758 *
3759 * The class can be used for slicing Array (see Array#slice) or custom
3760 * collections.
3761 */
3762
3763VALUE
3764rb_arith_seq_new(VALUE obj, VALUE meth, int argc, VALUE const *argv,
3765 rb_enumerator_size_func *size_fn,
3766 VALUE beg, VALUE end, VALUE step, int excl)
3767{
3768 VALUE aseq = enumerator_init(enumerator_allocate(rb_cArithSeq),
3769 obj, meth, argc, argv, size_fn, Qnil, rb_keyword_given_p());
3770 rb_ivar_set(aseq, id_begin, beg);
3771 rb_ivar_set(aseq, id_end, end);
3772 rb_ivar_set(aseq, id_step, step);
3773 rb_ivar_set(aseq, id_exclude_end, RBOOL(excl));
3774 return aseq;
3775}
3776
3777/*
3778 * call-seq: aseq.begin -> num or nil
3779 *
3780 * Returns the number that defines the first element of this arithmetic
3781 * sequence.
3782 */
3783static inline VALUE
3784arith_seq_begin(VALUE self)
3785{
3786 return rb_ivar_get(self, id_begin);
3787}
3788
3789/*
3790 * call-seq: aseq.end -> num or nil
3791 *
3792 * Returns the number that defines the end of this arithmetic sequence.
3793 */
3794static inline VALUE
3795arith_seq_end(VALUE self)
3796{
3797 return rb_ivar_get(self, id_end);
3798}
3799
3800/*
3801 * call-seq: aseq.step -> num
3802 *
3803 * Returns the number that defines the common difference between
3804 * two adjacent elements in this arithmetic sequence.
3805 */
3806static inline VALUE
3807arith_seq_step(VALUE self)
3808{
3809 return rb_ivar_get(self, id_step);
3810}
3811
3812/*
3813 * call-seq: aseq.exclude_end? -> true or false
3814 *
3815 * Returns <code>true</code> if this arithmetic sequence excludes its end value.
3816 */
3817static inline VALUE
3818arith_seq_exclude_end(VALUE self)
3819{
3820 return rb_ivar_get(self, id_exclude_end);
3821}
3822
3823static inline int
3824arith_seq_exclude_end_p(VALUE self)
3825{
3826 return RTEST(arith_seq_exclude_end(self));
3827}
3828
3829int
3830rb_arithmetic_sequence_extract(VALUE obj, rb_arithmetic_sequence_components_t *component)
3831{
3832 if (rb_obj_is_kind_of(obj, rb_cArithSeq)) {
3833 component->begin = arith_seq_begin(obj);
3834 component->end = arith_seq_end(obj);
3835 component->step = arith_seq_step(obj);
3836 component->exclude_end = arith_seq_exclude_end_p(obj);
3837 return 1;
3838 }
3839 else if (rb_range_values(obj, &component->begin, &component->end, &component->exclude_end)) {
3840 component->step = INT2FIX(1);
3841 return 1;
3842 }
3843
3844 return 0;
3845}
3846
3847VALUE
3848rb_arithmetic_sequence_beg_len_step(VALUE obj, long *begp, long *lenp, long *stepp, long len, int err)
3849{
3850 RBIMPL_NONNULL_ARG(begp);
3851 RBIMPL_NONNULL_ARG(lenp);
3852 RBIMPL_NONNULL_ARG(stepp);
3853
3854    rb_arithmetic_sequence_components_t aseq;
3855    if (!rb_arithmetic_sequence_extract(obj, &aseq)) {
3856 return Qfalse;
3857 }
3858
3859 long step = NIL_P(aseq.step) ? 1 : NUM2LONG(aseq.step);
3860 *stepp = step;
3861
3862 if (step < 0) {
3863 if (aseq.exclude_end && !NIL_P(aseq.end)) {
3864 /* Handle exclusion before range reversal */
3865 aseq.end = LONG2NUM(NUM2LONG(aseq.end) + 1);
3866
3867 /* Don't exclude the previous beginning */
3868 aseq.exclude_end = 0;
3869 }
3870 VALUE tmp = aseq.begin;
3871 aseq.begin = aseq.end;
3872 aseq.end = tmp;
3873 }
3874
3875 if (err == 0 && (step < -1 || step > 1)) {
3876 if (rb_range_component_beg_len(aseq.begin, aseq.end, aseq.exclude_end, begp, lenp, len, 1) == Qtrue) {
3877 if (*begp > len)
3878 goto out_of_range;
3879 if (*lenp > len)
3880 goto out_of_range;
3881 return Qtrue;
3882 }
3883 }
3884 else {
3885 return rb_range_component_beg_len(aseq.begin, aseq.end, aseq.exclude_end, begp, lenp, len, err);
3886 }
3887
3888 out_of_range:
3889 rb_raise(rb_eRangeError, "%+"PRIsVALUE" out of range", obj);
3890 return Qnil;
3891}
3892
3893/*
3894 * call-seq:
3895 * aseq.first -> num or nil
3896 * aseq.first(n) -> an_array
3897 *
3898 * Returns the first number in this arithmetic sequence,
3899 * or an array of the first +n+ elements.
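 *
 *  For example:
 *
 *     (1..10).step(3).first     #=> 1
 *     (1..10).step(3).first(3)  #=> [1, 4, 7]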
3900 */
3901static VALUE
3902arith_seq_first(int argc, VALUE *argv, VALUE self)
3903{
3904 VALUE b, e, s, ary;
3905 long n;
3906 int x;
3907
3908 rb_check_arity(argc, 0, 1);
3909
3910 b = arith_seq_begin(self);
3911 e = arith_seq_end(self);
3912 s = arith_seq_step(self);
3913 if (argc == 0) {
3914 if (NIL_P(b)) {
3915 return Qnil;
3916 }
3917 if (!NIL_P(e)) {
3918 VALUE zero = INT2FIX(0);
3919 int r = rb_cmpint(rb_num_coerce_cmp(s, zero, idCmp), s, zero);
3920 if (r > 0 && RTEST(rb_funcall(b, '>', 1, e))) {
3921 return Qnil;
3922 }
3923 if (r < 0 && RTEST(rb_funcall(b, '<', 1, e))) {
3924 return Qnil;
3925 }
3926 }
3927 return b;
3928 }
3929
3930 // TODO: the following code should be extracted as arith_seq_take
3931
3932 n = NUM2LONG(argv[0]);
3933 if (n < 0) {
3934 rb_raise(rb_eArgError, "attempt to take negative size");
3935 }
3936 if (n == 0) {
3937 return rb_ary_new_capa(0);
3938 }
3939
3940 x = arith_seq_exclude_end_p(self);
3941
3942 if (FIXNUM_P(b) && NIL_P(e) && FIXNUM_P(s)) {
3943 long i = FIX2LONG(b), unit = FIX2LONG(s);
3944 ary = rb_ary_new_capa(n);
3945 while (n > 0 && FIXABLE(i)) {
3946 rb_ary_push(ary, LONG2FIX(i));
3947 i += unit; // FIXABLE + FIXABLE never overflow;
3948 --n;
3949 }
3950 if (n > 0) {
3951 b = LONG2NUM(i);
3952 while (n > 0) {
3953 rb_ary_push(ary, b);
3954 b = rb_big_plus(b, s);
3955 --n;
3956 }
3957 }
3958 return ary;
3959 }
3960 else if (FIXNUM_P(b) && FIXNUM_P(e) && FIXNUM_P(s)) {
3961 long i = FIX2LONG(b);
3962 long end = FIX2LONG(e);
3963 long unit = FIX2LONG(s);
3964 long len;
3965
3966 if (unit >= 0) {
3967 if (!x) end += 1;
3968
3969 len = end - i;
3970 if (len < 0) len = 0;
3971 ary = rb_ary_new_capa((n < len) ? n : len);
3972 while (n > 0 && i < end) {
3973 rb_ary_push(ary, LONG2FIX(i));
3974 if (i + unit < i) break;
3975 i += unit;
3976 --n;
3977 }
3978 }
3979 else {
3980 if (!x) end -= 1;
3981
3982 len = i - end;
3983 if (len < 0) len = 0;
3984 ary = rb_ary_new_capa((n < len) ? n : len);
3985 while (n > 0 && i > end) {
3986 rb_ary_push(ary, LONG2FIX(i));
3987 if (i + unit > i) break;
3988 i += unit;
3989 --n;
3990 }
3991 }
3992 return ary;
3993 }
3994 else if (RB_FLOAT_TYPE_P(b) || RB_FLOAT_TYPE_P(e) || RB_FLOAT_TYPE_P(s)) {
3995 /* generate values like ruby_float_step */
3996
3997 double unit = NUM2DBL(s);
3998 double beg = NUM2DBL(b);
3999 double end = NIL_P(e) ? (unit < 0 ? -1 : 1)*HUGE_VAL : NUM2DBL(e);
4000 double len = ruby_float_step_size(beg, end, unit, x);
4001 long i;
4002
4003 if (n > len)
4004 n = (long)len;
4005
4006 if (isinf(unit)) {
4007 if (len > 0) {
4008 ary = rb_ary_new_capa(1);
4009 rb_ary_push(ary, DBL2NUM(beg));
4010 }
4011 else {
4012 ary = rb_ary_new_capa(0);
4013 }
4014 }
4015 else if (unit == 0) {
4016 VALUE val = DBL2NUM(beg);
4017 ary = rb_ary_new_capa(n);
4018 for (i = 0; i < len; ++i) {
4019 rb_ary_push(ary, val);
4020 }
4021 }
4022 else {
4023 ary = rb_ary_new_capa(n);
4024 for (i = 0; i < n; ++i) {
4025 double d = i*unit+beg;
4026 if (unit >= 0 ? end < d : d < end) d = end;
4027 rb_ary_push(ary, DBL2NUM(d));
4028 }
4029 }
4030
4031 return ary;
4032 }
4033
4034 return rb_call_super(argc, argv);
4035}
4036
4037static inline VALUE
4038num_plus(VALUE a, VALUE b)
4039{
4040 if (RB_INTEGER_TYPE_P(a)) {
4041 return rb_int_plus(a, b);
4042 }
4043 else if (RB_FLOAT_TYPE_P(a)) {
4044 return rb_float_plus(a, b);
4045 }
4046 else if (RB_TYPE_P(a, T_RATIONAL)) {
4047 return rb_rational_plus(a, b);
4048 }
4049 else {
4050 return rb_funcallv(a, '+', 1, &b);
4051 }
4052}
4053
4054static inline VALUE
4055num_minus(VALUE a, VALUE b)
4056{
4057 if (RB_INTEGER_TYPE_P(a)) {
4058 return rb_int_minus(a, b);
4059 }
4060 else if (RB_FLOAT_TYPE_P(a)) {
4061 return rb_float_minus(a, b);
4062 }
4063 else if (RB_TYPE_P(a, T_RATIONAL)) {
4064 return rb_rational_minus(a, b);
4065 }
4066 else {
4067 return rb_funcallv(a, '-', 1, &b);
4068 }
4069}
4070
4071static inline VALUE
4072num_mul(VALUE a, VALUE b)
4073{
4074 if (RB_INTEGER_TYPE_P(a)) {
4075 return rb_int_mul(a, b);
4076 }
4077 else if (RB_FLOAT_TYPE_P(a)) {
4078 return rb_float_mul(a, b);
4079 }
4080 else if (RB_TYPE_P(a, T_RATIONAL)) {
4081 return rb_rational_mul(a, b);
4082 }
4083 else {
4084 return rb_funcallv(a, '*', 1, &b);
4085 }
4086}
4087
4088static inline VALUE
4089num_idiv(VALUE a, VALUE b)
4090{
4091 VALUE q;
4092 if (RB_INTEGER_TYPE_P(a)) {
4093 q = rb_int_idiv(a, b);
4094 }
4095 else if (RB_FLOAT_TYPE_P(a)) {
4096 q = rb_float_div(a, b);
4097 }
4098 else if (RB_TYPE_P(a, T_RATIONAL)) {
4099 q = rb_rational_div(a, b);
4100 }
4101 else {
4102 q = rb_funcallv(a, idDiv, 1, &b);
4103 }
4104
4105 if (RB_INTEGER_TYPE_P(q)) {
4106 return q;
4107 }
4108 else if (RB_FLOAT_TYPE_P(q)) {
4109 return rb_float_floor(q, 0);
4110 }
4111 else if (RB_TYPE_P(q, T_RATIONAL)) {
4112 return rb_rational_floor(q, 0);
4113 }
4114 else {
4115 return rb_funcall(q, rb_intern("floor"), 0);
4116 }
4117}
4118
4119/*
4120 * call-seq:
4121 * aseq.last -> num or nil
4122 * aseq.last(n) -> an_array
4123 *
4124 * Returns the last number in this arithmetic sequence,
4125 * or an array of the last +n+ elements.
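 *
 *  For example:
 *
 *     (1..10).step(3).last     #=> 10
 *     (1..10).step(3).last(2)  #=> [7, 10]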
4126 */
4127static VALUE
4128arith_seq_last(int argc, VALUE *argv, VALUE self)
4129{
4130 VALUE b, e, s, len_1, len, last, nv, ary;
4131 int last_is_adjusted;
4132 long n;
4133
4134 e = arith_seq_end(self);
4135 if (NIL_P(e)) {
4136 rb_raise(rb_eRangeError,
4137 "cannot get the last element of endless arithmetic sequence");
4138 }
4139
4140 b = arith_seq_begin(self);
4141 s = arith_seq_step(self);
4142
4143 len_1 = num_idiv(num_minus(e, b), s);
4144 if (rb_num_negative_int_p(len_1)) {
4145 if (argc == 0) {
4146 return Qnil;
4147 }
4148 return rb_ary_new_capa(0);
4149 }
4150
4151 last = num_plus(b, num_mul(s, len_1));
4152 if ((last_is_adjusted = arith_seq_exclude_end_p(self) && rb_equal(last, e))) {
4153 last = num_minus(last, s);
4154 }
4155
4156 if (argc == 0) {
4157 return last;
4158 }
4159
4160 if (last_is_adjusted) {
4161 len = len_1;
4162 }
4163 else {
4164 len = rb_int_plus(len_1, INT2FIX(1));
4165 }
4166
4167 rb_scan_args(argc, argv, "1", &nv);
4168 if (!RB_INTEGER_TYPE_P(nv)) {
4169 nv = rb_to_int(nv);
4170 }
4171 if (RTEST(rb_int_gt(nv, len))) {
4172 nv = len;
4173 }
4174 n = NUM2LONG(nv);
4175 if (n < 0) {
4176 rb_raise(rb_eArgError, "negative array size");
4177 }
4178
4179 ary = rb_ary_new_capa(n);
4180 b = rb_int_minus(last, rb_int_mul(s, nv));
4181 while (n) {
4182 b = rb_int_plus(b, s);
4183 rb_ary_push(ary, b);
4184 --n;
4185 }
4186
4187 return ary;
4188}
4189
4190/*
4191 * call-seq:
4192 * aseq.inspect -> string
4193 *
4194 * Convert this arithmetic sequence to a printable form.
4195 */
4196static VALUE
4197arith_seq_inspect(VALUE self)
4198{
4199 struct enumerator *e;
4200 VALUE eobj, str, eargs;
4201 int range_p;
4202
4203 TypedData_Get_Struct(self, struct enumerator, &enumerator_data_type, e);
4204
4205 eobj = rb_attr_get(self, id_receiver);
4206 if (NIL_P(eobj)) {
4207 eobj = e->obj;
4208 }
4209
4210 range_p = RTEST(rb_obj_is_kind_of(eobj, rb_cRange));
4211 str = rb_sprintf("(%s%"PRIsVALUE"%s.", range_p ? "(" : "", eobj, range_p ? ")" : "");
4212
4213 rb_str_buf_append(str, rb_id2str(e->meth));
4214
4215 eargs = rb_attr_get(eobj, id_arguments);
4216 if (NIL_P(eargs)) {
4217 eargs = e->args;
4218 }
4219 if (eargs != Qfalse) {
4220 long argc = RARRAY_LEN(eargs);
4221 const VALUE *argv = RARRAY_CONST_PTR(eargs); /* WB: no new reference */
4222
4223 if (argc > 0) {
4224 VALUE kwds = Qnil;
4225
4226 rb_str_buf_cat2(str, "(");
4227
4228 if (RB_TYPE_P(argv[argc-1], T_HASH)) {
4229 int all_key = TRUE;
4230 rb_hash_foreach(argv[argc-1], key_symbol_p, (VALUE)&all_key);
4231 if (all_key) kwds = argv[--argc];
4232 }
4233
4234 while (argc--) {
4235 VALUE arg = *argv++;
4236
4237 rb_str_append(str, rb_inspect(arg));
4238 rb_str_buf_cat2(str, ", ");
4239 }
4240 if (!NIL_P(kwds)) {
4241 rb_hash_foreach(kwds, kwd_append, str);
4242 }
4243 rb_str_set_len(str, RSTRING_LEN(str)-2); /* drop the last ", " */
4244 rb_str_buf_cat2(str, ")");
4245 }
4246 }
4247
4248 rb_str_buf_cat2(str, ")");
4249
4250 return str;
4251}
4252
4253/*
4254 * call-seq:
4255 * aseq == obj -> true or false
4256 *
4257 * Returns <code>true</code> only if +obj+ is an Enumerator::ArithmeticSequence
4258 * with equivalent begin, end, step, and exclude_end? settings.
4259 */
4260static VALUE
4261arith_seq_eq(VALUE self, VALUE other)
4262{
4263 if (!RTEST(rb_obj_is_kind_of(other, rb_cArithSeq))) {
4264 return Qfalse;
4265 }
4266
4267 if (!rb_equal(arith_seq_begin(self), arith_seq_begin(other))) {
4268 return Qfalse;
4269 }
4270
4271 if (!rb_equal(arith_seq_end(self), arith_seq_end(other))) {
4272 return Qfalse;
4273 }
4274
4275 if (!rb_equal(arith_seq_step(self), arith_seq_step(other))) {
4276 return Qfalse;
4277 }
4278
4279 if (arith_seq_exclude_end_p(self) != arith_seq_exclude_end_p(other)) {
4280 return Qfalse;
4281 }
4282
4283 return Qtrue;
4284}
4285
4286/*
4287 * call-seq:
4288 * aseq.hash -> integer
4289 *
4290 * Compute a hash-value for this arithmetic sequence.
4291 * Two arithmetic sequences with same begin, end, step, and exclude_end?
4292 * values will generate the same hash-value.
4293 *
4294 * See also Object#hash.
4295 */
4296static VALUE
4297arith_seq_hash(VALUE self)
4298{
4299 st_index_t hash;
4300 VALUE v;
4301
4302 hash = rb_hash_start(arith_seq_exclude_end_p(self));
4303 v = rb_hash(arith_seq_begin(self));
4304 hash = rb_hash_uint(hash, NUM2LONG(v));
4305 v = rb_hash(arith_seq_end(self));
4306 hash = rb_hash_uint(hash, NUM2LONG(v));
4307 v = rb_hash(arith_seq_step(self));
4308 hash = rb_hash_uint(hash, NUM2LONG(v));
4309 hash = rb_hash_end(hash);
4310
4311 return ST2FIX(hash);
4312}
4313
4314#define NUM_GE(x, y) RTEST(rb_num_coerce_relop((x), (y), idGE))
4315
4316struct arith_seq_gen {
4315
4317 VALUE current;
4318 VALUE end;
4319 VALUE step;
4320 int excl;
4321};
4322
4323/*
4324 * call-seq:
4325 * aseq.each {|i| block } -> aseq
4326 * aseq.each -> aseq
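 *
 * Iterates over the elements of this arithmetic sequence, for example
 * (illustrative):
 *
 *     (1..10).step(3).each { |i| print i, " " }   # prints "1 4 7 10 "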
4327 */
4328static VALUE
4329arith_seq_each(VALUE self)
4330{
4331 VALUE c, e, s, len_1, last;
4332 int x;
4333
4334 if (!rb_block_given_p()) return self;
4335
4336 c = arith_seq_begin(self);
4337 e = arith_seq_end(self);
4338 s = arith_seq_step(self);
4339 x = arith_seq_exclude_end_p(self);
4340
4341 if (!RB_TYPE_P(s, T_COMPLEX) && ruby_float_step(c, e, s, x, TRUE)) {
4342 return self;
4343 }
4344
4345 if (NIL_P(e)) {
4346 while (1) {
4347 rb_yield(c);
4348 c = rb_int_plus(c, s);
4349 }
4350
4351 return self;
4352 }
4353
4354 if (rb_equal(s, INT2FIX(0))) {
4355 while (1) {
4356 rb_yield(c);
4357 }
4358
4359 return self;
4360 }
4361
4362 len_1 = num_idiv(num_minus(e, c), s);
4363 last = num_plus(c, num_mul(s, len_1));
4364 if (x && rb_equal(last, e)) {
4365 last = num_minus(last, s);
4366 }
4367
4368 if (rb_num_negative_int_p(s)) {
4369 while (NUM_GE(c, last)) {
4370 rb_yield(c);
4371 c = num_plus(c, s);
4372 }
4373 }
4374 else {
4375 while (NUM_GE(last, c)) {
4376 rb_yield(c);
4377 c = num_plus(c, s);
4378 }
4379 }
4380
4381 return self;
4382}
4383
4384/*
4385 * call-seq:
4386 * aseq.size -> num
4387 *
4388 * Returns the number of elements in this arithmetic sequence if it is a finite
4389 * sequence. Otherwise, returns <code>Float::INFINITY</code>.
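 *
 * For example (illustrative):
 *
 *     (1..10).step(3).size    # => 4
 *     (1...10).step(3).size   # => 3
 *     1.step(by: 2).size      # => Infinity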
4390 */
4391static VALUE
4392arith_seq_size(VALUE self)
4393{
4394 VALUE b, e, s, len_1, len, last;
4395 int x;
4396
4397 b = arith_seq_begin(self);
4398 e = arith_seq_end(self);
4399 s = arith_seq_step(self);
4400 x = arith_seq_exclude_end_p(self);
4401
4402 if (RB_FLOAT_TYPE_P(b) || RB_FLOAT_TYPE_P(e) || RB_FLOAT_TYPE_P(s)) {
4403 double ee, n;
4404
4405 if (NIL_P(e)) {
4406 if (rb_num_negative_int_p(s)) {
4407 ee = -HUGE_VAL;
4408 }
4409 else {
4410 ee = HUGE_VAL;
4411 }
4412 }
4413 else {
4414 ee = NUM2DBL(e);
4415 }
4416
4417 n = ruby_float_step_size(NUM2DBL(b), ee, NUM2DBL(s), x);
4418 if (isinf(n)) return DBL2NUM(n);
4419 if (POSFIXABLE(n)) return LONG2FIX((long)n);
4420 return rb_dbl2big(n);
4421 }
4422
4423 if (NIL_P(e)) {
4424 return DBL2NUM(HUGE_VAL);
4425 }
4426
4427 if (!rb_obj_is_kind_of(s, rb_cNumeric)) {
4428 s = rb_to_int(s);
4429 }
4430
4431 if (rb_equal(s, INT2FIX(0))) {
4432 return DBL2NUM(HUGE_VAL);
4433 }
4434
4435 len_1 = rb_int_idiv(rb_int_minus(e, b), s);
4436 if (rb_num_negative_int_p(len_1)) {
4437 return INT2FIX(0);
4438 }
4439
4440 last = rb_int_plus(b, rb_int_mul(s, len_1));
4441 if (x && rb_equal(last, e)) {
4442 len = len_1;
4443 }
4444 else {
4445 len = rb_int_plus(len_1, INT2FIX(1));
4446 }
4447
4448 return len;
4449}
4450
4451#define sym(name) ID2SYM(rb_intern_const(name))
4452void
4453InitVM_Enumerator(void)
4454{
4455 ID id_private = rb_intern_const("private");
4456
4457 rb_define_method(rb_mKernel, "to_enum", obj_to_enum, -1);
4458 rb_define_method(rb_mKernel, "enum_for", obj_to_enum, -1);
4459
4460 rb_cEnumerator = rb_define_class("Enumerator", rb_cObject);
4461 rb_include_module(rb_cEnumerator, rb_mEnumerable);
4462
4463 rb_define_alloc_func(rb_cEnumerator, enumerator_allocate);
4464 rb_define_method(rb_cEnumerator, "initialize", enumerator_initialize, -1);
4465 rb_define_method(rb_cEnumerator, "initialize_copy", enumerator_init_copy, 1);
4466 rb_define_method(rb_cEnumerator, "each", enumerator_each, -1);
4467 rb_define_method(rb_cEnumerator, "each_with_index", enumerator_each_with_index, 0);
4468 rb_define_method(rb_cEnumerator, "each_with_object", enumerator_with_object, 1);
4469 rb_define_method(rb_cEnumerator, "with_index", enumerator_with_index, -1);
4470 rb_define_method(rb_cEnumerator, "with_object", enumerator_with_object, 1);
4471 rb_define_method(rb_cEnumerator, "next_values", enumerator_next_values, 0);
4472 rb_define_method(rb_cEnumerator, "peek_values", enumerator_peek_values_m, 0);
4473 rb_define_method(rb_cEnumerator, "next", enumerator_next, 0);
4474 rb_define_method(rb_cEnumerator, "peek", enumerator_peek, 0);
4475 rb_define_method(rb_cEnumerator, "feed", enumerator_feed, 1);
4476 rb_define_method(rb_cEnumerator, "rewind", enumerator_rewind, 0);
4477 rb_define_method(rb_cEnumerator, "inspect", enumerator_inspect, 0);
4478 rb_define_method(rb_cEnumerator, "size", enumerator_size, 0);
4479 rb_define_method(rb_cEnumerator, "+", enumerator_plus, 1);
4481
4482 /* Lazy */
4483 rb_cLazy = rb_define_class_under(rb_cEnumerator, "Lazy", rb_cEnumerator);
4484 rb_define_method(rb_mEnumerable, "lazy", enumerable_lazy, 0);
4485
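    /*
     * The _enumerable_* aliases below keep the eager Enumerable
     * implementations reachable (as private methods, see the calls to
     * `private' further down) while Lazy redefines the public names.
     */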
4486 rb_define_alias(rb_cLazy, "_enumerable_map", "map");
4487 rb_define_alias(rb_cLazy, "_enumerable_collect", "collect");
4488 rb_define_alias(rb_cLazy, "_enumerable_flat_map", "flat_map");
4489 rb_define_alias(rb_cLazy, "_enumerable_collect_concat", "collect_concat");
4490 rb_define_alias(rb_cLazy, "_enumerable_select", "select");
4491 rb_define_alias(rb_cLazy, "_enumerable_find_all", "find_all");
4492 rb_define_alias(rb_cLazy, "_enumerable_filter", "filter");
4493 rb_define_alias(rb_cLazy, "_enumerable_filter_map", "filter_map");
4494 rb_define_alias(rb_cLazy, "_enumerable_reject", "reject");
4495 rb_define_alias(rb_cLazy, "_enumerable_grep", "grep");
4496 rb_define_alias(rb_cLazy, "_enumerable_grep_v", "grep_v");
4497 rb_define_alias(rb_cLazy, "_enumerable_zip", "zip");
4498 rb_define_alias(rb_cLazy, "_enumerable_take", "take");
4499 rb_define_alias(rb_cLazy, "_enumerable_take_while", "take_while");
4500 rb_define_alias(rb_cLazy, "_enumerable_drop", "drop");
4501 rb_define_alias(rb_cLazy, "_enumerable_drop_while", "drop_while");
4502 rb_define_alias(rb_cLazy, "_enumerable_uniq", "uniq");
4503 rb_define_private_method(rb_cLazy, "_enumerable_with_index", enumerator_with_index, -1);
4504
4505 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_map"));
4506 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_collect"));
4507 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_flat_map"));
4508 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_collect_concat"));
4509 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_select"));
4510 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_find_all"));
4511 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_filter"));
4512 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_filter_map"));
4513 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_reject"));
4514 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_grep"));
4515 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_grep_v"));
4516 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_zip"));
4517 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_take"));
4518 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_take_while"));
4519 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_drop"));
4520 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_drop_while"));
4521 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_uniq"));
4522
4523 rb_define_method(rb_cLazy, "initialize", lazy_initialize, -1);
4524 rb_define_method(rb_cLazy, "to_enum", lazy_to_enum, -1);
4525 rb_define_method(rb_cLazy, "enum_for", lazy_to_enum, -1);
4526 rb_define_method(rb_cLazy, "eager", lazy_eager, 0);
4527 rb_define_method(rb_cLazy, "map", lazy_map, 0);
4528 rb_define_method(rb_cLazy, "collect", lazy_map, 0);
4529 rb_define_method(rb_cLazy, "flat_map", lazy_flat_map, 0);
4530 rb_define_method(rb_cLazy, "collect_concat", lazy_flat_map, 0);
4531 rb_define_method(rb_cLazy, "select", lazy_select, 0);
4532 rb_define_method(rb_cLazy, "find_all", lazy_select, 0);
4533 rb_define_method(rb_cLazy, "filter", lazy_select, 0);
4534 rb_define_method(rb_cLazy, "filter_map", lazy_filter_map, 0);
4535 rb_define_method(rb_cLazy, "reject", lazy_reject, 0);
4536 rb_define_method(rb_cLazy, "grep", lazy_grep, 1);
4537 rb_define_method(rb_cLazy, "grep_v", lazy_grep_v, 1);
4538 rb_define_method(rb_cLazy, "zip", lazy_zip, -1);
4539 rb_define_method(rb_cLazy, "take", lazy_take, 1);
4540 rb_define_method(rb_cLazy, "take_while", lazy_take_while, 0);
4541 rb_define_method(rb_cLazy, "drop", lazy_drop, 1);
4542 rb_define_method(rb_cLazy, "drop_while", lazy_drop_while, 0);
4543 rb_define_method(rb_cLazy, "lazy", lazy_lazy, 0);
4544 rb_define_method(rb_cLazy, "chunk", lazy_super, -1);
4545 rb_define_method(rb_cLazy, "slice_before", lazy_super, -1);
4546 rb_define_method(rb_cLazy, "slice_after", lazy_super, -1);
4547 rb_define_method(rb_cLazy, "slice_when", lazy_super, -1);
4548 rb_define_method(rb_cLazy, "chunk_while", lazy_super, -1);
4549 rb_define_method(rb_cLazy, "uniq", lazy_uniq, 0);
4550 rb_define_method(rb_cLazy, "compact", lazy_compact, 0);
4551 rb_define_method(rb_cLazy, "with_index", lazy_with_index, -1);
4552
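    /*
     * Frozen lookup table mapping each public Lazy method name to the
     * private _enumerable_* alias defined above; presumably consulted when
     * a Lazy enumerator has to fall back to the eager implementation.
     */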
4553 lazy_use_super_method = rb_hash_new_with_size(18);
4554 rb_hash_aset(lazy_use_super_method, sym("map"), sym("_enumerable_map"));
4555 rb_hash_aset(lazy_use_super_method, sym("collect"), sym("_enumerable_collect"));
4556 rb_hash_aset(lazy_use_super_method, sym("flat_map"), sym("_enumerable_flat_map"));
4557 rb_hash_aset(lazy_use_super_method, sym("collect_concat"), sym("_enumerable_collect_concat"));
4558 rb_hash_aset(lazy_use_super_method, sym("select"), sym("_enumerable_select"));
4559 rb_hash_aset(lazy_use_super_method, sym("find_all"), sym("_enumerable_find_all"));
4560 rb_hash_aset(lazy_use_super_method, sym("filter"), sym("_enumerable_filter"));
4561 rb_hash_aset(lazy_use_super_method, sym("filter_map"), sym("_enumerable_filter_map"));
4562 rb_hash_aset(lazy_use_super_method, sym("reject"), sym("_enumerable_reject"));
4563 rb_hash_aset(lazy_use_super_method, sym("grep"), sym("_enumerable_grep"));
4564 rb_hash_aset(lazy_use_super_method, sym("grep_v"), sym("_enumerable_grep_v"));
4565 rb_hash_aset(lazy_use_super_method, sym("zip"), sym("_enumerable_zip"));
4566 rb_hash_aset(lazy_use_super_method, sym("take"), sym("_enumerable_take"));
4567 rb_hash_aset(lazy_use_super_method, sym("take_while"), sym("_enumerable_take_while"));
4568 rb_hash_aset(lazy_use_super_method, sym("drop"), sym("_enumerable_drop"));
4569 rb_hash_aset(lazy_use_super_method, sym("drop_while"), sym("_enumerable_drop_while"));
4570 rb_hash_aset(lazy_use_super_method, sym("uniq"), sym("_enumerable_uniq"));
4571 rb_hash_aset(lazy_use_super_method, sym("with_index"), sym("_enumerable_with_index"));
4572 rb_obj_freeze(lazy_use_super_method);
4573 rb_vm_register_global_object(lazy_use_super_method);
4574
4575#if 0 /* for RDoc */
4576 rb_define_method(rb_cLazy, "to_a", lazy_to_a, 0);
4577 rb_define_method(rb_cLazy, "chunk", lazy_chunk, 0);
4578 rb_define_method(rb_cLazy, "chunk_while", lazy_chunk_while, 0);
4579 rb_define_method(rb_cLazy, "slice_after", lazy_slice_after, 0);
4580 rb_define_method(rb_cLazy, "slice_before", lazy_slice_before, 0);
4581 rb_define_method(rb_cLazy, "slice_when", lazy_slice_when, 0);
4582#endif
4583 rb_define_alias(rb_cLazy, "force", "to_a");
4584
4585 rb_eStopIteration = rb_define_class("StopIteration", rb_eIndexError);
4586 rb_define_method(rb_eStopIteration, "result", stop_result, 0);
4587
4588 /* Generator */
4589 rb_cGenerator = rb_define_class_under(rb_cEnumerator, "Generator", rb_cObject);
4590 rb_include_module(rb_cGenerator, rb_mEnumerable);
4591 rb_define_alloc_func(rb_cGenerator, generator_allocate);
4592 rb_define_method(rb_cGenerator, "initialize", generator_initialize, -1);
4593 rb_define_method(rb_cGenerator, "initialize_copy", generator_init_copy, 1);
4594 rb_define_method(rb_cGenerator, "each", generator_each, -1);
4595
4596 /* Yielder */
4597 rb_cYielder = rb_define_class_under(rb_cEnumerator, "Yielder", rb_cObject);
4598 rb_define_alloc_func(rb_cYielder, yielder_allocate);
4599 rb_define_method(rb_cYielder, "initialize", yielder_initialize, 0);
4600 rb_define_method(rb_cYielder, "yield", yielder_yield, -2);
4601 rb_define_method(rb_cYielder, "<<", yielder_yield_push, 1);
4602 rb_define_method(rb_cYielder, "to_proc", yielder_to_proc, 0);
4603
4604 /* Producer */
4605 rb_cEnumProducer = rb_define_class_under(rb_cEnumerator, "Producer", rb_cObject);
4606 rb_define_alloc_func(rb_cEnumProducer, producer_allocate);
4607 rb_define_method(rb_cEnumProducer, "each", producer_each, 0);
4608 rb_define_singleton_method(rb_cEnumerator, "produce", enumerator_s_produce, -1);
4609
4610 /* Chain */
4611 rb_cEnumChain = rb_define_class_under(rb_cEnumerator, "Chain", rb_cEnumerator);
4612 rb_define_alloc_func(rb_cEnumChain, enum_chain_allocate);
4613 rb_define_method(rb_cEnumChain, "initialize", enum_chain_initialize, -2);
4614 rb_define_method(rb_cEnumChain, "initialize_copy", enum_chain_init_copy, 1);
4615 rb_define_method(rb_cEnumChain, "each", enum_chain_each, -1);
4616 rb_define_method(rb_cEnumChain, "size", enum_chain_size, 0);
4617 rb_define_method(rb_cEnumChain, "rewind", enum_chain_rewind, 0);
4618 rb_define_method(rb_cEnumChain, "inspect", enum_chain_inspect, 0);
4619 rb_undef_method(rb_cEnumChain, "feed");
4620 rb_undef_method(rb_cEnumChain, "next");
4621 rb_undef_method(rb_cEnumChain, "next_values");
4622 rb_undef_method(rb_cEnumChain, "peek");
4623 rb_undef_method(rb_cEnumChain, "peek_values");
4624
4625 /* Product */
4626 rb_cEnumProduct = rb_define_class_under(rb_cEnumerator, "Product", rb_cEnumerator);
4627 rb_define_alloc_func(rb_cEnumProduct, enum_product_allocate);
4628 rb_define_method(rb_cEnumProduct, "initialize", enum_product_initialize, -1);
4629 rb_define_method(rb_cEnumProduct, "initialize_copy", enum_product_init_copy, 1);
4630 rb_define_method(rb_cEnumProduct, "each", enum_product_each, 0);
4631 rb_define_method(rb_cEnumProduct, "size", enum_product_size, 0);
4632 rb_define_method(rb_cEnumProduct, "rewind", enum_product_rewind, 0);
4633 rb_define_method(rb_cEnumProduct, "inspect", enum_product_inspect, 0);
4634 rb_undef_method(rb_cEnumProduct, "feed");
4635 rb_undef_method(rb_cEnumProduct, "next");
4636 rb_undef_method(rb_cEnumProduct, "next_values");
4637 rb_undef_method(rb_cEnumProduct, "peek");
4638 rb_undef_method(rb_cEnumProduct, "peek_values");
4639 rb_define_singleton_method(rb_cEnumerator, "product", enumerator_s_product, -1);
4640
4641 /* ArithmeticSequence */
4642 rb_cArithSeq = rb_define_class_under(rb_cEnumerator, "ArithmeticSequence", rb_cEnumerator);
4643 rb_undef_alloc_func(rb_cArithSeq);
4644 rb_undef_method(CLASS_OF(rb_cArithSeq), "new");
4645 rb_define_method(rb_cArithSeq, "begin", arith_seq_begin, 0);
4646 rb_define_method(rb_cArithSeq, "end", arith_seq_end, 0);
4647 rb_define_method(rb_cArithSeq, "exclude_end?", arith_seq_exclude_end, 0);
4648 rb_define_method(rb_cArithSeq, "step", arith_seq_step, 0);
4649 rb_define_method(rb_cArithSeq, "first", arith_seq_first, -1);
4650 rb_define_method(rb_cArithSeq, "last", arith_seq_last, -1);
4651 rb_define_method(rb_cArithSeq, "inspect", arith_seq_inspect, 0);
4652 rb_define_method(rb_cArithSeq, "==", arith_seq_eq, 1);
4653 rb_define_method(rb_cArithSeq, "===", arith_seq_eq, 1);
4654 rb_define_method(rb_cArithSeq, "eql?", arith_seq_eq, 1);
4655 rb_define_method(rb_cArithSeq, "hash", arith_seq_hash, 0);
4656 rb_define_method(rb_cArithSeq, "each", arith_seq_each, 0);
4657 rb_define_method(rb_cArithSeq, "size", arith_seq_size, 0);
4658
4659 rb_provide("enumerator.so"); /* for backward compatibility */
4660}
4661#undef sym
4662
4663void
4664Init_Enumerator(void)
4665{
4666 id_rewind = rb_intern_const("rewind");
4667 id_new = rb_intern_const("new");
4668 id_next = rb_intern_const("next");
4669 id_result = rb_intern_const("result");
4670 id_receiver = rb_intern_const("receiver");
4671 id_arguments = rb_intern_const("arguments");
4672 id_memo = rb_intern_const("memo");
4673 id_method = rb_intern_const("method");
4674 id_force = rb_intern_const("force");
4675 id_to_enum = rb_intern_const("to_enum");
4676 id_each_entry = rb_intern_const("each_entry");
4677 id_begin = rb_intern_const("begin");
4678 id_end = rb_intern_const("end");
4679 id_step = rb_intern_const("step");
4680 id_exclude_end = rb_intern_const("exclude_end");
4681 sym_each = ID2SYM(id_each);
4682 sym_cycle = ID2SYM(rb_intern_const("cycle"));
4683 sym_yield = ID2SYM(rb_intern_const("yield"));
4684
4685 InitVM(Enumerator);
4686}