Ruby 4.1.0dev (2026-04-03 revision 9ef2153cdfc96626a1b5096d9c31ca23f3395036)
enumerator.c (9ef2153cdfc96626a1b5096d9c31ca23f3395036)
1/************************************************
2
3 enumerator.c - provides Enumerator class
4
5 $Author$
6
7 Copyright (C) 2001-2003 Akinori MUSHA
8
9 $Idaemons: /home/cvs/rb/enumerator/enumerator.c,v 1.1.1.1 2001/07/15 10:12:48 knu Exp $
10 $RoughId: enumerator.c,v 1.6 2003/07/27 11:03:24 nobu Exp $
11 $Id$
12
13************************************************/
14
15#include "ruby/internal/config.h"
16
17#ifdef HAVE_FLOAT_H
18#include <float.h>
19#endif
20
21#include <limits.h>
22#include "id.h"
23#include "internal.h"
24#include "internal/class.h"
25#include "internal/enumerator.h"
26#include "internal/error.h"
27#include "internal/hash.h"
28#include "internal/imemo.h"
29#include "internal/numeric.h"
30#include "internal/range.h"
31#include "internal/rational.h"
32#include "ruby/ruby.h"
33
34/*
35 * Document-class: Enumerator
36 *
37 * \Class \Enumerator supports:
38 *
39 * - {External iteration}[rdoc-ref:Enumerator@External+Iteration].
40 * - {Internal iteration}[rdoc-ref:Enumerator@Internal+Iteration].
41 *
42 * An \Enumerator may be created by the following methods:
43 *
44 * - Object#to_enum.
45 * - Object#enum_for.
46 * - Enumerator.new.
47 *
48 * In addition, certain Ruby methods return \Enumerator objects:
49 * a Ruby iterator method that accepts a block
50 * may return an \Enumerator if no block is given.
51 * There are many such methods, for example, in classes Array and Hash.
52 * (In the documentation for those classes, search for `new_enumerator`.)
53 *
54 * == Internal Iteration
55 *
56 * In _internal iteration_, an iterator method drives the iteration
57 * and the caller's block handles the processing;
58 * this example uses method #each_with_index:
59 *
60 * words = %w[foo bar baz] # => ["foo", "bar", "baz"]
61 * enumerator = words.each # => #<Enumerator: ...>
62 * enumerator.each_with_index {|word, i| puts "#{i}: #{word}" }
63 * 0: foo
64 * 1: bar
65 * 2: baz
66 *
67 * Iterator methods in class \Enumerator include:
68 *
69 * - #each:
70 * passes each item to the block.
71 * - #each_with_index:
72 * passes each item and its index to the block.
73 * - #each_with_object (aliased as #with_object):
74 * passes each item and a given object to the block.
75 * - #with_index:
76 * like #each_with_index, but starting at a given offset (instead of zero).
77 *
78 * \Class \Enumerator includes module Enumerable,
79 * which provides many more iterator methods.
80 *
81 * == External Iteration
82 *
83 * In _external iteration_, the user's program both drives the iteration
84 * and handles the processing in stream-like fashion;
85 * this example uses method #next:
86 *
87 * words = %w[foo bar baz]
88 * enumerator = words.each
89 * enumerator.next # => "foo"
90 * enumerator.next # => "bar"
91 * enumerator.next # => "baz"
92 * enumerator.next # Raises StopIteration: iteration reached an end
93 *
94 * External iteration methods in class \Enumerator include:
95 *
96 * - #feed:
97 * sets the value that is next to be returned.
98 * - #next:
99 * returns the next value and increments the position.
100 * - #next_values:
101 * returns the next value in a 1-element array and increments the position.
102 * - #peek:
103 * returns the next value but does not increment the position.
104 * - #peek_values:
105 * returns the next value in a 1-element array but does not increment the position.
106 * - #rewind:
107 * sets the position to zero.
108 *
109 * Each of these methods raises FrozenError if called from a frozen \Enumerator.
110 *
111 * == External Iteration and \Fiber
112 *
113 * External iteration that uses Fiber differs *significantly* from internal iteration:
114 *
115 * - Using \Fiber adds some overhead compared to internal enumeration.
116 * - The stacktrace will only include the stack from the \Enumerator, not above.
117 * - \Fiber-local variables are *not* inherited inside the \Enumerator \Fiber,
118 * which instead starts with no \Fiber-local variables.
119 * - \Fiber storage variables *are* inherited and are designed
120 * to handle \Enumerator Fibers. Assigning to a \Fiber storage variable
121 * only affects the current \Fiber, so if you want to change state
122 * in the caller \Fiber of the \Enumerator \Fiber, you need to use an
123 * extra indirection (e.g., use some object in the \Fiber storage
124 * variable and mutate some ivar of it).
125 *
126 * Concretely:
127 *
128 * Thread.current[:fiber_local] = 1
129 * Fiber[:storage_var] = 1
130 * e = Enumerator.new do |y|
131 * p Thread.current[:fiber_local] # for external iteration: nil, for internal iteration: 1
132 * p Fiber[:storage_var] # => 1, inherited
133 * Fiber[:storage_var] += 1
134 * y << 42
135 * end
136 *
137 * p e.next # => 42
138 * p Fiber[:storage_var] # => 1 (it ran in a different Fiber)
139 *
140 * e.each { p _1 }
141 * p Fiber[:storage_var] # => 2 (it ran in the same Fiber/"stack" as the current Fiber)
142 *
143 * == Converting External Iteration to Internal Iteration
144 *
145 * You can use an external iterator to implement an internal iterator as follows:
146 *
147 * def ext_each(e)
148 * while true
149 * begin
150 * vs = e.next_values
151 * rescue StopIteration
152 * return $!.result
153 * end
154 * y = yield(*vs)
155 * e.feed y
156 * end
157 * end
158 *
159 * o = Object.new
160 *
161 * def o.each
162 * puts yield
163 * puts yield(1)
164 * puts yield(1, 2)
165 * 3
166 * end
167 *
168 * # use o.each as an internal iterator directly.
169 * puts o.each {|*x| puts x; [:b, *x] }
170 * # => [], [:b], [1], [:b, 1], [1, 2], [:b, 1, 2], 3
171 *
172 * # convert o.each to an external iterator for
173 * # implementing an internal iterator.
174 * puts ext_each(o.to_enum) {|*x| puts x; [:b, *x] }
175 * # => [], [:b], [1], [:b, 1], [1, 2], [:b, 1, 2], 3
176 *
177 */
179static VALUE rb_cLazy;
180static ID id_rewind, id_to_enum, id_each_entry;
181static ID id_next, id_result, id_receiver, id_arguments, id_memo, id_method, id_force;
182static VALUE sym_each, sym_yield;
183
184static VALUE lazy_use_super_method;
185
186extern ID ruby_static_id_cause;
187
188#define id_call idCall
189#define id_cause ruby_static_id_cause
190#define id_each idEach
191#define id_eqq idEqq
192#define id_initialize idInitialize
193#define id_size idSize
194
196
198 VALUE obj;
199 ID meth;
200 VALUE args;
201 VALUE fib;
202 VALUE dst;
203 VALUE lookahead;
204 VALUE feedvalue;
205 VALUE stop_exc;
206 VALUE size;
207 VALUE procs;
209 int kw_splat;
210};
211
212RUBY_REFERENCES(enumerator_refs) = {
213 RUBY_REF_EDGE(struct enumerator, obj),
214 RUBY_REF_EDGE(struct enumerator, args),
215 RUBY_REF_EDGE(struct enumerator, fib),
216 RUBY_REF_EDGE(struct enumerator, dst),
217 RUBY_REF_EDGE(struct enumerator, lookahead),
218 RUBY_REF_EDGE(struct enumerator, feedvalue),
219 RUBY_REF_EDGE(struct enumerator, stop_exc),
220 RUBY_REF_EDGE(struct enumerator, size),
221 RUBY_REF_EDGE(struct enumerator, procs),
222 RUBY_REF_END
223};
224
225static VALUE rb_cGenerator, rb_cYielder, rb_cEnumProducer;
226
227struct generator {
228 VALUE proc;
229 VALUE obj;
230};
231
232struct yielder {
233 VALUE proc;
234};
235
236struct producer {
237 VALUE init;
238 VALUE proc;
239 VALUE size;
240};
241
242typedef struct MEMO *lazyenum_proc_func(VALUE, struct MEMO *, VALUE, long);
243typedef VALUE lazyenum_size_func(VALUE, VALUE);
244typedef int lazyenum_precheck_func(VALUE proc_entry);
245typedef struct {
246 lazyenum_proc_func *proc;
247 lazyenum_size_func *size;
248 lazyenum_precheck_func *precheck;
250
252 VALUE proc;
253 VALUE memo;
254 const lazyenum_funcs *fn;
255};
256
257static VALUE generator_allocate(VALUE klass);
258static VALUE generator_init(VALUE obj, VALUE proc);
259
260static VALUE rb_cEnumChain;
261
263 VALUE enums;
264 long pos;
265};
266
267static VALUE rb_cEnumProduct;
268
270 VALUE enums;
271};
272
273VALUE rb_cArithSeq;
274
275static const rb_data_type_t enumerator_data_type = {
276 "enumerator",
277 {
278 RUBY_REFS_LIST_PTR(enumerator_refs),
280 NULL, // Nothing allocated externally, so don't need a memsize function
281 NULL,
282 },
283 0, NULL, RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED | RUBY_TYPED_DECL_MARKING | RUBY_TYPED_EMBEDDABLE
284};
285
/* Fetches the enumerator struct wrapped by obj, raising ArgumentError when
 * the object was allocated but never initialized (obj field still Qundef). */
static struct enumerator *
enumerator_ptr(VALUE obj)
{
    struct enumerator *ptr;

    TypedData_Get_Struct(obj, struct enumerator, &enumerator_data_type, ptr);
    if (!ptr || UNDEF_P(ptr->obj)) {
        rb_raise(rb_eArgError, "uninitialized enumerator");
    }
    return ptr;
}
297
/* GC mark/compact callback for proc_entry: keeps (and updates, during
 * compaction) the two VALUE references held by the entry. */
static void
proc_entry_mark_and_move(void *p)
{
    struct proc_entry *ptr = p;
    rb_gc_mark_and_move(&ptr->proc);
    rb_gc_mark_and_move(&ptr->memo);
}
305
306static const rb_data_type_t proc_entry_data_type = {
307 "proc_entry",
308 {
309 proc_entry_mark_and_move,
311 NULL, // Nothing allocated externally, so don't need a memsize function
312 proc_entry_mark_and_move,
313 },
314 0, 0, RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED | RUBY_TYPED_EMBEDDABLE
315};
316
317static struct proc_entry *
318proc_entry_ptr(VALUE proc_entry)
319{
320 struct proc_entry *ptr;
321
322 TypedData_Get_Struct(proc_entry, struct proc_entry, &proc_entry_data_type, ptr);
323
324 return ptr;
325}
326
327/*
328 * call-seq:
329 * obj.to_enum(method = :each, *args) -> enum
330 * obj.enum_for(method = :each, *args) -> enum
331 * obj.to_enum(method = :each, *args) {|*args| block} -> enum
332 * obj.enum_for(method = :each, *args){|*args| block} -> enum
333 *
334 * Creates a new Enumerator which will enumerate by calling +method+ on
335 * +obj+, passing +args+ if any. What was _yielded_ by method becomes
336 * values of enumerator.
337 *
338 * If a block is given, it will be used to calculate the size of
339 * the enumerator without the need to iterate it (see Enumerator#size).
340 *
341 * === Examples
342 *
343 * str = "xyz"
344 *
345 * enum = str.enum_for(:each_byte)
346 * enum.each { |b| puts b }
347 * # => 120
348 * # => 121
349 * # => 122
350 *
351 * # protect an array from being modified by some_method
352 * a = [1, 2, 3]
353 * some_method(a.to_enum)
354 *
355 * # String#split in block form is more memory-effective:
356 * very_large_string.split("|") { |chunk| return chunk if chunk.include?('DATE') }
357 * # This could be rewritten more idiomatically with to_enum:
358 * very_large_string.to_enum(:split, "|").lazy.grep(/DATE/).first
359 *
360 * It is typical to call to_enum when defining methods for
361 * a generic Enumerable, in case no block is passed.
362 *
363 * Here is such an example, with parameter passing and a sizing block:
364 *
365 * module Enumerable
366 * # a generic method to repeat the values of any enumerable
367 * def repeat(n)
368 * raise ArgumentError, "#{n} is negative!" if n < 0
369 * unless block_given?
370 * return to_enum(__method__, n) do # __method__ is :repeat here
371 * sz = size # Call size and multiply by n...
372 * sz * n if sz # but return nil if size itself is nil
373 * end
374 * end
375 * each do |*val|
376 * n.times { yield *val }
377 * end
378 * end
379 * end
380 *
381 * %i[hello world].repeat(2) { |w| puts w }
382 * # => Prints 'hello', 'hello', 'world', 'world'
383 * enum = (1..14).repeat(3)
384 * # => returns an Enumerator when called without a block
385 * enum.first(4) # => [1, 1, 1, 2]
386 * enum.size # => 42
387 */
388static VALUE
389obj_to_enum(int argc, VALUE *argv, VALUE obj)
390{
391 VALUE enumerator, meth = sym_each;
392
393 if (argc > 0) {
394 --argc;
395 meth = *argv++;
396 }
397 enumerator = rb_enumeratorize_with_size(obj, meth, argc, argv, 0);
398 if (rb_block_given_p()) {
399 RB_OBJ_WRITE(enumerator, &enumerator_ptr(enumerator)->size, rb_block_proc());
400 }
401 return enumerator;
402}
403
404static VALUE
405enumerator_allocate(VALUE klass)
406{
407 struct enumerator *ptr;
408 VALUE enum_obj;
409
410 enum_obj = TypedData_Make_Struct(klass, struct enumerator, &enumerator_data_type, ptr);
411 ptr->obj = Qundef;
412
413 return enum_obj;
414}
415
/* Core initializer shared by the enumerator constructors: stores the
 * receiver, iteration method, arguments and size information into the
 * already-allocated enum_obj and resets all external-iteration state.
 * Raises FrozenError when enum_obj is frozen and ArgumentError when the
 * underlying struct is missing.  VALUE fields that may reference heap
 * objects are written through RB_OBJ_WRITE to keep GC write barriers
 * intact (this type is RUBY_TYPED_WB_PROTECTED). */
static VALUE
enumerator_init(VALUE enum_obj, VALUE obj, VALUE meth, int argc, const VALUE *argv, rb_enumerator_size_func *size_fn, VALUE size, int kw_splat)
{
    struct enumerator *ptr;

    rb_check_frozen(enum_obj);
    TypedData_Get_Struct(enum_obj, struct enumerator, &enumerator_data_type, ptr);

    if (!ptr) {
        rb_raise(rb_eArgError, "unallocated enumerator");
    }

    RB_OBJ_WRITE(enum_obj, &ptr->obj, obj);
    ptr->meth = rb_to_id(meth);
    /* args is only set when arguments were given; otherwise it stays 0 */
    if (argc) RB_OBJ_WRITE(enum_obj, &ptr->args, rb_ary_new4(argc, argv));
    ptr->fib = 0;
    ptr->dst = Qnil;
    ptr->lookahead = Qundef;
    ptr->feedvalue = Qundef;
    ptr->stop_exc = Qfalse;
    RB_OBJ_WRITE(enum_obj, &ptr->size, size);
    ptr->size_fn = size_fn;
    ptr->kw_splat = kw_splat;

    return enum_obj;
}
442
443static VALUE
444convert_to_feasible_size_value(VALUE obj)
445{
446 if (NIL_P(obj)) {
447 return obj;
448 }
449 else if (rb_respond_to(obj, id_call)) {
450 return obj;
451 }
452 else if (RB_FLOAT_TYPE_P(obj) && RFLOAT_VALUE(obj) == HUGE_VAL) {
453 return obj;
454 }
455 else {
456 return rb_to_int(obj);
457 }
458}
459
460/*
461 * call-seq:
462 * Enumerator.new(size = nil) {|yielder| ... }
463 *
464 * Returns a new \Enumerator object that can be used for iteration.
465 *
466 * The given block defines the iteration;
467 * it is called with a "yielder" object that can yield an object
468 * via a call to method <tt>yielder.yield</tt>:
469 *
470 * fib = Enumerator.new do |yielder|
471 * n = next_n = 1
472 * while true do
473 * yielder.yield(n)
474 * n, next_n = next_n, n + next_n
475 * end
476 * end
477 *
478 * fib.take(10) # => [1, 1, 2, 3, 5, 8, 13, 21, 34, 55]
479 *
480 * Parameter +size+ specifies how the size is to be calculated (see #size);
481 * it can either be a value or a callable object:
482 *
483 * Enumerator.new{}.size # => nil
484 * Enumerator.new(42){}.size # => 42
485 * Enumerator.new(-> {42}){}.size # => 42
486 *
487 */
/* Enumerator.new implementation: wraps the mandatory block in a Generator
 * (iterated via :each) and records the optional size argument, which may be
 * a value or a callable.  rb_block_proc() raises when no block is given. */
static VALUE
enumerator_initialize(int argc, VALUE *argv, VALUE obj)
{
    VALUE iter = rb_block_proc();
    VALUE recv = generator_init(generator_allocate(rb_cGenerator), iter);
    VALUE arg0 = rb_check_arity(argc, 0, 1) ? argv[0] : Qnil;
    VALUE size = convert_to_feasible_size_value(arg0);

    return enumerator_init(obj, recv, sym_each, 0, 0, 0, size, false);
}
498
499/* :nodoc: */
/* initialize_copy for Enumerator (dup/clone support): copies the iteration
 * target, method, arguments and size info, but deliberately resets the
 * external-iteration state — a running fiber cannot be duplicated. */
static VALUE
enumerator_init_copy(VALUE obj, VALUE orig)
{
    struct enumerator *ptr0, *ptr1;

    if (!OBJ_INIT_COPY(obj, orig)) return obj;
    ptr0 = enumerator_ptr(orig);
    if (ptr0->fib) {
        /* Fibers cannot be copied */
        rb_raise(rb_eTypeError, "can't copy execution context");
    }

    TypedData_Get_Struct(obj, struct enumerator, &enumerator_data_type, ptr1);

    if (!ptr1) {
        rb_raise(rb_eArgError, "unallocated enumerator");
    }

    RB_OBJ_WRITE(obj, &ptr1->obj, ptr0->obj);
    ptr1->meth = ptr0->meth;
    RB_OBJ_WRITE(obj, &ptr1->args, ptr0->args);
    ptr1->fib = 0;
    ptr1->lookahead = Qundef;
    ptr1->feedvalue = Qundef;
    RB_OBJ_WRITE(obj, &ptr1->size, ptr0->size);
    ptr1->size_fn = ptr0->size_fn;

    return obj;
}
529
/*
 * For backwards compatibility; use rb_enumeratorize_with_size.
 * Equivalent to calling it with a NULL size function (size reports nil).
 */
VALUE
rb_enumeratorize(VALUE obj, VALUE meth, int argc, const VALUE *argv)
{
    return rb_enumeratorize_with_size(obj, meth, argc, argv, 0);
}
538
539static VALUE lazy_to_enum_i(VALUE self, VALUE meth, int argc, const VALUE *argv, rb_enumerator_size_func *size_fn, int kw_splat);
540static int lazy_precheck(VALUE procs);
541
/* Builds an Enumerator over obj.meth(*argv) with an explicit keyword-splat
 * flag.  A lazy receiver produces an Enumerator::Lazy; an Enumerator::Chain
 * receiver is first wrapped in a plain Enumerator over its :each so the
 * result is a plain Enumerator rather than a Chain. */
VALUE
rb_enumeratorize_with_size_kw(VALUE obj, VALUE meth, int argc, const VALUE *argv, rb_enumerator_size_func *size_fn, int kw_splat)
{
    VALUE base_class = rb_cEnumerator;

    if (RTEST(rb_obj_is_kind_of(obj, rb_cLazy))) {
        base_class = rb_cLazy;
    }
    else if (RTEST(rb_obj_is_kind_of(obj, rb_cEnumChain))) {
        obj = enumerator_init(enumerator_allocate(rb_cEnumerator), obj, sym_each, 0, 0, 0, Qnil, false);
    }

    return enumerator_init(enumerator_allocate(base_class),
                           obj, meth, argc, argv, size_fn, Qnil, kw_splat);
}
557
/* Like rb_enumeratorize_with_size_kw, inferring the keyword-splat flag from
 * whether the current method was called with keyword arguments. */
VALUE
rb_enumeratorize_with_size(VALUE obj, VALUE meth, int argc, const VALUE *argv, rb_enumerator_size_func *size_fn)
{
    return rb_enumeratorize_with_size_kw(obj, meth, argc, argv, size_fn, rb_keyword_given_p());
}
563
/* Invokes e->obj.<e->meth>(*e->args) with func as the block (when func is 0,
 * the caller's block is passed through), honoring the stored kw_splat flag.
 * arg is the opaque value forwarded to func on each yield. */
static VALUE
enumerator_block_call(VALUE obj, rb_block_call_func *func, VALUE arg)
{
    int argc = 0;
    const VALUE *argv = 0;
    const struct enumerator *e = enumerator_ptr(obj);
    ID meth = e->meth;

    VALUE args = e->args;
    if (args) {
        argc = RARRAY_LENINT(args);
        argv = RARRAY_CONST_PTR(args);
    }

    VALUE ret = rb_block_call_kw(e->obj, meth, argc, argv, func, arg, e->kw_splat);

    /* keep args (and thus argv) alive across the call */
    RB_GC_GUARD(args);

    return ret;
}
584
585/*
586 * call-seq:
587 * enum.each { |elm| block } -> obj
588 * enum.each -> enum
589 * enum.each(*appending_args) { |elm| block } -> obj
590 * enum.each(*appending_args) -> an_enumerator
591 *
592 * Iterates over the block according to how this Enumerator was constructed.
593 * If no block and no arguments are given, returns self.
594 *
595 * === Examples
596 *
597 * "Hello, world!".scan(/\w+/) #=> ["Hello", "world"]
598 * "Hello, world!".to_enum(:scan, /\w+/).to_a #=> ["Hello", "world"]
599 * "Hello, world!".to_enum(:scan).each(/\w+/).to_a #=> ["Hello", "world"]
600 *
601 * obj = Object.new
602 *
603 * def obj.each_arg(a, b=:b, *rest)
604 * yield a
605 * yield b
606 * yield rest
607 * :method_returned
608 * end
609 *
610 * enum = obj.to_enum :each_arg, :a, :x
611 *
612 * enum.each.to_a #=> [:a, :x, []]
613 * enum.each.equal?(enum) #=> true
614 * enum.each { |elm| elm } #=> :method_returned
615 *
616 * enum.each(:y, :z).to_a #=> [:a, :x, [:y, :z]]
617 * enum.each(:y, :z).equal?(enum) #=> false
618 * enum.each(:y, :z) { |elm| elm } #=> :method_returned
619 *
620 */
/* Enumerator#each implementation.  Extra arguments are appended to the
 * stored ones on a *duplicate* of self (the original is untouched), which
 * invalidates any recorded size.  With no block the (possibly duplicated)
 * enumerator itself is returned; otherwise the iteration is run. */
static VALUE
enumerator_each(int argc, VALUE *argv, VALUE obj)
{
    struct enumerator *e = enumerator_ptr(obj);

    if (argc > 0) {
        /* work on a dup so the receiver's own args/size stay intact */
        VALUE args = (e = enumerator_ptr(obj = rb_obj_dup(obj)))->args;
        if (args) {
#if SIZEOF_INT < SIZEOF_LONG
            /* check int range overflow */
            rb_long2int(RARRAY_LEN(args) + argc);
#endif
            args = rb_ary_dup(args);
            rb_ary_cat(args, argv, argc);
        }
        else {
            args = rb_ary_new4(argc, argv);
        }
        RB_OBJ_WRITE(obj, &e->args, args);
        /* appended args make the previously computed size meaningless */
        e->size = Qnil;
        e->size_fn = 0;
    }
    if (!rb_block_given_p()) return obj;

    /* a lazy precheck may decide no iteration is needed at all */
    if (!lazy_precheck(e->procs)) return Qnil;

    return enumerator_block_call(obj, 0, obj);
}
649
/* Block callback for #with_index: m is a MEMO whose v1 holds the current
 * index, incremented after each element.  Multiple yielded values are packed
 * into an array so the block always receives (value, index). */
static VALUE
enumerator_with_index_i(RB_BLOCK_CALL_FUNC_ARGLIST(val, m))
{
    struct MEMO *memo = (struct MEMO *)m;
    VALUE idx = memo->v1;
    MEMO_V1_SET(memo, rb_int_succ(idx));

    if (argc <= 1)
        return rb_yield_values(2, val, idx);

    return rb_yield_values(2, rb_ary_new4(argc, argv), idx);
}
662
663static VALUE
664enumerator_size(VALUE obj);
665
/* rb_enumerator_size_func adapter: delegates to Enumerator#size of the
 * receiver; args and eobj are unused but required by the callback signature. */
static VALUE
enumerator_enum_size(VALUE obj, VALUE args, VALUE eobj)
{
    return enumerator_size(obj);
}
671
672/*
673 * call-seq:
674 * e.with_index(offset = 0) {|(*args), idx| ... }
675 * e.with_index(offset = 0)
676 *
677 * Iterates the given block for each element with an index, which
678 * starts from +offset+. If no block is given, returns a new Enumerator
679 * that includes the index, starting from +offset+
680 *
681 * +offset+:: the starting index to use
682 *
683 */
/* Enumerator#with_index implementation: validates the optional offset
 * (nil counts as 0), returns a sized enumerator when no block is given,
 * otherwise iterates with a MEMO carrying the running index. */
static VALUE
enumerator_with_index(int argc, VALUE *argv, VALUE obj)
{
    VALUE memo;

    rb_check_arity(argc, 0, 1);
    RETURN_SIZED_ENUMERATOR(obj, argc, argv, enumerator_enum_size);
    memo = (!argc || NIL_P(memo = argv[0])) ? INT2FIX(0) : rb_to_int(memo);
    return enumerator_block_call(obj, enumerator_with_index_i, (VALUE)rb_imemo_memo_new(memo, 0, 0));
}
694
695/*
696 * call-seq:
697 * e.each_with_index {|(*args), idx| ... }
698 * e.each_with_index
699 *
700 * Same as Enumerator#with_index(0), i.e. there is no starting offset.
701 *
702 * If no block is given, a new Enumerator is returned that includes the index.
703 *
704 */
/* Enumerator#each_with_index: simply #with_index with no offset argument. */
static VALUE
enumerator_each_with_index(VALUE obj)
{
    return enumerator_with_index(0, NULL, obj);
}
710
711static VALUE
712enumerator_with_object_i(RB_BLOCK_CALL_FUNC_ARGLIST(val, memo))
713{
714 if (argc <= 1)
715 return rb_yield_values(2, val, memo);
716
717 return rb_yield_values(2, rb_ary_new4(argc, argv), memo);
718}
719
720/*
721 * call-seq:
722 * e.each_with_object(obj) {|(*args), obj| ... }
723 * e.each_with_object(obj)
724 * e.with_object(obj) {|(*args), obj| ... }
725 * e.with_object(obj)
726 *
727 * Iterates the given block for each element with an arbitrary object, +obj+,
728 * and returns +obj+
729 *
730 * If no block is given, returns a new Enumerator.
731 *
732 * === Example
733 *
734 * to_three = Enumerator.new do |y|
735 * 3.times do |x|
736 * y << x
737 * end
738 * end
739 *
740 * to_three_with_string = to_three.with_object("foo")
741 * to_three_with_string.each do |x,string|
742 * puts "#{string}: #{x}"
743 * end
744 *
745 * # => foo: 0
746 * # => foo: 1
747 * # => foo: 2
748 */
/* Enumerator#with_object / #each_with_object implementation: iterates
 * yielding (value, memo) and returns memo; without a block returns a sized
 * enumerator instead. */
static VALUE
enumerator_with_object(VALUE obj, VALUE memo)
{
    RETURN_SIZED_ENUMERATOR(obj, 1, &memo, enumerator_enum_size);
    enumerator_block_call(obj, enumerator_with_object_i, memo);

    return memo;
}
757
/* Runs inside the enumerator's fiber, once per yielded element: packs the
 * yielded values into an array, suspends the fiber handing them to the
 * caller, and — when resumed — returns the pending #feed value (or nil)
 * as the result of the iterated method's yield. */
static VALUE
next_ii(RB_BLOCK_CALL_FUNC_ARGLIST(i, obj))
{
    struct enumerator *e = enumerator_ptr(obj);
    VALUE feedvalue = Qnil;
    VALUE args = rb_ary_new4(argc, argv);
    rb_fiber_yield(1, &args);
    if (!UNDEF_P(e->feedvalue)) {
        feedvalue = e->feedvalue;
        e->feedvalue = Qundef;
    }
    return feedvalue;
}
771
/* Fiber body for external iteration: drives the whole each-loop (next_ii
 * suspends per element).  When iteration finishes, records a StopIteration
 * carrying the method's return value (retrievable via StopIteration#result)
 * and yields nil back to the caller one last time. */
static VALUE
next_i(RB_BLOCK_CALL_FUNC_ARGLIST(_, obj))
{
    struct enumerator *e = enumerator_ptr(obj);
    VALUE nil = Qnil;
    VALUE result;

    result = rb_block_call(obj, id_each, 0, 0, next_ii, obj);
    RB_OBJ_WRITE(obj, &e->stop_exc, rb_exc_new2(rb_eStopIteration, "iteration reached an end"));
    rb_ivar_set(e->stop_exc, id_result, result);
    return rb_fiber_yield(1, &nil);
}
784
/* (Re)creates the fiber used for external iteration, recording the current
 * fiber as the destination to yield back to, and clears any stale peek
 * buffer. */
static void
next_init(VALUE obj, struct enumerator *e)
{
    VALUE curr = rb_fiber_current();
    RB_OBJ_WRITE(obj, &e->dst, curr);
    RB_OBJ_WRITE(obj, &e->fib, rb_fiber_new(next_i, obj));
    e->lookahead = Qundef;
}
793
/* Fetches the next array of yielded values by resuming the iteration fiber.
 * If iteration already stopped, raises a *fresh* StopIteration that copies
 * the stored message/result and chains the original exception as its cause
 * (so each call gets a new exception object).  On reaching the end during
 * this call, clears all iteration state before raising the stored one. */
static VALUE
get_next_values(VALUE obj, struct enumerator *e)
{
    VALUE curr, vs;

    if (e->stop_exc) {
        VALUE exc = e->stop_exc;
        VALUE result = rb_attr_get(exc, id_result);
        VALUE mesg = rb_attr_get(exc, idMesg);
        if (!NIL_P(mesg)) mesg = rb_str_dup(mesg);
        VALUE stop_exc = rb_exc_new_str(rb_eStopIteration, mesg);
        rb_ivar_set(stop_exc, id_cause, exc);
        rb_ivar_set(stop_exc, id_result, result);
        rb_exc_raise(stop_exc);
    }

    curr = rb_fiber_current();

    /* (re)start the fiber lazily, e.g. on the first call or after #rewind */
    if (!e->fib || !rb_fiber_alive_p(e->fib)) {
        next_init(obj, e);
    }

    vs = rb_fiber_resume(e->fib, 1, &curr);
    if (e->stop_exc) {
        /* iteration ended inside this resume: reset state, then raise */
        e->fib = 0;
        e->dst = Qnil;
        e->lookahead = Qundef;
        e->feedvalue = Qundef;
        rb_exc_raise(e->stop_exc);
    }
    return vs;
}
826
827/*
828 * call-seq:
829 * e.next_values -> array
830 *
 * Returns the next object as an array in the enumerator, and moves the
 * internal position forward. When the position reaches the end,
 * StopIteration is raised.
834 *
835 * See class-level notes about external iterators.
836 *
837 * This method can be used to distinguish <code>yield</code> and <code>yield
838 * nil</code>.
839 *
840 * === Example
841 *
842 * o = Object.new
843 * def o.each
844 * yield
845 * yield 1
846 * yield 1, 2
847 * yield nil
848 * yield [1, 2]
849 * end
850 * e = o.to_enum
851 * p e.next_values
852 * p e.next_values
853 * p e.next_values
854 * p e.next_values
855 * p e.next_values
856 * e = o.to_enum
857 * p e.next
858 * p e.next
859 * p e.next
860 * p e.next
861 * p e.next
862 *
863 * ## yield args next_values next
864 * # yield [] nil
865 * # yield 1 [1] 1
866 * # yield 1, 2 [1, 2] [1, 2]
867 * # yield nil [nil] nil
868 * # yield [1, 2] [[1, 2]] [1, 2]
869 *
870 */
871
/* Enumerator#next_values implementation: consumes the buffered #peek value
 * when present, otherwise pulls the next array of values from the fiber. */
static VALUE
enumerator_next_values(VALUE obj)
{
    struct enumerator *e = enumerator_ptr(obj);
    VALUE vs;

    rb_check_frozen(obj);

    if (!UNDEF_P(e->lookahead)) {
        vs = e->lookahead;
        e->lookahead = Qundef;
        return vs;
    }

    return get_next_values(obj, e);
}
888
889static VALUE
890ary2sv(VALUE args, int dup)
891{
892 if (!RB_TYPE_P(args, T_ARRAY))
893 return args;
894
895 switch (RARRAY_LEN(args)) {
896 case 0:
897 return Qnil;
898
899 case 1:
900 return RARRAY_AREF(args, 0);
901
902 default:
903 if (dup)
904 return rb_ary_dup(args);
905 return args;
906 }
907}
908
909/*
910 * call-seq:
911 * e.next -> object
912 *
 * Returns the next object in the enumerator, and moves the internal position
 * forward. When the position reaches the end, StopIteration is raised.
915 *
916 * === Example
917 *
918 * a = [1,2,3]
919 * e = a.to_enum
920 * p e.next #=> 1
921 * p e.next #=> 2
922 * p e.next #=> 3
923 * p e.next #raises StopIteration
924 *
925 * See class-level notes about external iterators.
926 *
927 */
928
/* Enumerator#next: fetches the next values array and unwraps it to a single
 * object (no dup needed — the values were already handed to the caller). */
static VALUE
enumerator_next(VALUE obj)
{
    VALUE vs = enumerator_next_values(obj);
    return ary2sv(vs, 0);
}
935
/* Fills (if needed) and returns the lookahead buffer without consuming it.
 * Returns the internal array — callers that expose it must dup first. */
static VALUE
enumerator_peek_values(VALUE obj)
{
    struct enumerator *e = enumerator_ptr(obj);

    rb_check_frozen(obj);

    if (UNDEF_P(e->lookahead)) {
        RB_OBJ_WRITE(obj, &e->lookahead, get_next_values(obj, e));
    }

    return e->lookahead;
}
949
950/*
951 * call-seq:
952 * e.peek_values -> array
953 *
954 * Returns the next object as an array, similar to Enumerator#next_values, but
955 * doesn't move the internal position forward. If the position is already at
956 * the end, StopIteration is raised.
957 *
958 * See class-level notes about external iterators.
959 *
960 * === Example
961 *
962 * o = Object.new
963 * def o.each
964 * yield
965 * yield 1
966 * yield 1, 2
967 * end
968 * e = o.to_enum
969 * p e.peek_values #=> []
970 * e.next
971 * p e.peek_values #=> [1]
972 * p e.peek_values #=> [1]
973 * e.next
974 * p e.peek_values #=> [1, 2]
975 * e.next
976 * p e.peek_values # raises StopIteration
977 *
978 */
979
/* Enumerator#peek_values: returns a dup of the lookahead buffer so caller
 * mutation cannot corrupt the enumerator's internal state. */
static VALUE
enumerator_peek_values_m(VALUE obj)
{
    return rb_ary_dup(enumerator_peek_values(obj));
}
985
986/*
987 * call-seq:
988 * e.peek -> object
989 *
990 * Returns the next object in the enumerator, but doesn't move the internal
991 * position forward. If the position is already at the end, StopIteration
992 * is raised.
993 *
994 * See class-level notes about external iterators.
995 *
996 * === Example
997 *
998 * a = [1,2,3]
999 * e = a.to_enum
1000 * p e.next #=> 1
1001 * p e.peek #=> 2
1002 * p e.peek #=> 2
1003 * p e.peek #=> 2
1004 * p e.next #=> 2
1005 * p e.next #=> 3
1006 * p e.peek #raises StopIteration
1007 *
1008 */
1009
/* Enumerator#peek: like #next but non-consuming; dup=1 so a multi-value
 * result cannot alias the internal lookahead buffer. */
static VALUE
enumerator_peek(VALUE obj)
{
    VALUE vs = enumerator_peek_values(obj);
    return ary2sv(vs, 1);
}
1016
1017/*
1018 * call-seq:
1019 * e.feed obj -> nil
1020 *
1021 * Sets the value to be returned by the next yield inside +e+.
1022 *
1023 * If the value is not set, the yield returns nil.
1024 *
1025 * This value is cleared after being yielded.
1026 *
1027 * # Array#map passes the array's elements to "yield" and collects the
1028 * # results of "yield" as an array.
1029 * # Following example shows that "next" returns the passed elements and
1030 * # values passed to "feed" are collected as an array which can be
1031 * # obtained by StopIteration#result.
1032 * e = [1,2,3].map
1033 * p e.next #=> 1
1034 * e.feed "a"
1035 * p e.next #=> 2
1036 * e.feed "b"
1037 * p e.next #=> 3
1038 * e.feed "c"
1039 * begin
1040 * e.next
1041 * rescue StopIteration
1042 * p $!.result #=> ["a", "b", "c"]
1043 * end
1044 *
1045 * o = Object.new
1046 * def o.each
1047 * x = yield # (2) blocks
1048 * p x # (5) => "foo"
1049 * x = yield # (6) blocks
1050 * p x # (8) => nil
1051 * x = yield # (9) blocks
1052 * p x # not reached w/o another e.next
1053 * end
1054 *
1055 * e = o.to_enum
1056 * e.next # (1)
1057 * e.feed "foo" # (3)
1058 * e.next # (4)
1059 * e.next # (7)
1060 * # (10)
1061 */
1062
/* Enumerator#feed implementation: stores v to be returned by the next yield
 * inside the enumerator.  Only one pending feed value is allowed at a time;
 * a second #feed before the value is consumed raises TypeError. */
static VALUE
enumerator_feed(VALUE obj, VALUE v)
{
    struct enumerator *e = enumerator_ptr(obj);

    rb_check_frozen(obj);

    if (!UNDEF_P(e->feedvalue)) {
        rb_raise(rb_eTypeError, "feed value already set");
    }
    RB_OBJ_WRITE(obj, &e->feedvalue, v);

    return Qnil;
}
1077
1078/*
1079 * call-seq:
1080 * e.rewind -> e
1081 *
1082 * Rewinds the enumeration sequence to the beginning.
1083 *
1084 * If the enclosed object responds to a "rewind" method, it is called.
1085 */
1086
/* Enumerator#rewind implementation: notifies the underlying object via its
 * "rewind" method when it responds to one (rb_check_funcall is a no-op
 * otherwise), then discards all external-iteration state so the next #next
 * restarts from the beginning. */
static VALUE
enumerator_rewind(VALUE obj)
{
    struct enumerator *e = enumerator_ptr(obj);

    rb_check_frozen(obj);

    rb_check_funcall(e->obj, id_rewind, 0, 0);

    e->fib = 0;
    e->dst = Qnil;
    e->lookahead = Qundef;
    e->feedvalue = Qundef;
    e->stop_exc = Qfalse;
    return obj;
}
1103
1104static struct generator *generator_ptr(VALUE obj);
1105static VALUE append_method(VALUE obj, VALUE str, ID default_method, VALUE default_args);
1106
/* rb_exec_recursive callback building the #inspect string.  Handles three
 * cases: an uninitialized enumerator, recursive inspection ("..."), and a
 * lazy enumerator with a chain of proc entries (each entry wraps the string
 * in another "#<Class: ...>" layer).  The normal case renders
 * "#<Class: receiver:method(args)>". */
static VALUE
inspect_enumerator(VALUE obj, VALUE dummy, int recur)
{
    struct enumerator *e;
    VALUE eobj, str, cname;

    TypedData_Get_Struct(obj, struct enumerator, &enumerator_data_type, e);

    cname = rb_obj_class(obj);

    if (!e || UNDEF_P(e->obj)) {
        return rb_sprintf("#<%"PRIsVALUE": uninitialized>", rb_class_path(cname));
    }

    if (recur) {
        str = rb_sprintf("#<%"PRIsVALUE": ...>", rb_class_path(cname));
        return str;
    }

    if (e->procs) {
        long i;

        eobj = generator_ptr(e->obj)->obj;
        /* In case procs chained enumerator traversing all proc entries manually */
        if (rb_obj_class(eobj) == cname) {
            str = rb_inspect(eobj);
        }
        else {
            str = rb_sprintf("#<%"PRIsVALUE": %+"PRIsVALUE">", rb_class_path(cname), eobj);
        }
        /* wrap once per chained lazy operation, innermost first */
        for (i = 0; i < RARRAY_LEN(e->procs); i++) {
            str = rb_sprintf("#<%"PRIsVALUE": %"PRIsVALUE, cname, str);
            append_method(RARRAY_AREF(e->procs, i), str, e->meth, e->args);
            rb_str_buf_cat2(str, ">");
        }
        return str;
    }

    /* prefer the @receiver ivar (set by some constructors) over e->obj */
    eobj = rb_attr_get(obj, id_receiver);
    if (NIL_P(eobj)) {
        eobj = e->obj;
    }

    /* (1..100).each_cons(2) => "#<Enumerator: 1..100:each_cons(2)>" */
    str = rb_sprintf("#<%"PRIsVALUE": %+"PRIsVALUE, rb_class_path(cname), eobj);
    append_method(obj, str, e->meth, e->args);

    rb_str_buf_cat2(str, ">");

    return str;
}
1158
1159static int
1160key_symbol_p(VALUE key, VALUE val, VALUE arg)
1161{
1162 if (SYMBOL_P(key)) return ST_CONTINUE;
1163 *(int *)arg = FALSE;
1164 return ST_STOP;
1165}
1166
/* rb_hash_foreach callback: appends one "key: value, " pair in keyword
 * style.  All keys were verified to be Symbols beforehand, so a non-Symbol
 * here means the hash was mutated during iteration. */
static int
kwd_append(VALUE key, VALUE val, VALUE str)
{
    if (!SYMBOL_P(key)) rb_raise(rb_eRuntimeError, "non-symbol key inserted");
    rb_str_catf(str, "% "PRIsVALUE": %"PRIsVALUE", ", key, val);
    return ST_CONTINUE;
}
1174
/* Appends ":method(arg, arg, key: val)" to str for #inspect.  The @method
 * and @arguments ivars override default_method/default_args when present
 * (Qfalse for @method means "suppress the method part" — see its callers).
 * A trailing all-Symbol-keyed hash is rendered in keyword style. */
static VALUE
append_method(VALUE obj, VALUE str, ID default_method, VALUE default_args)
{
    VALUE method, eargs;

    method = rb_attr_get(obj, id_method);
    if (method != Qfalse) {
        if (!NIL_P(method)) {
            Check_Type(method, T_SYMBOL);
            method = rb_sym2str(method);
        }
        else {
            method = rb_id2str(default_method);
        }
        rb_str_buf_cat2(str, ":");
        rb_str_buf_append(str, method);
    }

    eargs = rb_attr_get(obj, id_arguments);
    if (NIL_P(eargs)) {
        eargs = default_args;
    }
    if (eargs != Qfalse) {
        long argc = RARRAY_LEN(eargs);
        const VALUE *argv = RARRAY_CONST_PTR(eargs); /* WB: no new reference */

        if (argc > 0) {
            VALUE kwds = Qnil;

            rb_str_buf_cat2(str, "(");

            /* a trailing hash with only Symbol keys is shown as keywords */
            if (RB_TYPE_P(argv[argc-1], T_HASH) && !RHASH_EMPTY_P(argv[argc-1])) {
                int all_key = TRUE;
                rb_hash_foreach(argv[argc-1], key_symbol_p, (VALUE)&all_key);
                if (all_key) kwds = argv[--argc];
            }

            while (argc--) {
                VALUE arg = *argv++;

                rb_str_append(str, rb_inspect(arg));
                rb_str_buf_cat2(str, ", ");
            }
            if (!NIL_P(kwds)) {
                rb_hash_foreach(kwds, kwd_append, str);
            }
            /* drop the final ", " before closing the parenthesis */
            rb_str_set_len(str, RSTRING_LEN(str)-2);
            rb_str_buf_cat2(str, ")");
        }
    }

    return str;
}
1228
1229/*
1230 * call-seq:
1231 * e.inspect -> string
1232 *
1233 * Creates a printable version of <i>e</i>.
1234 */
1235
static VALUE
enumerator_inspect(VALUE obj)
{
    /* rb_exec_recursive guards against cycles in self-referencing chains */
    return rb_exec_recursive(inspect_enumerator, obj, 0);
}
1241
1242/*
1243 * call-seq:
1244 * e.size -> int, Float::INFINITY or nil
1245 *
1246 * Returns the size of the enumerator, or +nil+ if it can't be calculated lazily.
1247 *
1248 * (1..100).to_a.permutation(4).size # => 94109400
1249 * loop.size # => Float::INFINITY
1250 * (1..100).drop_while.size # => nil
1251 *
1252 * Note that enumerator size might be inaccurate, and should be rather treated as a hint.
1253 * For example, there is no check that the size provided to ::new is accurate:
1254 *
1255 * e = Enumerator.new(5) { |y| 2.times { y << it} }
1256 * e.size # => 5
1257 * e.to_a.size # => 2
1258 *
1259 * Another example is an enumerator created by ::produce without a +size+ argument.
1260 * Such enumerators return +Infinity+ for size, but this is inaccurate if the passed
1261 * block raises StopIteration:
1262 *
1263 * e = Enumerator.produce(1) { it + 1 }
1264 * e.size # => Infinity
1265 *
1266 * e = Enumerator.produce(1) { it > 3 ? raise(StopIteration) : it + 1 }
1267 * e.size # => Infinity
1268 * e.to_a.size # => 4
1269 */
1270
/*
 * Implements Enumerator#size.  Resolution order:
 *   1. lazy proc chain: fold each entry's size function over the
 *      receiver's #size (an entry without one makes the size unknown);
 *   2. the enumerator's C-level size function, if set;
 *   3. e->size itself: called if it responds to #call (with the
 *      enumerator's arguments), otherwise returned as-is.
 */
static VALUE
enumerator_size(VALUE obj)
{
    struct enumerator *e = enumerator_ptr(obj);
    int argc = 0;
    const VALUE *argv = NULL;
    VALUE size;

    if (e->procs) {
        struct generator *g = generator_ptr(e->obj);
        VALUE receiver = rb_check_funcall(g->obj, id_size, 0, 0);
        long i = 0;

        for (i = 0; i < RARRAY_LEN(e->procs); i++) {
            VALUE proc = RARRAY_AREF(e->procs, i);
            struct proc_entry *entry = proc_entry_ptr(proc);
            lazyenum_size_func *size_fn = entry->fn->size;
            if (!size_fn) {
                /* this operation's size cannot be computed lazily */
                return Qnil;
            }
            receiver = (*size_fn)(proc, receiver);
        }
        return receiver;
    }

    if (e->size_fn) {
        return (*e->size_fn)(e->obj, e->args, obj);
    }
    if (e->args) {
        argc = (int)RARRAY_LEN(e->args);
        argv = RARRAY_CONST_PTR(e->args);
    }
    size = rb_check_funcall_kw(e->size, id_call, argc, argv, e->kw_splat);
    if (!UNDEF_P(size)) return size;
    return e->size;
}
1307
1308/*
1309 * Yielder
1310 */
1311static void
1312yielder_mark_and_move(void *p)
1313{
1314 struct yielder *ptr = p;
1315 rb_gc_mark_and_move(&ptr->proc);
1316}
1317
1318static const rb_data_type_t yielder_data_type = {
1319 "yielder",
1320 {
1321 yielder_mark_and_move,
1323 NULL,
1324 yielder_mark_and_move,
1325 },
1326 0, 0, RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED | RUBY_TYPED_EMBEDDABLE
1327};
1328
1329static struct yielder *
1330yielder_ptr(VALUE obj)
1331{
1332 struct yielder *ptr;
1333
1334 TypedData_Get_Struct(obj, struct yielder, &yielder_data_type, ptr);
1335 if (!ptr || UNDEF_P(ptr->proc)) {
1336 rb_raise(rb_eArgError, "uninitialized yielder");
1337 }
1338 return ptr;
1339}
1340
1341/* :nodoc: */
1342static VALUE
1343yielder_allocate(VALUE klass)
1344{
1345 struct yielder *ptr;
1346 VALUE obj;
1347
1348 obj = TypedData_Make_Struct(klass, struct yielder, &yielder_data_type, ptr);
1349 ptr->proc = Qundef;
1350
1351 return obj;
1352}
1353
1354static VALUE
1355yielder_init(VALUE obj, VALUE proc)
1356{
1357 struct yielder *ptr;
1358
1359 TypedData_Get_Struct(obj, struct yielder, &yielder_data_type, ptr);
1360
1361 if (!ptr) {
1362 rb_raise(rb_eArgError, "unallocated yielder");
1363 }
1364
1365 RB_OBJ_WRITE(obj, &ptr->proc, proc);
1366
1367 return obj;
1368}
1369
1370/* :nodoc: */
1371static VALUE
1372yielder_initialize(VALUE obj)
1373{
1374 rb_need_block();
1375
1376 return yielder_init(obj, rb_block_proc());
1377}
1378
1379/* :nodoc: */
1380static VALUE
1381yielder_yield(VALUE obj, VALUE args)
1382{
1383 struct yielder *ptr = yielder_ptr(obj);
1384
1385 return rb_proc_call_kw(ptr->proc, args, RB_PASS_CALLED_KEYWORDS);
1386}
1387
1388/* :nodoc: */
1389static VALUE
1390yielder_yield_push(VALUE obj, VALUE arg)
1391{
1392 struct yielder *ptr = yielder_ptr(obj);
1393
1394 rb_proc_call_with_block(ptr->proc, 1, &arg, Qnil);
1395
1396 return obj;
1397}
1398
1399/*
1400 * Returns a Proc object that takes arguments and yields them.
1401 *
1402 * This method is implemented so that a Yielder object can be directly
1403 * passed to another method as a block argument.
1404 *
1405 * enum = Enumerator.new { |y|
1406 * Dir.glob("*.rb") { |file|
1407 * File.open(file) { |f| f.each_line(&y) }
1408 * }
1409 * }
1410 */
1411static VALUE
1412yielder_to_proc(VALUE obj)
1413{
1414 VALUE method = rb_obj_method(obj, sym_yield);
1415
1416 return rb_funcall(method, idTo_proc, 0);
1417}
1418
/* Block function backing internally-created yielders: re-yields
 * whatever it receives to the enclosing block.  argc/argv come from
 * the RB_BLOCK_CALL_FUNC_ARGLIST macro expansion. */
static VALUE
yielder_yield_i(RB_BLOCK_CALL_FUNC_ARGLIST(obj, memo))
{
    return rb_yield_values_kw(argc, argv, RB_PASS_CALLED_KEYWORDS);
}
1424
1425static VALUE
1426yielder_new(void)
1427{
1428 return yielder_init(yielder_allocate(rb_cYielder), rb_proc_new(yielder_yield_i, 0));
1429}
1430
1431/*
1432 * Generator
1433 */
1434static void
1435generator_mark_and_move(void *p)
1436{
1437 struct generator *ptr = p;
1438 rb_gc_mark_and_move(&ptr->proc);
1439 rb_gc_mark_and_move(&ptr->obj);
1440}
1441
1442static const rb_data_type_t generator_data_type = {
1443 "generator",
1444 {
1445 generator_mark_and_move,
1447 NULL,
1448 generator_mark_and_move,
1449 },
1450 0, 0, RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED | RUBY_TYPED_EMBEDDABLE
1451};
1452
1453static struct generator *
1454generator_ptr(VALUE obj)
1455{
1456 struct generator *ptr;
1457
1458 TypedData_Get_Struct(obj, struct generator, &generator_data_type, ptr);
1459 if (!ptr || UNDEF_P(ptr->proc)) {
1460 rb_raise(rb_eArgError, "uninitialized generator");
1461 }
1462 return ptr;
1463}
1464
1465/* :nodoc: */
1466static VALUE
1467generator_allocate(VALUE klass)
1468{
1469 struct generator *ptr;
1470 VALUE obj;
1471
1472 obj = TypedData_Make_Struct(klass, struct generator, &generator_data_type, ptr);
1473 ptr->proc = Qundef;
1474
1475 return obj;
1476}
1477
1478static VALUE
1479generator_init(VALUE obj, VALUE proc)
1480{
1481 struct generator *ptr;
1482
1483 rb_check_frozen(obj);
1484 TypedData_Get_Struct(obj, struct generator, &generator_data_type, ptr);
1485
1486 if (!ptr) {
1487 rb_raise(rb_eArgError, "unallocated generator");
1488 }
1489
1490 RB_OBJ_WRITE(obj, &ptr->proc, proc);
1491
1492 return obj;
1493}
1494
1495/* :nodoc: */
1496static VALUE
1497generator_initialize(int argc, VALUE *argv, VALUE obj)
1498{
1499 VALUE proc;
1500
1501 if (argc == 0) {
1502 rb_need_block();
1503
1504 proc = rb_block_proc();
1505 }
1506 else {
1507 rb_scan_args(argc, argv, "1", &proc);
1508
1509 if (!rb_obj_is_proc(proc))
1510 rb_raise(rb_eTypeError,
1511 "wrong argument type %"PRIsVALUE" (expected Proc)",
1512 rb_obj_class(proc));
1513
1514 if (rb_block_given_p()) {
1515 rb_warn("given block not used");
1516 }
1517 }
1518
1519 return generator_init(obj, proc);
1520}
1521
1522/* :nodoc: */
1523static VALUE
1524generator_init_copy(VALUE obj, VALUE orig)
1525{
1526 struct generator *ptr0, *ptr1;
1527
1528 if (!OBJ_INIT_COPY(obj, orig)) return obj;
1529
1530 ptr0 = generator_ptr(orig);
1531
1532 TypedData_Get_Struct(obj, struct generator, &generator_data_type, ptr1);
1533
1534 if (!ptr1) {
1535 rb_raise(rb_eArgError, "unallocated generator");
1536 }
1537
1538 RB_OBJ_WRITE(obj, &ptr1->proc, ptr0->proc);
1539
1540 return obj;
1541}
1542
1543/* :nodoc: */
1544static VALUE
1545generator_each(int argc, VALUE *argv, VALUE obj)
1546{
1547 struct generator *ptr = generator_ptr(obj);
1548 VALUE args = rb_ary_new2(argc + 1);
1549
1550 rb_ary_push(args, yielder_new());
1551 if (argc > 0) {
1552 rb_ary_cat(args, argv, argc);
1553 }
1554
1555 return rb_proc_call_kw(ptr->proc, args, RB_PASS_CALLED_KEYWORDS);
1556}
1557
1558/* Lazy Enumerator methods */
1559static VALUE
1560enum_size(VALUE self)
1561{
1562 VALUE r = rb_check_funcall(self, id_size, 0, 0);
1563 return UNDEF_P(r) ? Qnil : r;
1564}
1565
/* rb_enumerator_size_func adapter; args/eobj are unused. */
static VALUE
lazyenum_size(VALUE self, VALUE args, VALUE eobj)
{
    return enum_size(self);
}

/* "size of the receiver" is the identity function, shared with map */
#define lazy_receiver_size lazy_map_size
1573
/*
 * Block function for Enumerator::Lazy.new's generator: prepends the
 * data argument m (the yielder) to the yielded value(s) and re-yields
 * the combined list to the user-supplied block.  A Qundef result
 * signals that iteration should stop.
 */
static VALUE
lazy_init_iterator(RB_BLOCK_CALL_FUNC_ARGLIST(val, m))
{
    VALUE result;
    if (argc == 1) {
        VALUE args[2];
        args[0] = m;
        args[1] = val;
        result = rb_yield_values2(2, args);
    }
    else {
        /* multiple values: build [m, argv...] in a temporary buffer */
        VALUE args;
        int len = rb_long2int((long)argc + 1);
        VALUE *nargv = ALLOCV_N(VALUE, args, len);

        nargv[0] = m;
        if (argc > 0) {
            MEMCPY(nargv + 1, argv, VALUE, argc);
        }
        result = rb_yield_values2(len, nargv);
        ALLOCV_END(args);
    }
    if (UNDEF_P(result)) rb_iter_break();
    return Qnil;
}
1599
/*
 * Generator block for Enumerator::Lazy.new: m is the wrapped object;
 * argv[0] is the yielder supplied by generator_each, which is threaded
 * through to lazy_init_iterator as its data argument while the
 * remaining arguments are forwarded to m's #each.
 */
static VALUE
lazy_init_block_i(RB_BLOCK_CALL_FUNC_ARGLIST(val, m))
{
    rb_block_call(m, id_each, argc-1, argv+1, lazy_init_iterator, val);
    return Qnil;
}
1606
/*
 * A lazy chain passes its current element around in a MEMO imemo:
 *   v1              - the [yielder, procs] context (set by lazy_init_yielder)
 *   memo_value (v2) - the value flowing down the proc chain
 *   memo_flags      - LAZY_MEMO_BREAK: stop the whole iteration once this
 *                     element has propagated; LAZY_MEMO_PACKED: memo_value
 *                     is an Array packing multiple yielded values.
 */
#define memo_value v2
#define memo_flags u3.state
#define LAZY_MEMO_BREAK 1
#define LAZY_MEMO_PACKED 2
#define LAZY_MEMO_BREAK_P(memo) ((memo)->memo_flags & LAZY_MEMO_BREAK)
#define LAZY_MEMO_PACKED_P(memo) ((memo)->memo_flags & LAZY_MEMO_PACKED)
#define LAZY_MEMO_SET_BREAK(memo) ((memo)->memo_flags |= LAZY_MEMO_BREAK)
#define LAZY_MEMO_RESET_BREAK(memo) ((memo)->memo_flags &= ~LAZY_MEMO_BREAK)
#define LAZY_MEMO_SET_VALUE(memo, value) MEMO_V2_SET(memo, value)
#define LAZY_MEMO_SET_PACKED(memo) ((memo)->memo_flags |= LAZY_MEMO_PACKED)
#define LAZY_MEMO_RESET_PACKED(memo) ((memo)->memo_flags &= ~LAZY_MEMO_PACKED)

/* Raise ArgumentError unless the lazy method received a block. */
#define LAZY_NEED_BLOCK(func) \
    if (!rb_block_given_p()) { \
        rb_raise(rb_eArgError, "tried to call lazy " #func " without a block"); \
    }

static VALUE lazy_yielder_result(struct MEMO *result, VALUE yielder, VALUE procs_array, VALUE memos, long i);
1625
/*
 * Entry point of a lazy chain's iteration: packs the yielded value(s)
 * into a fresh MEMO and runs it through the proc chain from index 0.
 * m is the [yielder, procs] pair built by lazy_init_block.
 */
static VALUE
lazy_init_yielder(RB_BLOCK_CALL_FUNC_ARGLIST(_, m))
{
    VALUE yielder = RARRAY_AREF(m, 0);
    VALUE procs_array = RARRAY_AREF(m, 1);
    VALUE memos = rb_attr_get(yielder, id_memo);
    struct MEMO *result;

    result = rb_imemo_memo_new(m, rb_enum_values_pack(argc, argv),
                               argc > 1 ? LAZY_MEMO_PACKED : 0);
    return lazy_yielder_result(result, yielder, procs_array, memos, 0);
}
1638
/*
 * Re-enters the proc chain at memo_index with new value(s); used by
 * entries (e.g. flat_map) that expand one element into several.
 */
static VALUE
lazy_yielder_yield(struct MEMO *result, long memo_index, int argc, const VALUE *argv)
{
    VALUE m = result->v1;
    VALUE yielder = RARRAY_AREF(m, 0);
    VALUE procs_array = RARRAY_AREF(m, 1);
    VALUE memos = rb_attr_get(yielder, id_memo);
    LAZY_MEMO_SET_VALUE(result, rb_enum_values_pack(argc, argv));
    if (argc > 1)
        LAZY_MEMO_SET_PACKED(result);
    else
        LAZY_MEMO_RESET_PACKED(result);
    return lazy_yielder_result(result, yielder, procs_array, memos, memo_index);
}
1653
/*
 * Runs `result` through the proc entries from index i onward.  An entry
 * returning 0 drops the element (or has already consumed it itself);
 * values that survive the whole chain are pushed to the yielder.  A
 * BREAK flag set by any entry stops the iteration via rb_iter_break.
 */
static VALUE
lazy_yielder_result(struct MEMO *result, VALUE yielder, VALUE procs_array, VALUE memos, long i)
{
    int cont = 1;

    for (; i < RARRAY_LEN(procs_array); i++) {
        VALUE proc = RARRAY_AREF(procs_array, i);
        struct proc_entry *entry = proc_entry_ptr(proc);
        if (!(*entry->fn->proc)(proc, result, memos, i)) {
            cont = 0;
            break;
        }
    }

    if (cont) {
        rb_funcall2(yielder, idLTLT, 1, &(result->memo_value));
    }
    if (LAZY_MEMO_BREAK_P(result)) {
        rb_iter_break();
    }
    return result->memo_value;
}
1676
/*
 * Generator block for chained lazy enumerators: m is [obj, procs].
 * Allocates the per-iteration memo array (one slot per proc entry,
 * stashed on the yielder `val`) and iterates obj#each through
 * lazy_init_yielder.
 */
static VALUE
lazy_init_block(RB_BLOCK_CALL_FUNC_ARGLIST(val, m))
{
    VALUE procs = RARRAY_AREF(m, 1);

    rb_ivar_set(val, id_memo, rb_ary_new2(RARRAY_LEN(procs)));
    rb_block_call(RARRAY_AREF(m, 0), id_each, 0, 0,
                  lazy_init_yielder, rb_ary_new3(2, val, procs));
    return Qnil;
}
1687
1688static VALUE
1689lazy_generator_init(VALUE enumerator, VALUE procs)
1690{
1692 VALUE obj;
1693 struct generator *gen_ptr;
1694 struct enumerator *e = enumerator_ptr(enumerator);
1695
1696 if (RARRAY_LEN(procs) > 0) {
1697 struct generator *old_gen_ptr = generator_ptr(e->obj);
1698 obj = old_gen_ptr->obj;
1699 }
1700 else {
1701 obj = enumerator;
1702 }
1703
1704 generator = generator_allocate(rb_cGenerator);
1705
1706 rb_block_call(generator, id_initialize, 0, 0,
1707 lazy_init_block, rb_ary_new3(2, obj, procs));
1708
1709 gen_ptr = generator_ptr(generator);
1710 RB_OBJ_WRITE(generator, &gen_ptr->obj, obj);
1711
1712 return generator;
1713}
1714
1715static int
1716lazy_precheck(VALUE procs)
1717{
1718 if (RTEST(procs)) {
1719 long num_procs = RARRAY_LEN(procs), i = num_procs;
1720 while (i-- > 0) {
1721 VALUE proc = RARRAY_AREF(procs, i);
1722 struct proc_entry *entry = proc_entry_ptr(proc);
1723 lazyenum_precheck_func *precheck = entry->fn->precheck;
1724 if (precheck && !precheck(proc)) return FALSE;
1725 }
1726 }
1727
1728 return TRUE;
1729}
1730
1731/*
1732 * Document-class: Enumerator::Lazy
1733 *
1734 * Enumerator::Lazy is a special type of Enumerator, that allows constructing
1735 * chains of operations without evaluating them immediately, and evaluating
1736 * values on as-needed basis. In order to do so it redefines most of Enumerable
1737 * methods so that they just construct another lazy enumerator.
1738 *
1739 * Enumerator::Lazy can be constructed from any Enumerable with the
1740 * Enumerable#lazy method.
1741 *
1742 * lazy = (1..Float::INFINITY).lazy.select(&:odd?).drop(10).take_while { |i| i < 30 }
1743 * # => #<Enumerator::Lazy: #<Enumerator::Lazy: #<Enumerator::Lazy: #<Enumerator::Lazy: 1..Infinity>:select>:drop(10)>:take_while>
1744 *
1745 * The real enumeration is performed when any non-redefined Enumerable method
1746 * is called, like Enumerable#first or Enumerable#to_a (the latter is aliased
1747 * as #force for more semantic code):
1748 *
1749 * lazy.first(2)
1750 * #=> [21, 23]
1751 *
1752 * lazy.force
1753 * #=> [21, 23, 25, 27, 29]
1754 *
1755 * Note that most Enumerable methods that could be called with or without
1756 * a block, on Enumerator::Lazy will always require a block:
1757 *
1758 * [1, 2, 3].map #=> #<Enumerator: [1, 2, 3]:map>
1759 * [1, 2, 3].lazy.map # ArgumentError: tried to call lazy map without a block
1760 *
1761 * This class allows idiomatic calculations on long or infinite sequences, as well
1762 * as chaining of calculations without constructing intermediate arrays.
1763 *
1764 * Example for working with a slowly calculated sequence:
1765 *
1766 * require 'open-uri'
1767 *
1768 * # This will fetch all URLs before selecting
1769 * # necessary data
1770 * URLS.map { |u| JSON.parse(URI.open(u).read) }
1771 * .select { |data| data.key?('stats') }
1772 * .first(5)
1773 *
1774 * # This will fetch URLs one-by-one, only till
1775 * # there is enough data to satisfy the condition
1776 * URLS.lazy.map { |u| JSON.parse(URI.open(u).read) }
1777 * .select { |data| data.key?('stats') }
1778 * .first(5)
1779 *
1780 * Ending a chain with ".eager" generates a non-lazy enumerator, which
1781 * is suitable for returning or passing to another method that expects
1782 * a normal enumerator.
1783 *
1784 * def active_items
1785 * groups
1786 * .lazy
1787 * .flat_map(&:items)
1788 * .reject(&:disabled)
1789 * .eager
1790 * end
1791 *
1792 * # This works lazily; if a checked item is found, it stops
1793 * # iteration and does not look into remaining groups.
1794 * first_checked = active_items.find(&:checked)
1795 *
1796 * # This returns an array of items like a normal enumerator does.
1797 * all_checked = active_items.select(&:checked)
1798 *
1799 */
1800
1801/*
1802 * call-seq:
1803 * Lazy.new(obj, size=nil) { |yielder, *values| block }
1804 *
1805 * Creates a new Lazy enumerator. When the enumerator is actually enumerated
1806 * (e.g. by calling #force), +obj+ will be enumerated and each value passed
1807 * to the given block. The block can yield values back using +yielder+.
1808 * For example, to create a "filter+map" enumerator:
1809 *
1810 * def filter_map(sequence)
1811 * Lazy.new(sequence) do |yielder, *values|
1812 * result = yield *values
1813 * yielder << result if result
1814 * end
1815 * end
1816 *
1817 * filter_map(1..Float::INFINITY) {|i| i*i if i.even?}.first(5)
1818 * #=> [4, 16, 36, 64, 100]
1819 */
1820static VALUE
1821lazy_initialize(int argc, VALUE *argv, VALUE self)
1822{
1823 VALUE obj, size = Qnil;
1825
1826 rb_check_arity(argc, 1, 2);
1827 LAZY_NEED_BLOCK(new);
1828 obj = argv[0];
1829 if (argc > 1) {
1830 size = argv[1];
1831 }
1832 generator = generator_allocate(rb_cGenerator);
1833 rb_block_call(generator, id_initialize, 0, 0, lazy_init_block_i, obj);
1834 enumerator_init(self, generator, sym_each, 0, 0, 0, size, 0);
1835 rb_ivar_set(self, id_receiver, obj);
1836
1837 return self;
1838}
1839
1840#if 0 /* for RDoc */
1841/*
1842 * call-seq:
1843 * lazy.to_a -> array
1844 * lazy.force -> array
1845 *
1846 * Expands +lazy+ enumerator to an array.
1847 * See Enumerable#to_a.
1848 */
1849static VALUE
1850lazy_to_a(VALUE self)
1851{
1852}
1853#endif
1854
1855static void
1856lazy_set_args(VALUE lazy, VALUE args)
1857{
1858 ID id = rb_frame_this_func();
1859 rb_ivar_set(lazy, id_method, ID2SYM(id));
1860 if (NIL_P(args)) {
1861 /* Qfalse indicates that the arguments are empty */
1862 rb_ivar_set(lazy, id_arguments, Qfalse);
1863 }
1864 else {
1865 rb_ivar_set(lazy, id_arguments, args);
1866 }
1867}
1868
1869#if 0
1870static VALUE
1871lazy_set_method(VALUE lazy, VALUE args, rb_enumerator_size_func *size_fn)
1872{
1873 struct enumerator *e = enumerator_ptr(lazy);
1874 lazy_set_args(lazy, args);
1875 e->size_fn = size_fn;
1876 return lazy;
1877}
1878#endif
1879
1880static VALUE
1881lazy_add_method(VALUE obj, int argc, VALUE *argv, VALUE args, VALUE memo,
1882 const lazyenum_funcs *fn)
1883{
1884 struct enumerator *new_e;
1885 VALUE new_obj;
1886 VALUE new_generator;
1887 VALUE new_procs;
1888 struct enumerator *e = enumerator_ptr(obj);
1889 struct proc_entry *entry;
1891 &proc_entry_data_type, entry);
1892 if (rb_block_given_p()) {
1893 RB_OBJ_WRITE(entry_obj, &entry->proc, rb_block_proc());
1894 }
1895 entry->fn = fn;
1896 RB_OBJ_WRITE(entry_obj, &entry->memo, args);
1897
1898 lazy_set_args(entry_obj, memo);
1899
1900 new_procs = RTEST(e->procs) ? rb_ary_dup(e->procs) : rb_ary_new();
1901 new_generator = lazy_generator_init(obj, new_procs);
1902 rb_ary_push(new_procs, entry_obj);
1903
1904 new_obj = enumerator_init_copy(enumerator_allocate(rb_cLazy), obj);
1905 new_e = RTYPEDDATA_GET_DATA(new_obj);
1906 RB_OBJ_WRITE(new_obj, &new_e->obj, new_generator);
1907 RB_OBJ_WRITE(new_obj, &new_e->procs, new_procs);
1908
1909 if (argc > 0) {
1910 new_e->meth = rb_to_id(*argv++);
1911 --argc;
1912 }
1913 else {
1914 new_e->meth = id_each;
1915 }
1916
1917 RB_OBJ_WRITE(new_obj, &new_e->args, rb_ary_new4(argc, argv));
1918
1919 return new_obj;
1920}
1921
1922/*
1923 * call-seq:
1924 * e.lazy -> lazy_enumerator
1925 *
1926 * Returns an Enumerator::Lazy, which redefines most Enumerable
1927 * methods to postpone enumeration and enumerate values only on an
1928 * as-needed basis.
1929 *
1930 * === Example
1931 *
1932 * The following program finds pythagorean triples:
1933 *
1934 * def pythagorean_triples
1935 * (1..Float::INFINITY).lazy.flat_map {|z|
1936 * (1..z).flat_map {|x|
1937 * (x..z).select {|y|
1938 * x**2 + y**2 == z**2
1939 * }.map {|y|
1940 * [x, y, z]
1941 * }
1942 * }
1943 * }
1944 * end
1945 * # show first ten pythagorean triples
1946 * p pythagorean_triples.take(10).force # take is lazy, so force is needed
1947 * p pythagorean_triples.first(10) # first is eager
1948 * # show pythagorean triples less than 100
1949 * p pythagorean_triples.take_while { |*, z| z < 100 }.force
1950 */
/* Implements Enumerable#lazy: wraps the receiver's #each in a fresh
 * Enumerator::Lazy with no chained operations. */
static VALUE
enumerable_lazy(VALUE obj)
{
    VALUE result = lazy_to_enum_i(obj, sym_each, 0, 0, lazyenum_size, rb_keyword_given_p());
    /* Qfalse indicates that the Enumerator::Lazy has no method name */
    rb_ivar_set(result, id_method, Qfalse);
    return result;
}
1959
1960static VALUE
1961lazy_to_enum_i(VALUE obj, VALUE meth, int argc, const VALUE *argv, rb_enumerator_size_func *size_fn, int kw_splat)
1962{
1963 return enumerator_init(enumerator_allocate(rb_cLazy),
1964 obj, meth, argc, argv, size_fn, Qnil, kw_splat);
1965}
1966
1967/*
1968 * call-seq:
1969 * lzy.to_enum(method = :each, *args) -> lazy_enum
1970 * lzy.enum_for(method = :each, *args) -> lazy_enum
1971 * lzy.to_enum(method = :each, *args) {|*args| block } -> lazy_enum
1972 * lzy.enum_for(method = :each, *args) {|*args| block } -> lazy_enum
1973 *
1974 * Similar to Object#to_enum, except it returns a lazy enumerator.
1975 * This makes it easy to define Enumerable methods that will
1976 * naturally remain lazy if called from a lazy enumerator.
1977 *
1978 * For example, continuing from the example in Object#to_enum:
1979 *
1980 * # See Object#to_enum for the definition of repeat
1981 * r = 1..Float::INFINITY
1982 * r.repeat(2).first(5) # => [1, 1, 2, 2, 3]
1983 * r.repeat(2).class # => Enumerator
1984 * r.repeat(2).map{|n| n ** 2}.first(5) # => endless loop!
1985 * # works naturally on lazy enumerator:
1986 * r.lazy.repeat(2).class # => Enumerator::Lazy
1987 * r.lazy.repeat(2).map{|n| n ** 2}.first(5) # => [1, 1, 4, 4, 9]
1988 */
1989
/*
 * Implements Lazy#to_enum/#enum_for.  Method names found in the
 * lazy_use_super_method table are remapped (presumably to the original,
 * non-lazy implementations — see where the table is built) so the
 * result is not lazified twice.  A given block becomes the size
 * calculator of the returned enumerator.
 */
static VALUE
lazy_to_enum(int argc, VALUE *argv, VALUE self)
{
    VALUE lazy, meth = sym_each, super_meth;

    if (argc > 0) {
        --argc;
        meth = *argv++;
    }
    if (RTEST((super_meth = rb_hash_aref(lazy_use_super_method, meth)))) {
        meth = super_meth;
    }
    lazy = lazy_to_enum_i(self, meth, argc, argv, 0, rb_keyword_given_p());
    if (rb_block_given_p()) {
        RB_OBJ_WRITE(lazy, &enumerator_ptr(lazy)->size, rb_block_proc());
    }
    return lazy;
}
2008
/* Size function for the eager wrapper: delegate to the receiver. */
static VALUE
lazy_eager_size(VALUE self, VALUE args, VALUE eobj)
{
    return enum_size(self);
}
2014
2015/*
2016 * call-seq:
2017 * lzy.eager -> enum
2018 *
2019 * Returns a non-lazy Enumerator converted from the lazy enumerator.
2020 */
2021
2022static VALUE
2023lazy_eager(VALUE self)
2024{
2025 return enumerator_init(enumerator_allocate(rb_cEnumerator),
2026 self, sym_each, 0, 0, lazy_eager_size, Qnil, 0);
2027}
2028
/* Call the entry's stored block with the current chain value,
 * passed as a single argument (packed or not). */
static VALUE
lazyenum_yield(VALUE proc_entry, struct MEMO *result)
{
    struct proc_entry *entry = proc_entry_ptr(proc_entry);
    return rb_proc_call_with_block(entry->proc, 1, &result->memo_value, Qnil);
}
2035
/* Call the entry's stored block; a PACKED value (Array of multiple
 * yielded values) is unpacked into separate block arguments. */
static VALUE
lazyenum_yield_values(VALUE proc_entry, struct MEMO *result)
{
    struct proc_entry *entry = proc_entry_ptr(proc_entry);
    int argc = 1;
    const VALUE *argv = &result->memo_value;
    if (LAZY_MEMO_PACKED_P(result)) {
        const VALUE args = *argv;
        argc = RARRAY_LENINT(args);
        argv = RARRAY_CONST_PTR(args);
    }
    return rb_proc_call_with_block(entry->proc, argc, argv, Qnil);
}
2049
2050static struct MEMO *
2051lazy_map_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2052{
2053 VALUE value = lazyenum_yield_values(proc_entry, result);
2054 LAZY_MEMO_SET_VALUE(result, value);
2055 LAZY_MEMO_RESET_PACKED(result);
2056 return result;
2057}
2058
2059static VALUE
2060lazy_map_size(VALUE entry, VALUE receiver)
2061{
2062 return receiver;
2063}
2064
2065static const lazyenum_funcs lazy_map_funcs = {
2066 lazy_map_proc, lazy_map_size,
2067};
2068
2069/*
2070 * call-seq:
2071 * lazy.collect { |obj| block } -> lazy_enumerator
2072 * lazy.map { |obj| block } -> lazy_enumerator
2073 *
2074 * Like Enumerable#map, but chains operation to be lazy-evaluated.
2075 *
2076 * (1..Float::INFINITY).lazy.map {|i| i**2 }
2077 * #=> #<Enumerator::Lazy: #<Enumerator::Lazy: 1..Infinity>:map>
2078 * (1..Float::INFINITY).lazy.map {|i| i**2 }.first(3)
2079 * #=> [1, 4, 9]
2080 */
2081
static VALUE
lazy_map(VALUE obj)
{
    LAZY_NEED_BLOCK(map);
    /* no extra state: the block alone defines the mapping */
    return lazy_add_method(obj, 0, 0, Qnil, Qnil, &lazy_map_funcs);
}
2088
/* Arguments threaded through lazy_flat_map_i back into the proc chain.
 * (Fix: the opening `struct flat_map_i_arg {` line had been lost,
 * leaving the members dangling at file scope.) */
struct flat_map_i_arg {
    struct MEMO *result;    /* current chain MEMO */
    long index;             /* proc index at which to resume the chain */
};
2093
/* Block function used when flat_map expands a lazy-like value: feeds
 * each inner element back into the chain after the flat_map entry. */
static VALUE
lazy_flat_map_i(RB_BLOCK_CALL_FUNC_ARGLIST(i, y))
{
    struct flat_map_i_arg *arg = (struct flat_map_i_arg *)y;

    return lazy_yielder_yield(arg->result, arg->index, argc, argv);
}
2101
/*
 * flat_map: if the block's result is an Array (or converts via
 * to_ary), or responds to both #force and #each (i.e. is lazy-like),
 * its elements are fed individually back into the chain; otherwise the
 * value passes through unchanged.  Returns 0 when the expansion has
 * already pushed all elements itself (the last array element falls
 * through so the normal chain continuation handles it).
 */
static struct MEMO *
lazy_flat_map_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
{
    VALUE value = lazyenum_yield_values(proc_entry, result);
    VALUE ary = 0;
    const long proc_index = memo_index + 1;
    int break_p = LAZY_MEMO_BREAK_P(result);

    if (RB_TYPE_P(value, T_ARRAY)) {
        ary = value;
    }
    else if (rb_respond_to(value, id_force) && rb_respond_to(value, id_each)) {
        /* lazy-like object: iterate it through the rest of the chain */
        struct flat_map_i_arg arg = {.result = result, .index = proc_index};
        LAZY_MEMO_RESET_BREAK(result);
        rb_block_call(value, id_each, 0, 0, lazy_flat_map_i, (VALUE)&arg);
        if (break_p) LAZY_MEMO_SET_BREAK(result);
        return 0;
    }

    if (ary || !NIL_P(ary = rb_check_array_type(value))) {
        long i;
        LAZY_MEMO_RESET_BREAK(result);
        /* push all elements but the last; the last continues the chain */
        for (i = 0; i + 1 < RARRAY_LEN(ary); i++) {
            const VALUE argv = RARRAY_AREF(ary, i);
            lazy_yielder_yield(result, proc_index, 1, &argv);
        }
        if (break_p) LAZY_MEMO_SET_BREAK(result);
        if (i >= RARRAY_LEN(ary)) return 0;
        value = RARRAY_AREF(ary, i);
    }
    LAZY_MEMO_SET_VALUE(result, value);
    LAZY_MEMO_RESET_PACKED(result);
    return result;
}

static const lazyenum_funcs lazy_flat_map_funcs = {
    lazy_flat_map_proc, 0,
};
2140
2141/*
2142 * call-seq:
2143 * lazy.collect_concat { |obj| block } -> a_lazy_enumerator
2144 * lazy.flat_map { |obj| block } -> a_lazy_enumerator
2145 *
2146 * Returns a new lazy enumerator with the concatenated results of running
2147 * +block+ once for every element in the lazy enumerator.
2148 *
2149 * ["foo", "bar"].lazy.flat_map {|i| i.each_char.lazy}.force
2150 * #=> ["f", "o", "o", "b", "a", "r"]
2151 *
2152 * A value +x+ returned by +block+ is decomposed if either of
2153 * the following conditions is true:
2154 *
2155 * * +x+ responds to both each and force, which means that
2156 * +x+ is a lazy enumerator.
2157 * * +x+ is an array or responds to to_ary.
2158 *
2159 * Otherwise, +x+ is contained as-is in the return value.
2160 *
2161 * [{a:1}, {b:2}].lazy.flat_map {|i| i}.force
2162 * #=> [{:a=>1}, {:b=>2}]
2163 */
static VALUE
lazy_flat_map(VALUE obj)
{
    LAZY_NEED_BLOCK(flat_map);
    /* no size function: the expansion makes the size unpredictable */
    return lazy_add_method(obj, 0, 0, Qnil, Qnil, &lazy_flat_map_funcs);
}
2170
2171static struct MEMO *
2172lazy_select_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2173{
2174 VALUE chain = lazyenum_yield(proc_entry, result);
2175 if (!RTEST(chain)) return 0;
2176 return result;
2177}
2178
2179static const lazyenum_funcs lazy_select_funcs = {
2180 lazy_select_proc, 0,
2181};
2182
2183/*
2184 * call-seq:
2185 * lazy.find_all { |obj| block } -> lazy_enumerator
2186 * lazy.select { |obj| block } -> lazy_enumerator
2187 * lazy.filter { |obj| block } -> lazy_enumerator
2188 *
2189 * Like Enumerable#select, but chains operation to be lazy-evaluated.
2190 */
static VALUE
lazy_select(VALUE obj)
{
    LAZY_NEED_BLOCK(select);
    /* no size function: filtering makes the size unpredictable */
    return lazy_add_method(obj, 0, 0, Qnil, Qnil, &lazy_select_funcs);
}
2197
2198static struct MEMO *
2199lazy_filter_map_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2200{
2201 VALUE value = lazyenum_yield_values(proc_entry, result);
2202 if (!RTEST(value)) return 0;
2203 LAZY_MEMO_SET_VALUE(result, value);
2204 LAZY_MEMO_RESET_PACKED(result);
2205 return result;
2206}
2207
2208static const lazyenum_funcs lazy_filter_map_funcs = {
2209 lazy_filter_map_proc, 0,
2210};
2211
2212/*
2213 * call-seq:
2214 * lazy.filter_map { |obj| block } -> lazy_enumerator
2215 *
2216 * Like Enumerable#filter_map, but chains operation to be lazy-evaluated.
2217 *
2218 * (1..).lazy.filter_map { |i| i * 2 if i.even? }.first(5)
2219 * #=> [4, 8, 12, 16, 20]
2220 */
2221
static VALUE
lazy_filter_map(VALUE obj)
{
    LAZY_NEED_BLOCK(filter_map);
    /* no size function: filtering makes the size unpredictable */
    return lazy_add_method(obj, 0, 0, Qnil, Qnil, &lazy_filter_map_funcs);
}
2228
2229static struct MEMO *
2230lazy_reject_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2231{
2232 VALUE chain = lazyenum_yield(proc_entry, result);
2233 if (RTEST(chain)) return 0;
2234 return result;
2235}
2236
2237static const lazyenum_funcs lazy_reject_funcs = {
2238 lazy_reject_proc, 0,
2239};
2240
2241/*
2242 * call-seq:
2243 * lazy.reject { |obj| block } -> lazy_enumerator
2244 *
2245 * Like Enumerable#reject, but chains operation to be lazy-evaluated.
2246 */
2247
static VALUE
lazy_reject(VALUE obj)
{
    LAZY_NEED_BLOCK(reject);
    /* no size function: filtering makes the size unpredictable */
    return lazy_add_method(obj, 0, 0, Qnil, Qnil, &lazy_reject_funcs);
}
2254
/* grep without a block: keep elements matched by pattern (via ===). */
static struct MEMO *
lazy_grep_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
{
    struct proc_entry *entry = proc_entry_ptr(proc_entry);
    VALUE chain = rb_funcall(entry->memo, id_eqq, 1, result->memo_value);
    if (!RTEST(chain)) return 0;
    return result;
}

/* grep with a block: matched elements are mapped through the block. */
static struct MEMO *
lazy_grep_iter_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
{
    struct proc_entry *entry = proc_entry_ptr(proc_entry);
    VALUE value, chain = rb_funcall(entry->memo, id_eqq, 1, result->memo_value);

    if (!RTEST(chain)) return 0;
    value = rb_proc_call_with_block(entry->proc, 1, &(result->memo_value), Qnil);
    LAZY_MEMO_SET_VALUE(result, value);
    LAZY_MEMO_RESET_PACKED(result);

    return result;
}

static const lazyenum_funcs lazy_grep_iter_funcs = {
    lazy_grep_iter_proc, 0,
};

static const lazyenum_funcs lazy_grep_funcs = {
    lazy_grep_proc, 0,
};
2285
2286/*
2287 * call-seq:
2288 * lazy.grep(pattern) -> lazy_enumerator
2289 * lazy.grep(pattern) { |obj| block } -> lazy_enumerator
2290 *
2291 * Like Enumerable#grep, but chains operation to be lazy-evaluated.
2292 */
2293
2294static VALUE
2295lazy_grep(VALUE obj, VALUE pattern)
2296{
2297 const lazyenum_funcs *const funcs = rb_block_given_p() ?
2298 &lazy_grep_iter_funcs : &lazy_grep_funcs;
2299 return lazy_add_method(obj, 0, 0, pattern, rb_ary_new3(1, pattern), funcs);
2300}
2301
/* grep_v without a block: keep elements NOT matched by pattern. */
static struct MEMO *
lazy_grep_v_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
{
    struct proc_entry *entry = proc_entry_ptr(proc_entry);
    VALUE chain = rb_funcall(entry->memo, id_eqq, 1, result->memo_value);
    if (RTEST(chain)) return 0;
    return result;
}

/* grep_v with a block: non-matching elements are mapped through it. */
static struct MEMO *
lazy_grep_v_iter_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
{
    struct proc_entry *entry = proc_entry_ptr(proc_entry);
    VALUE value, chain = rb_funcall(entry->memo, id_eqq, 1, result->memo_value);

    if (RTEST(chain)) return 0;
    value = rb_proc_call_with_block(entry->proc, 1, &(result->memo_value), Qnil);
    LAZY_MEMO_SET_VALUE(result, value);
    LAZY_MEMO_RESET_PACKED(result);

    return result;
}

static const lazyenum_funcs lazy_grep_v_iter_funcs = {
    lazy_grep_v_iter_proc, 0,
};

static const lazyenum_funcs lazy_grep_v_funcs = {
    lazy_grep_v_proc, 0,
};
2332
2333/*
2334 * call-seq:
2335 * lazy.grep_v(pattern) -> lazy_enumerator
2336 * lazy.grep_v(pattern) { |obj| block } -> lazy_enumerator
2337 *
2338 * Like Enumerable#grep_v, but chains operation to be lazy-evaluated.
2339 */
2340
2341static VALUE
2342lazy_grep_v(VALUE obj, VALUE pattern)
2343{
2344 const lazyenum_funcs *const funcs = rb_block_given_p() ?
2345 &lazy_grep_v_iter_funcs : &lazy_grep_v_funcs;
2346 return lazy_add_method(obj, 0, 0, pattern, rb_ary_new3(1, pattern), funcs);
2347}
2348
/* rb_rescue2 body callback: fetch the next value from an enumerator. */
static VALUE
call_next(VALUE obj)
{
    return rb_funcall(obj, id_next, 0);
}
2354
/* rb_rescue2 rescue callback: an exhausted enumerator pads with nil. */
static VALUE
next_stopped(VALUE obj, VALUE _)
{
    return Qnil;
}
2360
/* Lazy #zip fast path used when every argument converted to an Array:
 * index each array with a per-entry counter kept in memos[memo_index]. */
static struct MEMO *
lazy_zip_arrays_func(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
{
    struct proc_entry *entry = proc_entry_ptr(proc_entry);
    VALUE ary, arrays = entry->memo;
    VALUE memo = rb_ary_entry(memos, memo_index);
    long i, count = NIL_P(memo) ? 0 : NUM2LONG(memo); /* first call: start at 0 */

    ary = rb_ary_new2(RARRAY_LEN(arrays) + 1);
    rb_ary_push(ary, result->memo_value);
    for (i = 0; i < RARRAY_LEN(arrays); i++) {
        /* rb_ary_entry returns nil past the end, matching zip's padding */
        rb_ary_push(ary, rb_ary_entry(RARRAY_AREF(arrays, i), count));
    }
    LAZY_MEMO_SET_VALUE(result, ary);
    rb_ary_store(memos, memo_index, LONG2NUM(++count));
    return result;
}
2378
2379static struct MEMO *
2380lazy_zip_func(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2381{
2382 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2383 VALUE arg = rb_ary_entry(memos, memo_index);
2384 VALUE zip_args = entry->memo;
2385 VALUE ary, v;
2386 long i;
2387
2388 if (NIL_P(arg)) {
2389 arg = rb_ary_new2(RARRAY_LEN(zip_args));
2390 for (i = 0; i < RARRAY_LEN(zip_args); i++) {
2391 rb_ary_push(arg, rb_funcall(RARRAY_AREF(zip_args, i), id_to_enum, 0));
2392 }
2393 rb_ary_store(memos, memo_index, arg);
2394 }
2395
2396 ary = rb_ary_new2(RARRAY_LEN(arg) + 1);
2397 rb_ary_push(ary, result->memo_value);
2398 for (i = 0; i < RARRAY_LEN(arg); i++) {
2399 v = rb_rescue2(call_next, RARRAY_AREF(arg, i), next_stopped, 0,
2401 rb_ary_push(ary, v);
2402 }
2403 LAZY_MEMO_SET_VALUE(result, ary);
2404 return result;
2405}
2406
/* [0]: generic enumerator-based zip, [1]: all-Array fast path.
 * Size of a zip chain equals the receiver's size. */
static const lazyenum_funcs lazy_zip_funcs[] = {
    {lazy_zip_func, lazy_receiver_size,},
    {lazy_zip_arrays_func, lazy_receiver_size,},
};
2411
2412/*
2413 * call-seq:
2414 * lazy.zip(arg, ...) -> lazy_enumerator
2415 * lazy.zip(arg, ...) { |arr| block } -> nil
2416 *
2417 * Like Enumerable#zip, but chains operation to be lazy-evaluated.
2418 * However, if a block is given to zip, values are enumerated immediately.
2419 */
static VALUE
lazy_zip(int argc, VALUE *argv, VALUE obj)
{
    VALUE ary, v;
    long i;
    const lazyenum_funcs *funcs = &lazy_zip_funcs[1]; /* assume all-array fast path */

    /* With a block, zip enumerates immediately (documented non-lazy case). */
    if (rb_block_given_p()) {
        return rb_call_super(argc, argv);
    }

    ary = rb_ary_new2(argc);
    for (i = 0; i < argc; i++) {
        v = rb_check_array_type(argv[i]);
        if (NIL_P(v)) {
            /* Not all args convert to Array: verify the remaining ones are
             * at least enumerable, then use the generic per-enumerator path. */
            for (; i < argc; i++) {
                if (!rb_respond_to(argv[i], id_each)) {
                    rb_raise(rb_eTypeError, "wrong argument type %"PRIsVALUE" (must respond to :each)",
                             rb_obj_class(argv[i]));
                }
            }
            ary = rb_ary_new4(argc, argv);
            funcs = &lazy_zip_funcs[0];
            break;
        }
        rb_ary_push(ary, v);
    }

    return lazy_add_method(obj, 0, 0, ary, ary, funcs);
}
2450
/* Lazy #take: pass values through while counting down the remaining
 * quota (memos[memo_index]); set BREAK on the last element taken.
 * take(0) never reaches here thanks to lazy_take_precheck. */
static struct MEMO *
lazy_take_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
{
    long remain;
    struct proc_entry *entry = proc_entry_ptr(proc_entry);
    VALUE memo = rb_ary_entry(memos, memo_index);

    if (NIL_P(memo)) {
        memo = entry->memo; /* first call: quota comes from the take(n) argument */
    }

    remain = NUM2LONG(memo);
    if (--remain == 0) LAZY_MEMO_SET_BREAK(result);
    rb_ary_store(memos, memo_index, LONG2NUM(remain));
    return result;
}
2467
2468static VALUE
2469lazy_take_size(VALUE entry, VALUE receiver)
2470{
2471 long len = NUM2LONG(RARRAY_AREF(rb_ivar_get(entry, id_arguments), 0));
2472 if (NIL_P(receiver) || (FIXNUM_P(receiver) && FIX2LONG(receiver) < len))
2473 return receiver;
2474 return LONG2NUM(len);
2475}
2476
/* take(0) yields nothing: the precheck lets the chain skip upstream
 * iteration entirely in that case. */
static int
lazy_take_precheck(VALUE proc_entry)
{
    struct proc_entry *entry = proc_entry_ptr(proc_entry);
    return entry->memo != INT2FIX(0);
}
2483
/* Callback table for Enumerator::Lazy#take (proc, size, precheck). */
static const lazyenum_funcs lazy_take_funcs = {
    lazy_take_proc, lazy_take_size, lazy_take_precheck,
};
2487
2488/*
2489 * call-seq:
2490 * lazy.take(n) -> lazy_enumerator
2491 *
2492 * Like Enumerable#take, but chains operation to be lazy-evaluated.
2493 */
2494
static VALUE
lazy_take(VALUE obj, VALUE n)
{
    long len = NUM2LONG(n); /* validates n converts to an integer */

    if (len < 0) {
        rb_raise(rb_eArgError, "attempt to take negative size");
    }

    n = LONG2NUM(len); /* no more conversion */

    return lazy_add_method(obj, 0, 0, n, rb_ary_new3(1, n), &lazy_take_funcs);
}
2508
/* Lazy #take_while: yield the value to the block; the first falsy result
 * both drops the value (return 0) and stops the whole chain (BREAK). */
static struct MEMO *
lazy_take_while_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
{
    VALUE take = lazyenum_yield_values(proc_entry, result);
    if (!RTEST(take)) {
        LAZY_MEMO_SET_BREAK(result);
        return 0;
    }
    return result;
}
2519
/* Callback table for Enumerator::Lazy#take_while; size is unknowable. */
static const lazyenum_funcs lazy_take_while_funcs = {
    lazy_take_while_proc, 0,
};
2523
2524/*
2525 * call-seq:
2526 * lazy.take_while { |obj| block } -> lazy_enumerator
2527 *
2528 * Like Enumerable#take_while, but chains operation to be lazy-evaluated.
2529 */
2530
static VALUE
lazy_take_while(VALUE obj)
{
    LAZY_NEED_BLOCK(take_while); /* raises ArgumentError without a block */
    return lazy_add_method(obj, 0, 0, Qnil, Qnil, &lazy_take_while_funcs);
}
2537
2538static VALUE
2539lazy_drop_size(VALUE proc_entry, VALUE receiver)
2540{
2541 long len = NUM2LONG(RARRAY_AREF(rb_ivar_get(proc_entry, id_arguments), 0));
2542 if (NIL_P(receiver))
2543 return receiver;
2544 if (FIXNUM_P(receiver)) {
2545 len = FIX2LONG(receiver) - len;
2546 return LONG2FIX(len < 0 ? 0 : len);
2547 }
2548 return rb_funcall(receiver, '-', 1, LONG2NUM(len));
2549}
2550
/* Lazy #drop: swallow values (return 0) while the remaining drop count
 * (memos[memo_index]) is positive, then pass everything through. */
static struct MEMO *
lazy_drop_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
{
    long remain;
    struct proc_entry *entry = proc_entry_ptr(proc_entry);
    VALUE memo = rb_ary_entry(memos, memo_index);

    if (NIL_P(memo)) {
        memo = entry->memo; /* first call: count comes from the drop(n) argument */
    }
    remain = NUM2LONG(memo);
    if (remain > 0) {
        --remain;
        rb_ary_store(memos, memo_index, LONG2NUM(remain));
        return 0;
    }

    return result;
}
2570
/* Callback table for Enumerator::Lazy#drop. */
static const lazyenum_funcs lazy_drop_funcs = {
    lazy_drop_proc, lazy_drop_size,
};
2574
2575/*
2576 * call-seq:
2577 * lazy.drop(n) -> lazy_enumerator
2578 *
2579 * Like Enumerable#drop, but chains operation to be lazy-evaluated.
2580 */
2581
static VALUE
lazy_drop(VALUE obj, VALUE n)
{
    long len = NUM2LONG(n);
    /* record (:each, n) as the chained method's arguments (used by #inspect) */
    VALUE argv[2];
    argv[0] = sym_each;
    argv[1] = n;

    if (len < 0) {
        rb_raise(rb_eArgError, "attempt to drop negative size");
    }

    return lazy_add_method(obj, 2, argv, n, rb_ary_new3(1, n), &lazy_drop_funcs);
}
2596
/* Lazy #drop_while: while the "dropping" flag (memos[memo_index], seeded
 * false) is unset, test each value with the block; the first falsy test
 * flips the flag and lets that value and all later ones through. */
static struct MEMO *
lazy_drop_while_proc(VALUE proc_entry, struct MEMO* result, VALUE memos, long memo_index)
{
    struct proc_entry *entry = proc_entry_ptr(proc_entry);
    VALUE memo = rb_ary_entry(memos, memo_index);

    if (NIL_P(memo)) {
        memo = entry->memo; /* initial flag value: Qfalse (still dropping) */
    }

    if (!RTEST(memo)) {
        VALUE drop = lazyenum_yield_values(proc_entry, result);
        if (RTEST(drop)) return 0; /* still in the dropped prefix */
        rb_ary_store(memos, memo_index, Qtrue);
    }
    return result;
}
2614
/* Callback table for Enumerator::Lazy#drop_while; size is unknowable. */
static const lazyenum_funcs lazy_drop_while_funcs = {
    lazy_drop_while_proc, 0,
};
2618
2619/*
2620 * call-seq:
2621 * lazy.drop_while { |obj| block } -> lazy_enumerator
2622 *
2623 * Like Enumerable#drop_while, but chains operation to be lazy-evaluated.
2624 */
2625
static VALUE
lazy_drop_while(VALUE obj)
{
    LAZY_NEED_BLOCK(drop_while); /* raises ArgumentError without a block */
    /* memo Qfalse = "have not stopped dropping yet" */
    return lazy_add_method(obj, 0, 0, Qfalse, Qnil, &lazy_drop_while_funcs);
}
2632
/* Shared helper for lazy #uniq: records `chain` (the uniqueness key) in a
 * hidden hash kept in memos[memo_index]; returns nonzero when the key was
 * already present (i.e. the element is a duplicate). */
static int
lazy_uniq_check(VALUE chain, VALUE memos, long memo_index)
{
    VALUE hash = rb_ary_entry(memos, memo_index);

    if (NIL_P(hash)) {
        hash = rb_obj_hide(rb_hash_new()); /* hidden: invisible to Ruby code/GC roots via ObjectSpace */
        rb_ary_store(memos, memo_index, hash);
    }

    return rb_hash_add_new_element(hash, chain, Qfalse);
}
2645
2646static struct MEMO *
2647lazy_uniq_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2648{
2649 if (lazy_uniq_check(result->memo_value, memos, memo_index)) return 0;
2650 return result;
2651}
2652
/* Lazy #uniq with a block: the block's return value is the uniqueness
 * key; the original element is what flows on through the chain. */
static struct MEMO *
lazy_uniq_iter_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
{
    VALUE chain = lazyenum_yield(proc_entry, result);

    if (lazy_uniq_check(chain, memos, memo_index)) return 0;
    return result;
}
2661
/* Callback tables for Enumerator::Lazy#uniq (with / without block). */
static const lazyenum_funcs lazy_uniq_iter_funcs = {
    lazy_uniq_iter_proc, 0,
};

static const lazyenum_funcs lazy_uniq_funcs = {
    lazy_uniq_proc, 0,
};
2669
2670/*
2671 * call-seq:
2672 * lazy.uniq -> lazy_enumerator
2673 * lazy.uniq { |item| block } -> lazy_enumerator
2674 *
2675 * Like Enumerable#uniq, but chains operation to be lazy-evaluated.
2676 */
2677
2678static VALUE
2679lazy_uniq(VALUE obj)
2680{
2681 const lazyenum_funcs *const funcs =
2682 rb_block_given_p() ? &lazy_uniq_iter_funcs : &lazy_uniq_funcs;
2683 return lazy_add_method(obj, 0, 0, Qnil, Qnil, funcs);
2684}
2685
2686static struct MEMO *
2687lazy_compact_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2688{
2689 if (NIL_P(result->memo_value)) return 0;
2690 return result;
2691}
2692
/* Callback table for Enumerator::Lazy#compact; size is unknowable. */
static const lazyenum_funcs lazy_compact_funcs = {
    lazy_compact_proc, 0,
};
2696
2697/*
2698 * call-seq:
2699 * lazy.compact -> lazy_enumerator
2700 *
2701 * Like Enumerable#compact, but chains operation to be lazy-evaluated.
2702 */
2703
static VALUE
lazy_compact(VALUE obj)
{
    return lazy_add_method(obj, 0, 0, Qnil, Qnil, &lazy_compact_funcs);
}
2709
/* Lazy #with_index: pair each value with a running index (kept in
 * memos[memo_index], seeded with the offset argument).  With a block the
 * block is called for its side effects and the original value flows on;
 * without one, a packed [value, index] pair is emitted. */
static struct MEMO *
lazy_with_index_proc(VALUE proc_entry, struct MEMO* result, VALUE memos, long memo_index)
{
    struct proc_entry *entry = proc_entry_ptr(proc_entry);
    VALUE memo = rb_ary_entry(memos, memo_index);
    VALUE argv[2];

    if (NIL_P(memo)) {
        memo = entry->memo; /* first call: start from the offset argument */
    }

    argv[0] = result->memo_value;
    argv[1] = memo;
    if (entry->proc) {
        rb_proc_call_with_block(entry->proc, 2, argv, Qnil);
        LAZY_MEMO_RESET_PACKED(result);
    }
    else {
        LAZY_MEMO_SET_VALUE(result, rb_ary_new_from_values(2, argv));
        LAZY_MEMO_SET_PACKED(result); /* downstream must splat [value, index] */
    }
    rb_ary_store(memos, memo_index, LONG2NUM(NUM2LONG(memo) + 1));
    return result;
}
2734
/* with_index neither adds nor removes elements: size is unchanged. */
static VALUE
lazy_with_index_size(VALUE proc, VALUE receiver)
{
    return receiver;
}
2740
/* Callback table for Enumerator::Lazy#with_index. */
static const lazyenum_funcs lazy_with_index_funcs = {
    lazy_with_index_proc, lazy_with_index_size,
};
2744
2745/*
2746 * call-seq:
2747 * lazy.with_index(offset = 0) {|(*args), idx| block }
2748 * lazy.with_index(offset = 0)
2749 *
2750 * If a block is given, returns a lazy enumerator that will
2751 * iterate over the given block for each element
2752 * with an index, which starts from +offset+, and returns a
2753 * lazy enumerator that yields the same values (without the index).
2754 *
2755 * If a block is not given, returns a new lazy enumerator that
2756 * includes the index, starting from +offset+.
2757 *
2758 * +offset+:: the starting index to use
2759 *
2760 * See Enumerator#with_index.
2761 */
2762static VALUE
2763lazy_with_index(int argc, VALUE *argv, VALUE obj)
2764{
2765 VALUE memo;
2766
2767 rb_scan_args(argc, argv, "01", &memo);
2768 if (NIL_P(memo))
2769 memo = LONG2NUM(0);
2770
2771 return lazy_add_method(obj, 0, 0, memo, rb_ary_new_from_values(1, &memo), &lazy_with_index_funcs);
2772}
2773
/* Lazy #tap_each: call the block for side effects only; the value passes
 * through unchanged (block return value is discarded). */
static struct MEMO *
lazy_tap_each_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
{
    struct proc_entry *entry = proc_entry_ptr(proc_entry);

    rb_proc_call_with_block(entry->proc, 1, &result->memo_value, Qnil);

    return result;
}
2783
/* Callback table for Enumerator::Lazy#tap_each; size is unchanged but
 * unregistered here (no size function). */
static const lazyenum_funcs lazy_tap_each_funcs = {
    lazy_tap_each_proc, 0,
};
2787
2788/*
2789 * call-seq:
2790 * lazy.tap_each { |item| ... } -> lazy_enumerator
2791 *
2792 * Passes each element through to the block for side effects only,
2793 * without modifying the element or affecting the enumeration.
2794 * Returns a new lazy enumerator.
2795 *
2796 * This is useful for debugging or logging inside lazy chains,
2797 * without breaking laziness or misusing +map+.
2798 *
2799 * (1..).lazy
2800 * .tap_each { |x| puts "got #{x}" }
2801 * .select(&:even?)
2802 * .first(3)
2803 * # prints: got 1, got 2, ..., got 6
2804 * # returns: [2, 4, 6]
2805 *
2806 * Similar in intent to Java's Stream#peek.
2807 */
2808
2809static VALUE
2810lazy_tap_each(VALUE obj)
2811{
2812 if (!rb_block_given_p())
2813 {
2814 rb_raise(rb_eArgError, "tried to call lazy tap_each without a block");
2815 }
2816
2817 return lazy_add_method(obj, 0, 0, Qnil, Qnil, &lazy_tap_each_funcs);
2818}
2819
#if 0 /* for RDoc */
/* NOTE: never compiled.  These empty stubs exist only so RDoc picks up
 * documentation for lazy methods implemented elsewhere (presumably
 * registered with lazy_super in the Init function -- confirm there). */

/*
 * call-seq:
 *   lazy.chunk { |elt| ... } -> lazy_enumerator
 *
 * Like Enumerable#chunk, but chains operation to be lazy-evaluated.
 */
static VALUE
lazy_chunk(VALUE self)
{
}

/*
 * call-seq:
 *   lazy.chunk_while {|elt_before, elt_after| bool } -> lazy_enumerator
 *
 * Like Enumerable#chunk_while, but chains operation to be lazy-evaluated.
 */
static VALUE
lazy_chunk_while(VALUE self)
{
}

/*
 * call-seq:
 *   lazy.slice_after(pattern) -> lazy_enumerator
 *   lazy.slice_after { |elt| bool } -> lazy_enumerator
 *
 * Like Enumerable#slice_after, but chains operation to be lazy-evaluated.
 */
static VALUE
lazy_slice_after(VALUE self)
{
}

/*
 * call-seq:
 *   lazy.slice_before(pattern) -> lazy_enumerator
 *   lazy.slice_before { |elt| bool } -> lazy_enumerator
 *
 * Like Enumerable#slice_before, but chains operation to be lazy-evaluated.
 */
static VALUE
lazy_slice_before(VALUE self)
{
}

/*
 * call-seq:
 *   lazy.slice_when {|elt_before, elt_after| bool } -> lazy_enumerator
 *
 * Like Enumerable#slice_when, but chains operation to be lazy-evaluated.
 */
static VALUE
lazy_slice_when(VALUE self)
{
}
# endif
2879
/* Generic shim: run the Enumerable implementation, then re-wrap the
 * (eager) result in a lazy enumerator. */
static VALUE
lazy_super(int argc, VALUE *argv, VALUE lazy)
{
    return enumerable_lazy(rb_call_super(argc, argv));
}
2885
2886/*
2887 * call-seq:
2888 * enum.lazy -> lazy_enumerator
2889 *
2890 * Returns self.
2891 */
2892
/* Enumerator::Lazy#lazy is the identity: already lazy. */
static VALUE
lazy_lazy(VALUE obj)
{
    return obj;
}
2898
2899/*
2900 * Document-class: StopIteration
2901 *
2902 * Raised to stop the iteration, in particular by Enumerator#next. It is
2903 * rescued by Kernel#loop.
2904 *
2905 * loop do
2906 * puts "Hello"
2907 * raise StopIteration
2908 * puts "World"
2909 * end
2910 * puts "Done!"
2911 *
2912 * <em>produces:</em>
2913 *
2914 * Hello
2915 * Done!
2916 */
2917
2918/*
2919 * call-seq:
2920 * result -> value
2921 *
2922 * Returns the return value of the iterator.
2923 *
2924 * o = Object.new
2925 * def o.each
2926 * yield 1
2927 * yield 2
2928 * yield 3
2929 * 100
2930 * end
2931 *
2932 * e = o.to_enum
2933 *
2934 * puts e.next #=> 1
2935 * puts e.next #=> 2
2936 * puts e.next #=> 3
2937 *
2938 * begin
2939 * e.next
2940 * rescue StopIteration => ex
2941 * puts ex.result #=> 100
2942 * end
2943 *
2944 */
2945
/* StopIteration#result: the iterator's return value stashed in @result
 * (rb_attr_get: no NoMethodError, nil when unset). */
static VALUE
stop_result(VALUE self)
{
    return rb_attr_get(self, id_result);
}
2951
2952/*
2953 * Producer
2954 */
2955
/* GC mark/compaction callback: keep (and update) the producer's three
 * VALUE members. */
static void
producer_mark_and_move(void *p)
{
    struct producer *ptr = p;
    rb_gc_mark_and_move(&ptr->init);
    rb_gc_mark_and_move(&ptr->proc);
    rb_gc_mark_and_move(&ptr->size);
}
2964
2965#define producer_free RUBY_TYPED_DEFAULT_FREE
2966
/* ObjectSpace.memsize_of callback: the struct has no out-of-line data. */
static size_t
producer_memsize(const void *p)
{
    return sizeof(struct producer);
}
2972
/* TypedData binding for Enumerator::Producer: same callback serves both
 * mark and compact; EMBEDDABLE lets GC allocate the struct inline. */
static const rb_data_type_t producer_data_type = {
    "producer",
    {
        producer_mark_and_move,
        producer_free,
        producer_memsize,
        producer_mark_and_move,
    },
    0, 0, RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED | RUBY_TYPED_EMBEDDABLE
};
2983
/* Fetch the producer struct, raising if the object was allocated but
 * never initialized (proc still Qundef). */
static struct producer *
producer_ptr(VALUE obj)
{
    struct producer *ptr;

    TypedData_Get_Struct(obj, struct producer, &producer_data_type, ptr);
    if (!ptr || UNDEF_P(ptr->proc)) {
        rb_raise(rb_eArgError, "uninitialized producer");
    }
    return ptr;
}
2995
2996/* :nodoc: */
/* :nodoc: */
/* Allocator: Qundef members mark the object as not yet initialized. */
static VALUE
producer_allocate(VALUE klass)
{
    struct producer *ptr;
    VALUE obj;

    obj = TypedData_Make_Struct(klass, struct producer, &producer_data_type, ptr);
    ptr->init = Qundef;
    ptr->proc = Qundef;
    ptr->size = Qnil;

    return obj;
}
3010
/* Install the initial value, producer proc and size into an allocated
 * producer.  RB_OBJ_WRITE keeps the GC write barrier intact. */
static VALUE
producer_init(VALUE obj, VALUE init, VALUE proc, VALUE size)
{
    struct producer *ptr;

    TypedData_Get_Struct(obj, struct producer, &producer_data_type, ptr);

    if (!ptr) {
        rb_raise(rb_eArgError, "unallocated producer");
    }

    RB_OBJ_WRITE(obj, &ptr->init, init);
    RB_OBJ_WRITE(obj, &ptr->proc, proc);
    RB_OBJ_WRITE(obj, &ptr->size, size);

    return obj;
}
3028
/* rb_rescue2 handler: a StopIteration ends iteration; its #result
 * becomes producer_each's return value. */
static VALUE
producer_each_stop(VALUE dummy, VALUE exc)
{
    return rb_attr_get(exc, id_result);
}
3034
3035NORETURN(static VALUE producer_each_i(VALUE obj));
3036
3037static VALUE
3038producer_each_i(VALUE obj)
3039{
3040 struct producer *ptr;
3041 VALUE init, proc, curr;
3042
3043 ptr = producer_ptr(obj);
3044 init = ptr->init;
3045 proc = ptr->proc;
3046
3047 if (UNDEF_P(init)) {
3048 curr = Qnil;
3049 }
3050 else {
3051 rb_yield(init);
3052 curr = init;
3053 }
3054
3055 for (;;) {
3056 curr = rb_funcall(proc, id_call, 1, curr);
3057 rb_yield(curr);
3058 }
3059
3061}
3062
3063/* :nodoc: */
/* :nodoc: */
/* Producer#each: loop in producer_each_i until StopIteration, whose
 * #result is returned. */
static VALUE
producer_each(VALUE obj)
{
    rb_need_block();

    return rb_rescue2(producer_each_i, obj, producer_each_stop, (VALUE)0, rb_eStopIteration, (VALUE)0);
}
3071
/* Size callback for the enumerator built in Enumerator.produce: numeric
 * sizes are returned directly, a callable size is invoked, nil means
 * unknown. */
static VALUE
producer_size(VALUE obj, VALUE args, VALUE eobj)
{
    struct producer *ptr = producer_ptr(obj);
    VALUE size = ptr->size;

    if (NIL_P(size)) return Qnil;
    if (RB_INTEGER_TYPE_P(size) || RB_FLOAT_TYPE_P(size)) return size;

    return rb_funcall(size, id_call, 0);
}
3083
3084/*
3085 * call-seq:
3086 * Enumerator.produce(initial = nil, size: nil) { |prev| block } -> enumerator
3087 *
3088 * Creates an infinite enumerator from any block, just called over and
3089 * over. The result of the previous iteration is passed to the next one.
3090 * If +initial+ is provided, it is passed to the first iteration, and
3091 * becomes the first element of the enumerator; if it is not provided,
3092 * the first iteration receives +nil+, and its result becomes the first
3093 * element of the iterator.
3094 *
3095 * Raising StopIteration from the block stops an iteration.
3096 *
3097 * Enumerator.produce(1, &:succ) # => enumerator of 1, 2, 3, 4, ....
3098 *
3099 * Enumerator.produce { rand(10) } # => infinite random number sequence
3100 *
3101 * ancestors = Enumerator.produce(node) { |prev| node = prev.parent or raise StopIteration }
3102 * enclosing_section = ancestors.find { |n| n.type == :section }
3103 *
3104 * Using ::produce together with Enumerable methods like Enumerable#detect,
3105 * Enumerable#slice_after, Enumerable#take_while can provide Enumerator-based alternatives
3106 * for +while+ and +until+ cycles:
3107 *
3108 * # Find next Tuesday
3109 * require "date"
3110 * Enumerator.produce(Date.today, &:succ).detect(&:tuesday?)
3111 *
3112 * # Simple lexer:
3113 * require "strscan"
3114 * scanner = StringScanner.new("7+38/6")
3115 * PATTERN = %r{\d+|[-/+*]}
3116 * Enumerator.produce { scanner.scan(PATTERN) }.slice_after { scanner.eos? }.first
3117 * # => ["7", "+", "38", "/", "6"]
3118 *
3119 * The optional +size+ keyword argument specifies the size of the enumerator,
3120 * which can be retrieved by Enumerator#size. It can be an integer,
3121 * +Float::INFINITY+, a callable object (such as a lambda), or +nil+ to
3122 * indicate unknown size. When not specified, the size defaults to
3123 * +Float::INFINITY+.
3124 *
3125 * # Infinite enumerator
3126 * enum = Enumerator.produce(1, size: Float::INFINITY, &:succ)
3127 * enum.size # => Float::INFINITY
3128 *
3129 * # Finite enumerator with known/computable size
3130 * abs_dir = File.expand_path("./baz") # => "/foo/bar/baz"
3131 * traverser = Enumerator.produce(abs_dir, size: -> { abs_dir.count("/") + 1 }) {
3132 * raise StopIteration if it == "/"
3133 * File.dirname(it)
3134 * }
3135 * traverser.size # => 4
3136 *
3137 * # Finite enumerator with unknown size
3138 * calendar = Enumerator.produce(Date.today, size: nil) {
3139 * it.monday? ? raise(StopIteration) : it + 1
3140 * }
3141 * calendar.size # => nil
3142 */
static VALUE
enumerator_s_produce(int argc, VALUE *argv, VALUE klass)
{
    VALUE init, producer, opts, size;
    ID keyword_ids[1];

    if (!rb_block_given_p()) rb_raise(rb_eArgError, "no block given");

    keyword_ids[0] = rb_intern("size");
    rb_scan_args_kw(RB_SCAN_ARGS_LAST_HASH_KEYWORDS, argc, argv, "01:", &init, &opts);
    rb_get_kwargs(opts, keyword_ids, 0, 1, &size);

    /* size defaults to Float::INFINITY when the keyword is absent */
    size = UNDEF_P(size) ? DBL2NUM(HUGE_VAL) : convert_to_feasible_size_value(size);

    /* distinguish "no initial value" (Qundef) from an explicit nil:
     * with only the keyword hash given, no positional init was passed */
    if (argc == 0 || (argc == 1 && !NIL_P(opts))) {
        init = Qundef;
    }

    producer = producer_init(producer_allocate(rb_cEnumProducer), init, rb_block_proc(), size);

    return rb_enumeratorize_with_size_kw(producer, sym_each, 0, 0, producer_size, RB_NO_KEYWORDS);
}
3165
3166/*
3167 * Document-class: Enumerator::Chain
3168 *
3169 * Enumerator::Chain is a subclass of Enumerator, which represents a
3170 * chain of enumerables that works as a single enumerator.
3171 *
3172 * This type of objects can be created by Enumerable#chain and
3173 * Enumerator#+.
3174 */
3175
/* GC mark/compaction callback: the chain only references its array of
 * enumerables. */
static void
enum_chain_mark_and_move(void *p)
{
    struct enum_chain *ptr = p;
    rb_gc_mark_and_move(&ptr->enums);
}
3182
3183#define enum_chain_free RUBY_TYPED_DEFAULT_FREE
3184
/* ObjectSpace.memsize_of callback: the struct has no out-of-line data. */
static size_t
enum_chain_memsize(const void *p)
{
    return sizeof(struct enum_chain);
}
3190
/* TypedData binding for Enumerator::Chain; one callback serves both
 * marking and compaction. */
static const rb_data_type_t enum_chain_data_type = {
    "chain",
    {
        enum_chain_mark_and_move,
        enum_chain_free,
        enum_chain_memsize,
        enum_chain_mark_and_move,
    },
    0, 0, RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED
};
3201
/* Fetch the chain struct, raising if the object was allocated but never
 * initialized (enums still Qundef). */
static struct enum_chain *
enum_chain_ptr(VALUE obj)
{
    struct enum_chain *ptr;

    TypedData_Get_Struct(obj, struct enum_chain, &enum_chain_data_type, ptr);
    if (!ptr || UNDEF_P(ptr->enums)) {
        rb_raise(rb_eArgError, "uninitialized chain");
    }
    return ptr;
}
3213
3214/* :nodoc: */
/* :nodoc: */
/* Allocator: Qundef enums marks "not yet initialized"; pos -1 means
 * iteration has not started. */
static VALUE
enum_chain_allocate(VALUE klass)
{
    struct enum_chain *ptr;
    VALUE obj;

    obj = TypedData_Make_Struct(klass, struct enum_chain, &enum_chain_data_type, ptr);
    ptr->enums = Qundef;
    ptr->pos = -1;

    return obj;
}
3227
3228/*
3229 * call-seq:
3230 * Enumerator::Chain.new(*enums) -> enum
3231 *
3232 * Generates a new enumerator object that iterates over the elements
3233 * of given enumerable objects in sequence.
3234 *
3235 * e = Enumerator::Chain.new(1..3, [4, 5])
3236 * e.to_a #=> [1, 2, 3, 4, 5]
3237 * e.size #=> 5
3238 */
static VALUE
enum_chain_initialize(VALUE obj, VALUE enums)
{
    struct enum_chain *ptr;

    rb_check_frozen(obj);
    TypedData_Get_Struct(obj, struct enum_chain, &enum_chain_data_type, ptr);

    if (!ptr) rb_raise(rb_eArgError, "unallocated chain");

    /* freeze the list so the chain's contents cannot change after creation */
    RB_OBJ_WRITE(obj, &ptr->enums, rb_ary_freeze(enums));
    ptr->pos = -1;

    return obj;
}
3254
/* Build a Chain over `enums`; if any element is itself lazy, the whole
 * chain is returned as a lazy enumerator so laziness is preserved. */
static VALUE
new_enum_chain(VALUE enums)
{
    long i;
    VALUE obj = enum_chain_initialize(enum_chain_allocate(rb_cEnumChain), enums);

    for (i = 0; i < RARRAY_LEN(enums); i++) {
        if (RTEST(rb_obj_is_kind_of(RARRAY_AREF(enums, i), rb_cLazy))) {
            return enumerable_lazy(obj);
        }
    }

    return obj;
}
3269
3270/* :nodoc: */
/* :nodoc: */
/* dup/clone support: copy the (frozen, thus shareable) enums array and
 * the iteration position. */
static VALUE
enum_chain_init_copy(VALUE obj, VALUE orig)
{
    struct enum_chain *ptr0, *ptr1;

    if (!OBJ_INIT_COPY(obj, orig)) return obj;
    ptr0 = enum_chain_ptr(orig);

    TypedData_Get_Struct(obj, struct enum_chain, &enum_chain_data_type, ptr1);

    if (!ptr1) rb_raise(rb_eArgError, "unallocated chain");

    RB_OBJ_WRITE(obj, &ptr1->enums, ptr0->enums);
    ptr1->pos = ptr0->pos;

    return obj;
}
3288
/* Sum the sizes of all chained enumerables.  nil (unknown) or an
 * infinite float propagates immediately; any other non-integer size
 * makes the total unknown (nil). */
static VALUE
enum_chain_total_size(VALUE enums)
{
    VALUE total = INT2FIX(0);
    long i;

    for (i = 0; i < RARRAY_LEN(enums); i++) {
        VALUE size = enum_size(RARRAY_AREF(enums, i));

        if (NIL_P(size) || (RB_FLOAT_TYPE_P(size) && isinf(NUM2DBL(size)))) {
            return size;
        }
        if (!RB_INTEGER_TYPE_P(size)) {
            return Qnil;
        }

        /* Ruby-level + handles Bignum overflow transparently */
        total = rb_funcall(total, '+', 1, size);
    }

    return total;
}
3310
3311/*
3312 * call-seq:
3313 * obj.size -> int, Float::INFINITY or nil
3314 *
3315 * Returns the total size of the enumerator chain calculated by
3316 * summing up the size of each enumerable in the chain. If any of the
3317 * enumerables reports its size as nil or Float::INFINITY, that value
3318 * is returned as the total size.
3319 */
static VALUE
enum_chain_size(VALUE obj)
{
    return enum_chain_total_size(enum_chain_ptr(obj)->enums);
}
3325
/* Size callback for Chain#each without extra arguments. */
static VALUE
enum_chain_enum_size(VALUE obj, VALUE args, VALUE eobj)
{
    return enum_chain_size(obj);
}
3331
/* Size callback for Chain#each with arguments: size is unknowable. */
static VALUE
enum_chain_enum_no_size(VALUE obj, VALUE args, VALUE eobj)
{
    return Qnil;
}
3337
3338/*
3339 * call-seq:
3340 * obj.each(*args) { |...| ... } -> obj
3341 * obj.each(*args) -> enumerator
3342 *
3343 * Iterates over the elements of the first enumerable by calling the
3344 * "each" method on it with the given arguments, then proceeds to the
3345 * following enumerables in sequence until all of the enumerables are
3346 * exhausted.
3347 *
3348 * If no block is given, returns an enumerator.
3349 */
static VALUE
enum_chain_each(int argc, VALUE *argv, VALUE obj)
{
    VALUE enums, block;
    struct enum_chain *objptr;
    long i;

    /* With extra args, the precomputed element sizes do not apply. */
    RETURN_SIZED_ENUMERATOR(obj, argc, argv, argc > 0 ? enum_chain_enum_no_size : enum_chain_enum_size);

    objptr = enum_chain_ptr(obj);
    enums = objptr->enums;
    block = rb_block_proc();

    for (i = 0; i < RARRAY_LEN(enums); i++) {
        objptr->pos = i; /* track progress so #rewind knows where to start */
        rb_funcall_with_block(RARRAY_AREF(enums, i), id_each, argc, argv, block);
    }

    return obj;
}
3370
3371/*
3372 * call-seq:
3373 * obj.rewind -> obj
3374 *
3375 * Rewinds the enumerator chain by calling the "rewind" method on each
3376 * enumerable in reverse order. Each call is performed only if the
3377 * enumerable responds to the method.
3378 */
static VALUE
enum_chain_rewind(VALUE obj)
{
    struct enum_chain *objptr = enum_chain_ptr(obj);
    VALUE enums = objptr->enums;
    long i;

    /* Walk back from the last enumerable reached (pos) to the first;
     * rb_check_funcall makes each rewind best-effort (skipped when the
     * element does not respond to #rewind). */
    for (i = objptr->pos; 0 <= i && i < RARRAY_LEN(enums); objptr->pos = --i) {
        rb_check_funcall(RARRAY_AREF(enums, i), id_rewind, 0, 0);
    }

    return obj;
}
3392
/* rb_exec_recursive callback for Chain#inspect; `recur` is set when the
 * chain (indirectly) contains itself. */
static VALUE
inspect_enum_chain(VALUE obj, VALUE dummy, int recur)
{
    VALUE klass = rb_obj_class(obj);
    struct enum_chain *ptr;

    TypedData_Get_Struct(obj, struct enum_chain, &enum_chain_data_type, ptr);

    /* inspect must not raise on an uninitialized object */
    if (!ptr || UNDEF_P(ptr->enums)) {
        return rb_sprintf("#<%"PRIsVALUE": uninitialized>", rb_class_path(klass));
    }

    if (recur) {
        return rb_sprintf("#<%"PRIsVALUE": ...>", rb_class_path(klass));
    }

    return rb_sprintf("#<%"PRIsVALUE": %+"PRIsVALUE">", rb_class_path(klass), ptr->enums);
}
3411
3412/*
3413 * call-seq:
3414 * obj.inspect -> string
3415 *
3416 * Returns a printable version of the enumerator chain.
3417 */
static VALUE
enum_chain_inspect(VALUE obj)
{
    return rb_exec_recursive(inspect_enum_chain, obj, 0);
}
3423
3424/*
3425 * call-seq:
3426 * e.chain(*enums) -> enumerator
3427 *
3428 * Returns an enumerator object generated from this enumerator and
3429 * given enumerables.
3430 *
3431 * e = (1..3).chain([4, 5])
3432 * e.to_a #=> [1, 2, 3, 4, 5]
3433 */
static VALUE
enum_chain(int argc, VALUE *argv, VALUE obj)
{
    /* [self, *enums] */
    VALUE enums = rb_ary_new_from_values(1, &obj);
    rb_ary_cat(enums, argv, argc);
    return new_enum_chain(enums);
}
3441
3442/*
3443 * call-seq:
3444 * e + enum -> enumerator
3445 *
3446 * Returns an enumerator object generated from this enumerator and a
3447 * given enumerable.
3448 *
3449 * e = (1..3).each + [4, 5]
3450 * e.to_a #=> [1, 2, 3, 4, 5]
3451 */
static VALUE
enumerator_plus(VALUE obj, VALUE eobj)
{
    return new_enum_chain(rb_ary_new_from_args(2, obj, eobj));
}
3457
3458/*
3459 * Document-class: Enumerator::Product
3460 *
3461 * Enumerator::Product generates a Cartesian product of any number of
3462 * enumerable objects. Iterating over the product of enumerable
3463 * objects is roughly equivalent to nested each_entry loops where the
3464 * loop for the rightmost object is put innermost.
3465 *
3466 * innings = Enumerator::Product.new(1..9, ['top', 'bottom'])
3467 *
3468 * innings.each do |i, h|
3469 * p [i, h]
3470 * end
3471 * # [1, "top"]
3472 * # [1, "bottom"]
3473 * # [2, "top"]
3474 * # [2, "bottom"]
3475 * # [3, "top"]
3476 * # [3, "bottom"]
3477 * # ...
3478 * # [9, "top"]
3479 * # [9, "bottom"]
3480 *
3481 * The method used against each enumerable object is `each_entry`
3482 * instead of `each` so that the product of N enumerable objects
3483 * yields an array of exactly N elements in each iteration.
3484 *
3485 * When no enumerator is given, it calls a given block once yielding
3486 * an empty argument list.
3487 *
3488 * This type of objects can be created by Enumerator.product.
3489 */
3490
/* GC mark/compaction callback: the product only references its array of
 * enumerables. */
static void
enum_product_mark_and_move(void *p)
{
    struct enum_product *ptr = p;
    rb_gc_mark_and_move(&ptr->enums);
}
3497
3498#define enum_product_free RUBY_TYPED_DEFAULT_FREE
3499
/* ObjectSpace.memsize_of callback: the struct has no out-of-line data. */
static size_t
enum_product_memsize(const void *p)
{
    return sizeof(struct enum_product);
}
3505
/* TypedData binding for Enumerator::Product; one callback serves both
 * marking and compaction. */
static const rb_data_type_t enum_product_data_type = {
    "product",
    {
        enum_product_mark_and_move,
        enum_product_free,
        enum_product_memsize,
        enum_product_mark_and_move,
    },
    0, 0, RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED
};
3516
/* Fetch the product struct, raising if the object was allocated but
 * never initialized (enums still Qundef). */
static struct enum_product *
enum_product_ptr(VALUE obj)
{
    struct enum_product *ptr;

    TypedData_Get_Struct(obj, struct enum_product, &enum_product_data_type, ptr);
    if (!ptr || UNDEF_P(ptr->enums)) {
        rb_raise(rb_eArgError, "uninitialized product");
    }
    return ptr;
}
3528
3529/* :nodoc: */
/* :nodoc: */
/* Allocator: Qundef enums marks "not yet initialized". */
static VALUE
enum_product_allocate(VALUE klass)
{
    struct enum_product *ptr;
    VALUE obj;

    obj = TypedData_Make_Struct(klass, struct enum_product, &enum_product_data_type, ptr);
    ptr->enums = Qundef;

    return obj;
}
3541
3542/*
3543 * call-seq:
3544 * Enumerator::Product.new(*enums) -> enum
3545 *
3546 * Generates a new enumerator object that generates a Cartesian
3547 * product of given enumerable objects.
3548 *
3549 * e = Enumerator::Product.new(1..3, [4, 5])
3550 * e.to_a #=> [[1, 4], [1, 5], [2, 4], [2, 5], [3, 4], [3, 5]]
3551 * e.size #=> 6
3552 */
3553static VALUE
3554enum_product_initialize(int argc, VALUE *argv, VALUE obj)
3555{
3556 struct enum_product *ptr;
3557 VALUE enums = Qnil, options = Qnil;
3558
3559 rb_scan_args(argc, argv, "*:", &enums, &options);
3560
3561 if (!NIL_P(options) && !RHASH_EMPTY_P(options)) {
3562 rb_exc_raise(rb_keyword_error_new("unknown", rb_hash_keys(options)));
3563 }
3564
3565 rb_check_frozen(obj);
3566 TypedData_Get_Struct(obj, struct enum_product, &enum_product_data_type, ptr);
3567
3568 if (!ptr) rb_raise(rb_eArgError, "unallocated product");
3569
3570 RB_OBJ_WRITE(obj, &ptr->enums, rb_ary_freeze(enums));
3571
3572 return obj;
3573}
3574
3575/* :nodoc: */
3576static VALUE
3577enum_product_init_copy(VALUE obj, VALUE orig)
3578{
3579 struct enum_product *ptr0, *ptr1;
3580
3581 if (!OBJ_INIT_COPY(obj, orig)) return obj;
3582 ptr0 = enum_product_ptr(orig);
3583
3584 TypedData_Get_Struct(obj, struct enum_product, &enum_product_data_type, ptr1);
3585
3586 if (!ptr1) rb_raise(rb_eArgError, "unallocated product");
3587
3588 RB_OBJ_WRITE(obj, &ptr1->enums, ptr0->enums);
3589
3590 return obj;
3591}
3592
3593static VALUE
3594enum_product_total_size(VALUE enums)
3595{
3596 VALUE total = INT2FIX(1);
3597 VALUE sizes = rb_ary_hidden_new(RARRAY_LEN(enums));
3598 long i;
3599
3600 for (i = 0; i < RARRAY_LEN(enums); i++) {
3601 VALUE size = enum_size(RARRAY_AREF(enums, i));
3602 if (size == INT2FIX(0)) {
3603 rb_ary_resize(sizes, 0);
3604 return size;
3605 }
3606 rb_ary_push(sizes, size);
3607 }
3608 for (i = 0; i < RARRAY_LEN(sizes); i++) {
3609 VALUE size = RARRAY_AREF(sizes, i);
3610
3611 if (NIL_P(size) || (RB_TYPE_P(size, T_FLOAT) && isinf(NUM2DBL(size)))) {
3612 return size;
3613 }
3614 if (!RB_INTEGER_TYPE_P(size)) {
3615 return Qnil;
3616 }
3617
3618 total = rb_funcall(total, '*', 1, size);
3619 }
3620
3621 return total;
3622}
3623
3624/*
3625 * call-seq:
3626 * obj.size -> int, Float::INFINITY or nil
3627 *
3628 * Returns the total size of the enumerator product calculated by
3629 * multiplying the sizes of enumerables in the product. If any of the
3630 * enumerables reports its size as nil or Float::INFINITY, that value
3631 * is returned as the size.
3632 */
static VALUE
enum_product_size(VALUE obj)
{
    /* enum_product_ptr raises ArgumentError on an uninitialized product. */
    return enum_product_total_size(enum_product_ptr(obj)->enums);
}
3638
/* Size callback handed to RETURN_SIZED_ENUMERATOR; args/eobj are unused. */
static VALUE
enum_product_enum_size(VALUE obj, VALUE args, VALUE eobj)
{
    return enum_product_size(obj);
}
3644
3646 VALUE obj;
3647 VALUE block;
3648 int index;
3649 int argc;
3650 VALUE *argv;
3651};
3652
3653static VALUE product_each(VALUE, struct product_state *);
3654
3655static VALUE
3656product_each_i(RB_BLOCK_CALL_FUNC_ARGLIST(value, state))
3657{
3658 struct product_state *pstate = (struct product_state *)state;
3659 pstate->argv[pstate->index++] = value;
3660
3661 VALUE val = product_each(pstate->obj, pstate);
3662 pstate->index--;
3663 return val;
3664}
3665
3666static VALUE
3667product_each(VALUE obj, struct product_state *pstate)
3668{
3669 struct enum_product *ptr = enum_product_ptr(obj);
3670 VALUE enums = ptr->enums;
3671
3672 if (pstate->index < pstate->argc) {
3673 VALUE eobj = RARRAY_AREF(enums, pstate->index);
3674
3675 rb_block_call(eobj, id_each_entry, 0, NULL, product_each_i, (VALUE)pstate);
3676 }
3677 else {
3678 rb_funcall(pstate->block, id_call, 1, rb_ary_new_from_values(pstate->argc, pstate->argv));
3679 }
3680
3681 return obj;
3682}
3683
3684static VALUE
3685enum_product_run(VALUE obj, VALUE block)
3686{
3687 struct enum_product *ptr = enum_product_ptr(obj);
3688 int argc = RARRAY_LENINT(ptr->enums);
3689 if (argc == 0) { /* no need to allocate state.argv */
3690 rb_funcall(block, id_call, 1, rb_ary_new());
3691 return obj;
3692 }
3693
3694 VALUE argsbuf = 0;
3695 struct product_state state = {
3696 .obj = obj,
3697 .block = block,
3698 .index = 0,
3699 .argc = argc,
3700 .argv = ALLOCV_N(VALUE, argsbuf, argc),
3701 };
3702
3703 VALUE ret = product_each(obj, &state);
3704 ALLOCV_END(argsbuf);
3705 return ret;
3706}
3707
3708/*
3709 * call-seq:
3710 * obj.each { |...| ... } -> obj
3711 * obj.each -> enumerator
3712 *
3713 * Iterates over the elements of the first enumerable by calling the
3714 * "each_entry" method on it with the given arguments, then proceeds
3715 * to the following enumerables in sequence until all of the
3716 * enumerables are exhausted.
3717 *
3718 * If no block is given, returns an enumerator. Otherwise, returns self.
3719 */
static VALUE
enum_product_each(VALUE obj)
{
    /* Without a block, return a sized enumerator over the product. */
    RETURN_SIZED_ENUMERATOR(obj, 0, 0, enum_product_enum_size);

    return enum_product_run(obj, rb_block_proc());
}
3727
3728/*
3729 * call-seq:
3730 * obj.rewind -> obj
3731 *
3732 * Rewinds the product enumerator by calling the "rewind" method on
3733 * each enumerable in reverse order. Each call is performed only if
3734 * the enumerable responds to the method.
3735 */
3736static VALUE
3737enum_product_rewind(VALUE obj)
3738{
3739 struct enum_product *ptr = enum_product_ptr(obj);
3740 VALUE enums = ptr->enums;
3741 long i;
3742
3743 for (i = 0; i < RARRAY_LEN(enums); i++) {
3744 rb_check_funcall(RARRAY_AREF(enums, i), id_rewind, 0, 0);
3745 }
3746
3747 return obj;
3748}
3749
3750static VALUE
3751inspect_enum_product(VALUE obj, VALUE dummy, int recur)
3752{
3753 VALUE klass = rb_obj_class(obj);
3754 struct enum_product *ptr;
3755
3756 TypedData_Get_Struct(obj, struct enum_product, &enum_product_data_type, ptr);
3757
3758 if (!ptr || UNDEF_P(ptr->enums)) {
3759 return rb_sprintf("#<%"PRIsVALUE": uninitialized>", rb_class_path(klass));
3760 }
3761
3762 if (recur) {
3763 return rb_sprintf("#<%"PRIsVALUE": ...>", rb_class_path(klass));
3764 }
3765
3766 return rb_sprintf("#<%"PRIsVALUE": %+"PRIsVALUE">", rb_class_path(klass), ptr->enums);
3767}
3768
3769/*
3770 * call-seq:
3771 * obj.inspect -> string
3772 *
3773 * Returns a printable version of the product enumerator.
3774 */
static VALUE
enum_product_inspect(VALUE obj)
{
    /* rb_exec_recursive guards against self-referential products. */
    return rb_exec_recursive(inspect_enum_product, obj, 0);
}
3780
3781/*
3782 * call-seq:
3783 * Enumerator.product(*enums) -> enumerator
 *   Enumerator.product(*enums) { |elts| ... } -> nil
3785 *
3786 * Generates a new enumerator object that generates a Cartesian
3787 * product of given enumerable objects. This is equivalent to
3788 * Enumerator::Product.new.
3789 *
3790 * e = Enumerator.product(1..3, [4, 5])
3791 * e.to_a #=> [[1, 4], [1, 5], [2, 4], [2, 5], [3, 4], [3, 5]]
3792 * e.size #=> 6
3793 *
3794 * When a block is given, calls the block with each N-element array
3795 * generated and returns +nil+.
3796 */
3797static VALUE
3798enumerator_s_product(int argc, VALUE *argv, VALUE klass)
3799{
3800 VALUE enums = Qnil, options = Qnil, block = Qnil;
3801
3802 rb_scan_args(argc, argv, "*:&", &enums, &options, &block);
3803
3804 if (!NIL_P(options) && !RHASH_EMPTY_P(options)) {
3805 rb_exc_raise(rb_keyword_error_new("unknown", rb_hash_keys(options)));
3806 }
3807
3808 VALUE obj = enum_product_initialize(argc, argv, enum_product_allocate(rb_cEnumProduct));
3809
3810 if (!NIL_P(block)) {
3811 enum_product_run(obj, block);
3812 return Qnil;
3813 }
3814
3815 return obj;
3816}
3817
3819 struct enumerator enumerator;
3820 VALUE begin;
3821 VALUE end;
3822 VALUE step;
3823 bool exclude_end;
3824};
3825
/* Reference-edge list for declarative GC marking/compaction: the slots
 * of the embedded enumerator, then the arithmetic-sequence fields. */
RUBY_REFERENCES(arith_seq_refs) = {
    RUBY_REF_EDGE(struct enumerator, obj),
    RUBY_REF_EDGE(struct enumerator, args),
    RUBY_REF_EDGE(struct enumerator, fib),
    RUBY_REF_EDGE(struct enumerator, dst),
    RUBY_REF_EDGE(struct enumerator, lookahead),
    RUBY_REF_EDGE(struct enumerator, feedvalue),
    RUBY_REF_EDGE(struct enumerator, stop_exc),
    RUBY_REF_EDGE(struct enumerator, size),
    RUBY_REF_EDGE(struct enumerator, procs),

    RUBY_REF_EDGE(struct arith_seq, begin),
    RUBY_REF_EDGE(struct arith_seq, end),
    RUBY_REF_EDGE(struct arith_seq, step),
    RUBY_REF_END
};
3842
3843static const rb_data_type_t arith_seq_data_type = {
3844 "arithmetic_sequence",
3845 {
3846 RUBY_REFS_LIST_PTR(arith_seq_refs),
3848 NULL, // Nothing allocated externally, so don't need a memsize function
3849 NULL,
3850 },
3851 .parent = &enumerator_data_type,
3852 .flags = RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED | RUBY_TYPED_DECL_MARKING | RUBY_TYPED_EMBEDDABLE
3853};
3854
3855static VALUE
3856arith_seq_allocate(VALUE klass)
3857{
3858 struct arith_seq *ptr;
3859 VALUE enum_obj;
3860
3861 enum_obj = TypedData_Make_Struct(klass, struct arith_seq, &arith_seq_data_type, ptr);
3862 ptr->enumerator.obj = Qundef;
3863
3864 return enum_obj;
3865}
3866
3867/*
3868 * Document-class: Enumerator::ArithmeticSequence
3869 *
 * Enumerator::ArithmeticSequence is a subclass of Enumerator
 * that represents a sequence of numbers with a common difference.
3872 * Instances of this class can be generated by the Range#step and Numeric#step
3873 * methods.
3874 *
3875 * The class can be used for slicing Array (see Array#slice) or custom
3876 * collections.
3877 */
3878
/*
 * Creates an Enumerator::ArithmeticSequence for obj.meth(*argv),
 * recording the sequence parameters (beg, end, step, end-exclusion)
 * alongside the usual enumerator state.  size_fn reports the size lazily.
 */
VALUE
rb_arith_seq_new(VALUE obj, VALUE meth, int argc, VALUE const *argv,
                 rb_enumerator_size_func *size_fn,
                 VALUE beg, VALUE end, VALUE step, int excl)
{
    VALUE aseq = enumerator_init(arith_seq_allocate(rb_cArithSeq),
                                 obj, meth, argc, argv, size_fn, Qnil, rb_keyword_given_p());
    struct arith_seq *ptr;
    TypedData_Get_Struct(aseq, struct arith_seq, &enumerator_data_type, ptr);

    /* RB_OBJ_WRITE keeps the write barrier informed for each slot. */
    RB_OBJ_WRITE(aseq, &ptr->begin, beg);
    RB_OBJ_WRITE(aseq, &ptr->end, end);
    RB_OBJ_WRITE(aseq, &ptr->step, step);
    ptr->exclude_end = excl;

    return aseq;
}
3896
3897/*
3898 * call-seq: aseq.begin -> num or nil
3899 *
3900 * Returns the number that defines the first element of this arithmetic
3901 * sequence.
3902 */
3903static inline VALUE
3904arith_seq_begin(VALUE self)
3905{
3906 struct arith_seq *ptr;
3907 TypedData_Get_Struct(self, struct arith_seq, &enumerator_data_type, ptr);
3908 return ptr->begin;
3909}
3910
3911/*
3912 * call-seq: aseq.end -> num or nil
3913 *
3914 * Returns the number that defines the end of this arithmetic sequence.
3915 */
3916static inline VALUE
3917arith_seq_end(VALUE self)
3918{
3919 struct arith_seq *ptr;
3920 TypedData_Get_Struct(self, struct arith_seq, &enumerator_data_type, ptr);
3921 return ptr->end;
3922}
3923
3924/*
3925 * call-seq: aseq.step -> num
3926 *
3927 * Returns the number that defines the common difference between
3928 * two adjacent elements in this arithmetic sequence.
3929 */
3930static inline VALUE
3931arith_seq_step(VALUE self)
3932{
3933 struct arith_seq *ptr;
3934 TypedData_Get_Struct(self, struct arith_seq, &enumerator_data_type, ptr);
3935 return ptr->step;
3936}
3937
3938/*
3939 * call-seq: aseq.exclude_end? -> true or false
3940 *
3941 * Returns <code>true</code> if this arithmetic sequence excludes its end value.
3942 */
3943static inline VALUE
3944arith_seq_exclude_end(VALUE self)
3945{
3946 struct arith_seq *ptr;
3947 TypedData_Get_Struct(self, struct arith_seq, &enumerator_data_type, ptr);
3948 return RBOOL(ptr->exclude_end);
3949}
3950
3951static inline int
3952arith_seq_exclude_end_p(VALUE self)
3953{
3954 struct arith_seq *ptr;
3955 TypedData_Get_Struct(self, struct arith_seq, &enumerator_data_type, ptr);
3956 return ptr->exclude_end;
3957}
3958
3959int
3960rb_arithmetic_sequence_extract(VALUE obj, rb_arithmetic_sequence_components_t *component)
3961{
3962 if (rb_obj_is_kind_of(obj, rb_cArithSeq)) {
3963 component->begin = arith_seq_begin(obj);
3964 component->end = arith_seq_end(obj);
3965 component->step = arith_seq_step(obj);
3966 component->exclude_end = arith_seq_exclude_end_p(obj);
3967 return 1;
3968 }
3969 else if (rb_range_values(obj, &component->begin, &component->end, &component->exclude_end)) {
3970 component->step = INT2FIX(1);
3971 return 1;
3972 }
3973
3974 return 0;
3975}
3976
3977VALUE
3978rb_arithmetic_sequence_beg_len_step(VALUE obj, long *begp, long *lenp, long *stepp, long len, int err)
3979{
3980 RBIMPL_NONNULL_ARG(begp);
3981 RBIMPL_NONNULL_ARG(lenp);
3982 RBIMPL_NONNULL_ARG(stepp);
3983
3985 if (!rb_arithmetic_sequence_extract(obj, &aseq)) {
3986 return Qfalse;
3987 }
3988
3989 long step = NIL_P(aseq.step) ? 1 : NUM2LONG(aseq.step);
3990 *stepp = step;
3991
3992 if (step < 0) {
3993 if (aseq.exclude_end && !NIL_P(aseq.end)) {
3994 /* Handle exclusion before range reversal */
3995 aseq.end = LONG2NUM(NUM2LONG(aseq.end) + 1);
3996
3997 /* Don't exclude the previous beginning */
3998 aseq.exclude_end = 0;
3999 }
4000 VALUE tmp = aseq.begin;
4001 aseq.begin = aseq.end;
4002 aseq.end = tmp;
4003 }
4004
4005 if (err == 0 && (step < -1 || step > 1)) {
4006 if (rb_range_component_beg_len(aseq.begin, aseq.end, aseq.exclude_end, begp, lenp, len, 1) == Qtrue) {
4007 if (*begp > len)
4008 goto out_of_range;
4009 if (*lenp > len)
4010 goto out_of_range;
4011 return Qtrue;
4012 }
4013 }
4014 else {
4015 return rb_range_component_beg_len(aseq.begin, aseq.end, aseq.exclude_end, begp, lenp, len, err);
4016 }
4017
4018 out_of_range:
4019 rb_raise(rb_eRangeError, "%+"PRIsVALUE" out of range", obj);
4020 return Qnil;
4021}
4022
4023static VALUE
4024arith_seq_take(VALUE self, VALUE num)
4025{
4026 VALUE b, e, s, ary;
4027 long n;
4028 int x;
4029
4030 n = NUM2LONG(num);
4031 if (n < 0) {
4032 rb_raise(rb_eArgError, "attempt to take negative size");
4033 }
4034 if (n == 0) {
4035 return rb_ary_new_capa(0);
4036 }
4037
4038 b = arith_seq_begin(self);
4039 e = arith_seq_end(self);
4040 s = arith_seq_step(self);
4041 x = arith_seq_exclude_end_p(self);
4042
4043 if (FIXNUM_P(b) && NIL_P(e) && FIXNUM_P(s)) {
4044 long i = FIX2LONG(b), unit = FIX2LONG(s);
4045 ary = rb_ary_new_capa(n);
4046 while (n > 0 && FIXABLE(i)) {
4047 rb_ary_push(ary, LONG2FIX(i));
4048 i += unit; // FIXABLE + FIXABLE never overflow;
4049 --n;
4050 }
4051 if (n > 0) {
4052 b = LONG2NUM(i);
4053 while (n > 0) {
4054 rb_ary_push(ary, b);
4055 b = rb_big_plus(b, s);
4056 --n;
4057 }
4058 }
4059 return ary;
4060 }
4061 else if (FIXNUM_P(b) && FIXNUM_P(e) && FIXNUM_P(s)) {
4062 long i = FIX2LONG(b);
4063 long end = FIX2LONG(e);
4064 long unit = FIX2LONG(s);
4065 long len;
4066
4067 if (unit >= 0) {
4068 if (!x) end += 1;
4069
4070 len = end - i;
4071 if (len < 0) len = 0;
4072 ary = rb_ary_new_capa((n < len) ? n : len);
4073 while (n > 0 && i < end) {
4074 rb_ary_push(ary, LONG2FIX(i));
4075 if (i > LONG_MAX - unit) break;
4076 i += unit;
4077 --n;
4078 }
4079 }
4080 else {
4081 if (!x) end -= 1;
4082
4083 len = i - end;
4084 if (len < 0) len = 0;
4085 ary = rb_ary_new_capa((n < len) ? n : len);
4086 while (n > 0 && i > end) {
4087 rb_ary_push(ary, LONG2FIX(i));
4088 if (i < LONG_MIN - unit) break;
4089 i += unit;
4090 --n;
4091 }
4092 }
4093 return ary;
4094 }
4095 else if (RB_FLOAT_TYPE_P(b) || RB_FLOAT_TYPE_P(e) || RB_FLOAT_TYPE_P(s)) {
4096 /* generate values like ruby_float_step */
4097
4098 double unit = NUM2DBL(s);
4099 double beg = NUM2DBL(b);
4100 double end = NIL_P(e) ? (unit < 0 ? -1 : 1)*HUGE_VAL : NUM2DBL(e);
4101 double len = ruby_float_step_size(beg, end, unit, x);
4102 long i;
4103
4104 if (n > len)
4105 n = (long)len;
4106
4107 if (isinf(unit)) {
4108 if (len > 0) {
4109 ary = rb_ary_new_capa(1);
4110 rb_ary_push(ary, DBL2NUM(beg));
4111 }
4112 else {
4113 ary = rb_ary_new_capa(0);
4114 }
4115 }
4116 else if (unit == 0) {
4117 VALUE val = DBL2NUM(beg);
4118 ary = rb_ary_new_capa(n);
4119 for (i = 0; i < len; ++i) {
4120 rb_ary_push(ary, val);
4121 }
4122 }
4123 else {
4124 ary = rb_ary_new_capa(n);
4125 for (i = 0; i < n; ++i) {
4126 double d = i*unit+beg;
4127 if (unit >= 0 ? end < d : d < end) d = end;
4128 rb_ary_push(ary, DBL2NUM(d));
4129 }
4130 }
4131
4132 return ary;
4133 }
4134
4135 {
4136 VALUE argv[1];
4137 argv[0] = num;
4138 return rb_call_super(1, argv);
4139 }
4140}
4141
4142/*
4143 * call-seq:
4144 * aseq.first -> num or nil
4145 * aseq.first(n) -> an_array
4146 *
4147 * Returns the first number in this arithmetic sequence,
4148 * or an array of the first +n+ elements.
4149 */
static VALUE
arith_seq_first(int argc, VALUE *argv, VALUE self)
{
    VALUE b, e, s;

    rb_check_arity(argc, 0, 1);

    b = arith_seq_begin(self);
    e = arith_seq_end(self);
    s = arith_seq_step(self);
    if (argc == 0) {
        /* No count given: return begin, or nil if the sequence is empty. */
        if (NIL_P(b)) {
            return Qnil;
        }
        if (!NIL_P(e)) {
            /* r is the sign of the step (via <=> coercion); the sequence
             * is empty when begin already lies past end in that direction. */
            VALUE zero = INT2FIX(0);
            int r = rb_cmpint(rb_num_coerce_cmp(s, zero, idCmp), s, zero);
            if (r > 0 && RTEST(rb_funcall(b, '>', 1, e))) {
                return Qnil;
            }
            if (r < 0 && RTEST(rb_funcall(b, '<', 1, e))) {
                return Qnil;
            }
        }
        return b;
    }

    return arith_seq_take(self, argv[0]);
}
4179
4180static inline VALUE
4181num_plus(VALUE a, VALUE b)
4182{
4183 if (RB_INTEGER_TYPE_P(a)) {
4184 return rb_int_plus(a, b);
4185 }
4186 else if (RB_FLOAT_TYPE_P(a)) {
4187 return rb_float_plus(a, b);
4188 }
4189 else if (RB_TYPE_P(a, T_RATIONAL)) {
4190 return rb_rational_plus(a, b);
4191 }
4192 else {
4193 return rb_funcallv(a, '+', 1, &b);
4194 }
4195}
4196
4197static inline VALUE
4198num_minus(VALUE a, VALUE b)
4199{
4200 if (RB_INTEGER_TYPE_P(a)) {
4201 return rb_int_minus(a, b);
4202 }
4203 else if (RB_FLOAT_TYPE_P(a)) {
4204 return rb_float_minus(a, b);
4205 }
4206 else if (RB_TYPE_P(a, T_RATIONAL)) {
4207 return rb_rational_minus(a, b);
4208 }
4209 else {
4210 return rb_funcallv(a, '-', 1, &b);
4211 }
4212}
4213
4214static inline VALUE
4215num_mul(VALUE a, VALUE b)
4216{
4217 if (RB_INTEGER_TYPE_P(a)) {
4218 return rb_int_mul(a, b);
4219 }
4220 else if (RB_FLOAT_TYPE_P(a)) {
4221 return rb_float_mul(a, b);
4222 }
4223 else if (RB_TYPE_P(a, T_RATIONAL)) {
4224 return rb_rational_mul(a, b);
4225 }
4226 else {
4227 return rb_funcallv(a, '*', 1, &b);
4228 }
4229}
4230
4231static inline VALUE
4232num_idiv(VALUE a, VALUE b)
4233{
4234 VALUE q;
4235 if (RB_INTEGER_TYPE_P(a)) {
4236 q = rb_int_idiv(a, b);
4237 }
4238 else if (RB_FLOAT_TYPE_P(a)) {
4239 q = rb_float_div(a, b);
4240 }
4241 else if (RB_TYPE_P(a, T_RATIONAL)) {
4242 q = rb_rational_div(a, b);
4243 }
4244 else {
4245 q = rb_funcallv(a, idDiv, 1, &b);
4246 }
4247
4248 if (RB_INTEGER_TYPE_P(q)) {
4249 return q;
4250 }
4251 else if (RB_FLOAT_TYPE_P(q)) {
4252 return rb_float_floor(q, 0);
4253 }
4254 else if (RB_TYPE_P(q, T_RATIONAL)) {
4255 return rb_rational_floor(q, 0);
4256 }
4257 else {
4258 return rb_funcall(q, rb_intern("floor"), 0);
4259 }
4260}
4261
4262/*
4263 * call-seq:
4264 * aseq.last -> num or nil
4265 * aseq.last(n) -> an_array
4266 *
4267 * Returns the last number in this arithmetic sequence,
4268 * or an array of the last +n+ elements.
4269 */
static VALUE
arith_seq_last(int argc, VALUE *argv, VALUE self)
{
    VALUE b, e, s, len_1, len, last, nv, ary;
    int last_is_adjusted;
    long n;

    e = arith_seq_end(self);
    if (NIL_P(e)) {
        rb_raise(rb_eRangeError,
                 "cannot get the last element of endless arithmetic sequence");
    }

    b = arith_seq_begin(self);
    s = arith_seq_step(self);

    /* len_1 = (e - b) / s: zero-based index of the last grid point. */
    len_1 = num_idiv(num_minus(e, b), s);
    if (rb_num_negative_int_p(len_1)) {
        /* Empty sequence: begin already lies past end. */
        if (argc == 0) {
            return Qnil;
        }
        return rb_ary_new_capa(0);
    }

    /* Step back once when the last grid point hits an excluded end. */
    last = num_plus(b, num_mul(s, len_1));
    if ((last_is_adjusted = arith_seq_exclude_end_p(self) && rb_equal(last, e))) {
        last = num_minus(last, s);
    }

    if (argc == 0) {
        return last;
    }

    if (last_is_adjusted) {
        len = len_1;
    }
    else {
        len = rb_int_plus(len_1, INT2FIX(1));
    }

    rb_scan_args(argc, argv, "1", &nv);
    if (!RB_INTEGER_TYPE_P(nv)) {
        nv = rb_to_int(nv);
    }
    /* Clamp the requested count to the sequence length. */
    if (RTEST(rb_int_gt(nv, len))) {
        nv = len;
    }
    n = NUM2LONG(nv);
    if (n < 0) {
        rb_raise(rb_eArgError, "negative array size");
    }

    /* Walk forward from (last - s*nv) so elements come out in order. */
    ary = rb_ary_new_capa(n);
    b = rb_int_minus(last, rb_int_mul(s, nv));
    while (n) {
        b = rb_int_plus(b, s);
        rb_ary_push(ary, b);
        --n;
    }

    return ary;
}
4332
4333/*
4334 * call-seq:
4335 * aseq.inspect -> string
4336 *
4337 * Convert this arithmetic sequence to a printable form.
4338 */
static VALUE
arith_seq_inspect(VALUE self)
{
    struct enumerator *e;
    VALUE eobj, str, eargs;
    int range_p;

    TypedData_Get_Struct(self, struct enumerator, &enumerator_data_type, e);

    /* Prefer the receiver recorded on the object; fall back to the
     * enumerator's own target. */
    eobj = rb_attr_get(self, id_receiver);
    if (NIL_P(eobj)) {
        eobj = e->obj;
    }

    /* Range receivers get an extra pair of parentheses around them. */
    range_p = RTEST(rb_obj_is_kind_of(eobj, rb_cRange));
    str = rb_sprintf("(%s%"PRIsVALUE"%s.", range_p ? "(" : "", eobj, range_p ? ")" : "");

    rb_str_buf_append(str, rb_id2str(e->meth));

    eargs = rb_attr_get(eobj, id_arguments);
    if (NIL_P(eargs)) {
        eargs = e->args;
    }
    if (eargs != Qfalse) {
        long argc = RARRAY_LEN(eargs);
        const VALUE *argv = RARRAY_CONST_PTR(eargs); /* WB: no new reference */

        if (argc > 0) {
            VALUE kwds = Qnil;

            rb_str_buf_cat2(str, "(");

            /* A trailing hash with all-symbol keys is rendered as
             * keyword arguments rather than a positional hash. */
            if (RB_TYPE_P(argv[argc-1], T_HASH)) {
                int all_key = TRUE;
                rb_hash_foreach(argv[argc-1], key_symbol_p, (VALUE)&all_key);
                if (all_key) kwds = argv[--argc];
            }

            while (argc--) {
                VALUE arg = *argv++;

                rb_str_append(str, rb_inspect(arg));
                rb_str_buf_cat2(str, ", ");
            }
            if (!NIL_P(kwds)) {
                rb_hash_foreach(kwds, kwd_append, str);
            }
            rb_str_set_len(str, RSTRING_LEN(str)-2); /* drop the last ", " */
            rb_str_buf_cat2(str, ")");
        }
    }

    rb_str_buf_cat2(str, ")");

    return str;
}
4395
4396/*
4397 * call-seq:
4398 * aseq == obj -> true or false
4399 *
4400 * Returns <code>true</code> only if +obj+ is an Enumerator::ArithmeticSequence,
4401 * has equivalent begin, end, step, and exclude_end? settings.
4402 */
4403static VALUE
4404arith_seq_eq(VALUE self, VALUE other)
4405{
4406 if (!RTEST(rb_obj_is_kind_of(other, rb_cArithSeq))) {
4407 return Qfalse;
4408 }
4409
4410 if (!rb_equal(arith_seq_begin(self), arith_seq_begin(other))) {
4411 return Qfalse;
4412 }
4413
4414 if (!rb_equal(arith_seq_end(self), arith_seq_end(other))) {
4415 return Qfalse;
4416 }
4417
4418 if (!rb_equal(arith_seq_step(self), arith_seq_step(other))) {
4419 return Qfalse;
4420 }
4421
4422 if (arith_seq_exclude_end_p(self) != arith_seq_exclude_end_p(other)) {
4423 return Qfalse;
4424 }
4425
4426 return Qtrue;
4427}
4428
4429/*
4430 * call-seq:
4431 * aseq.hash -> integer
4432 *
4433 * Compute a hash-value for this arithmetic sequence.
4434 * Two arithmetic sequences with same begin, end, step, and exclude_end?
4435 * values will generate the same hash-value.
4436 *
4437 * See also Object#hash.
4438 */
static VALUE
arith_seq_hash(VALUE self)
{
    st_index_t hash;
    VALUE v;

    /* Mix exclude_end?, begin, end, and step — the same fields compared
     * by arith_seq_eq, so equal sequences hash equally. */
    hash = rb_hash_start(arith_seq_exclude_end_p(self));
    v = rb_hash(arith_seq_begin(self));
    hash = rb_hash_uint(hash, NUM2LONG(v));
    v = rb_hash(arith_seq_end(self));
    hash = rb_hash_uint(hash, NUM2LONG(v));
    v = rb_hash(arith_seq_step(self));
    hash = rb_hash_uint(hash, NUM2LONG(v));
    hash = rb_hash_end(hash);

    return ST2FIX(hash);
}
4456
4457#define NUM_GE(x, y) RTEST(rb_num_coerce_relop((x), (y), idGE))
4458
4460 VALUE current;
4461 VALUE end;
4462 VALUE step;
4463 int excl;
4464};
4465
4466/*
4467 * call-seq:
4468 * aseq.each {|i| block } -> aseq
4469 * aseq.each -> aseq
4470 */
static VALUE
arith_seq_each(VALUE self)
{
    VALUE c, e, s, len_1, last;
    int x;

    if (!rb_block_given_p()) return self;

    c = arith_seq_begin(self);
    e = arith_seq_end(self);
    s = arith_seq_step(self);
    x = arith_seq_exclude_end_p(self);

    /* Float components are iterated wholesale by ruby_float_step;
     * Complex steps fall through to the generic paths below. */
    if (!RB_TYPE_P(s, T_COMPLEX) && ruby_float_step(c, e, s, x, TRUE)) {
        return self;
    }

    /* Endless sequence: yield forever. */
    if (NIL_P(e)) {
        while (1) {
            rb_yield(c);
            c = rb_int_plus(c, s);
        }

        return self;
    }

    /* Zero step: the same value repeats forever. */
    if (rb_equal(s, INT2FIX(0))) {
        while (1) {
            rb_yield(c);
        }

        return self;
    }

    /* last = c + s * ((e - c) / s), stepped back once when the end is
     * excluded and falls exactly on the grid. */
    len_1 = num_idiv(num_minus(e, c), s);
    last = num_plus(c, num_mul(s, len_1));
    if (x && rb_equal(last, e)) {
        last = num_minus(last, s);
    }

    if (rb_num_negative_int_p(s)) {
        while (NUM_GE(c, last)) {
            rb_yield(c);
            c = num_plus(c, s);
        }
    }
    else {
        while (NUM_GE(last, c)) {
            rb_yield(c);
            c = num_plus(c, s);
        }
    }

    return self;
}
4526
4527/*
4528 * call-seq:
4529 * aseq.size -> num or nil
4530 *
4531 * Returns the number of elements in this arithmetic sequence if it is a finite
4532 * sequence. Otherwise, returns <code>nil</code>.
4533 */
static VALUE
arith_seq_size(VALUE self)
{
    VALUE b, e, s, len_1, len, last;
    int x;

    b = arith_seq_begin(self);
    e = arith_seq_end(self);
    s = arith_seq_step(self);
    x = arith_seq_exclude_end_p(self);

    if (RB_FLOAT_TYPE_P(b) || RB_FLOAT_TYPE_P(e) || RB_FLOAT_TYPE_P(s)) {
        /* Float components: delegate the boundary arithmetic to
         * ruby_float_step_size, substituting +/-inf for a nil end. */
        double ee, n;

        if (NIL_P(e)) {
            if (rb_num_negative_int_p(s)) {
                ee = -HUGE_VAL;
            }
            else {
                ee = HUGE_VAL;
            }
        }
        else {
            ee = NUM2DBL(e);
        }

        n = ruby_float_step_size(NUM2DBL(b), ee, NUM2DBL(s), x);
        if (isinf(n)) return DBL2NUM(n);
        if (POSFIXABLE(n)) return LONG2FIX((long)n);
        return rb_dbl2big(n);
    }

    /* An endless sequence has infinite size. */
    if (NIL_P(e)) {
        return DBL2NUM(HUGE_VAL);
    }

    if (!rb_obj_is_kind_of(s, rb_cNumeric)) {
        s = rb_to_int(s);
    }

    /* A zero step never reaches the end. */
    if (rb_equal(s, INT2FIX(0))) {
        return DBL2NUM(HUGE_VAL);
    }

    len_1 = rb_int_idiv(rb_int_minus(e, b), s);
    if (rb_num_negative_int_p(len_1)) {
        return INT2FIX(0);
    }

    /* Drop one element when the end is excluded and lies on the grid. */
    last = rb_int_plus(b, rb_int_mul(s, len_1));
    if (x && rb_equal(last, e)) {
        len = len_1;
    }
    else {
        len = rb_int_plus(len_1, INT2FIX(1));
    }

    return len;
}
4593
4594#define sym(name) ID2SYM(rb_intern_const(name))
4595void
4596InitVM_Enumerator(void)
4597{
4598 ID id_private = rb_intern_const("private");
4599
4600 rb_define_method(rb_mKernel, "to_enum", obj_to_enum, -1);
4601 rb_define_method(rb_mKernel, "enum_for", obj_to_enum, -1);
4602
4603 rb_cEnumerator = rb_define_class("Enumerator", rb_cObject);
4605
4606 rb_define_alloc_func(rb_cEnumerator, enumerator_allocate);
4607 rb_define_method(rb_cEnumerator, "initialize", enumerator_initialize, -1);
4608 rb_define_method(rb_cEnumerator, "initialize_copy", enumerator_init_copy, 1);
4609 rb_define_method(rb_cEnumerator, "each", enumerator_each, -1);
4610 rb_define_method(rb_cEnumerator, "each_with_index", enumerator_each_with_index, 0);
4611 rb_define_method(rb_cEnumerator, "each_with_object", enumerator_with_object, 1);
4612 rb_define_method(rb_cEnumerator, "with_index", enumerator_with_index, -1);
4613 rb_define_method(rb_cEnumerator, "with_object", enumerator_with_object, 1);
4614 rb_define_method(rb_cEnumerator, "next_values", enumerator_next_values, 0);
4615 rb_define_method(rb_cEnumerator, "peek_values", enumerator_peek_values_m, 0);
4616 rb_define_method(rb_cEnumerator, "next", enumerator_next, 0);
4617 rb_define_method(rb_cEnumerator, "peek", enumerator_peek, 0);
4618 rb_define_method(rb_cEnumerator, "feed", enumerator_feed, 1);
4619 rb_define_method(rb_cEnumerator, "rewind", enumerator_rewind, 0);
4620 rb_define_method(rb_cEnumerator, "inspect", enumerator_inspect, 0);
4621 rb_define_method(rb_cEnumerator, "size", enumerator_size, 0);
4622 rb_define_method(rb_cEnumerator, "+", enumerator_plus, 1);
4624
4625 /* Lazy */
4627 rb_define_method(rb_mEnumerable, "lazy", enumerable_lazy, 0);
4628
4629 rb_define_alias(rb_cLazy, "_enumerable_map", "map");
4630 rb_define_alias(rb_cLazy, "_enumerable_collect", "collect");
4631 rb_define_alias(rb_cLazy, "_enumerable_flat_map", "flat_map");
4632 rb_define_alias(rb_cLazy, "_enumerable_collect_concat", "collect_concat");
4633 rb_define_alias(rb_cLazy, "_enumerable_select", "select");
4634 rb_define_alias(rb_cLazy, "_enumerable_find_all", "find_all");
4635 rb_define_alias(rb_cLazy, "_enumerable_filter", "filter");
4636 rb_define_alias(rb_cLazy, "_enumerable_filter_map", "filter_map");
4637 rb_define_alias(rb_cLazy, "_enumerable_reject", "reject");
4638 rb_define_alias(rb_cLazy, "_enumerable_grep", "grep");
4639 rb_define_alias(rb_cLazy, "_enumerable_grep_v", "grep_v");
4640 rb_define_alias(rb_cLazy, "_enumerable_zip", "zip");
4641 rb_define_alias(rb_cLazy, "_enumerable_take", "take");
4642 rb_define_alias(rb_cLazy, "_enumerable_take_while", "take_while");
4643 rb_define_alias(rb_cLazy, "_enumerable_drop", "drop");
4644 rb_define_alias(rb_cLazy, "_enumerable_drop_while", "drop_while");
4645 rb_define_alias(rb_cLazy, "_enumerable_uniq", "uniq");
4646 rb_define_private_method(rb_cLazy, "_enumerable_with_index", enumerator_with_index, -1);
4647
4648 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_map"));
4649 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_collect"));
4650 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_flat_map"));
4651 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_collect_concat"));
4652 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_select"));
4653 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_find_all"));
4654 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_filter"));
4655 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_filter_map"));
4656 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_reject"));
4657 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_grep"));
4658 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_grep_v"));
4659 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_zip"));
4660 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_take"));
4661 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_take_while"));
4662 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_drop"));
4663 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_drop_while"));
4664 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_uniq"));
4665
4666 rb_define_method(rb_cLazy, "initialize", lazy_initialize, -1);
4667 rb_define_method(rb_cLazy, "to_enum", lazy_to_enum, -1);
4668 rb_define_method(rb_cLazy, "enum_for", lazy_to_enum, -1);
4669 rb_define_method(rb_cLazy, "eager", lazy_eager, 0);
4670 rb_define_method(rb_cLazy, "map", lazy_map, 0);
4671 rb_define_method(rb_cLazy, "collect", lazy_map, 0);
4672 rb_define_method(rb_cLazy, "flat_map", lazy_flat_map, 0);
4673 rb_define_method(rb_cLazy, "collect_concat", lazy_flat_map, 0);
4674 rb_define_method(rb_cLazy, "select", lazy_select, 0);
4675 rb_define_method(rb_cLazy, "find_all", lazy_select, 0);
4676 rb_define_method(rb_cLazy, "filter", lazy_select, 0);
4677 rb_define_method(rb_cLazy, "filter_map", lazy_filter_map, 0);
4678 rb_define_method(rb_cLazy, "reject", lazy_reject, 0);
4679 rb_define_method(rb_cLazy, "grep", lazy_grep, 1);
4680 rb_define_method(rb_cLazy, "grep_v", lazy_grep_v, 1);
4681 rb_define_method(rb_cLazy, "zip", lazy_zip, -1);
4682 rb_define_method(rb_cLazy, "take", lazy_take, 1);
4683 rb_define_method(rb_cLazy, "take_while", lazy_take_while, 0);
4684 rb_define_method(rb_cLazy, "drop", lazy_drop, 1);
4685 rb_define_method(rb_cLazy, "drop_while", lazy_drop_while, 0);
4686 rb_define_method(rb_cLazy, "lazy", lazy_lazy, 0);
4687 rb_define_method(rb_cLazy, "chunk", lazy_super, -1);
4688 rb_define_method(rb_cLazy, "slice_before", lazy_super, -1);
4689 rb_define_method(rb_cLazy, "slice_after", lazy_super, -1);
4690 rb_define_method(rb_cLazy, "slice_when", lazy_super, -1);
4691 rb_define_method(rb_cLazy, "chunk_while", lazy_super, -1);
4692 rb_define_method(rb_cLazy, "uniq", lazy_uniq, 0);
4693 rb_define_method(rb_cLazy, "compact", lazy_compact, 0);
4694 rb_define_method(rb_cLazy, "with_index", lazy_with_index, -1);
4695 rb_define_method(rb_cLazy, "tap_each", lazy_tap_each, 0);
4696
4697 lazy_use_super_method = rb_hash_new_with_size(18);
4698 rb_hash_aset(lazy_use_super_method, sym("map"), sym("_enumerable_map"));
4699 rb_hash_aset(lazy_use_super_method, sym("collect"), sym("_enumerable_collect"));
4700 rb_hash_aset(lazy_use_super_method, sym("flat_map"), sym("_enumerable_flat_map"));
4701 rb_hash_aset(lazy_use_super_method, sym("collect_concat"), sym("_enumerable_collect_concat"));
4702 rb_hash_aset(lazy_use_super_method, sym("select"), sym("_enumerable_select"));
4703 rb_hash_aset(lazy_use_super_method, sym("find_all"), sym("_enumerable_find_all"));
4704 rb_hash_aset(lazy_use_super_method, sym("filter"), sym("_enumerable_filter"));
4705 rb_hash_aset(lazy_use_super_method, sym("filter_map"), sym("_enumerable_filter_map"));
4706 rb_hash_aset(lazy_use_super_method, sym("reject"), sym("_enumerable_reject"));
4707 rb_hash_aset(lazy_use_super_method, sym("grep"), sym("_enumerable_grep"));
4708 rb_hash_aset(lazy_use_super_method, sym("grep_v"), sym("_enumerable_grep_v"));
4709 rb_hash_aset(lazy_use_super_method, sym("zip"), sym("_enumerable_zip"));
4710 rb_hash_aset(lazy_use_super_method, sym("take"), sym("_enumerable_take"));
4711 rb_hash_aset(lazy_use_super_method, sym("take_while"), sym("_enumerable_take_while"));
4712 rb_hash_aset(lazy_use_super_method, sym("drop"), sym("_enumerable_drop"));
4713 rb_hash_aset(lazy_use_super_method, sym("drop_while"), sym("_enumerable_drop_while"));
4714 rb_hash_aset(lazy_use_super_method, sym("uniq"), sym("_enumerable_uniq"));
4715 rb_hash_aset(lazy_use_super_method, sym("with_index"), sym("_enumerable_with_index"));
4716 rb_obj_freeze(lazy_use_super_method);
4717 rb_vm_register_global_object(lazy_use_super_method);
4718
4719#if 0 /* for RDoc */
4720 rb_define_method(rb_cLazy, "to_a", lazy_to_a, 0);
4721 rb_define_method(rb_cLazy, "chunk", lazy_chunk, 0);
4722 rb_define_method(rb_cLazy, "chunk_while", lazy_chunk_while, 0);
4723 rb_define_method(rb_cLazy, "slice_after", lazy_slice_after, 0);
4724 rb_define_method(rb_cLazy, "slice_before", lazy_slice_before, 0);
4725 rb_define_method(rb_cLazy, "slice_when", lazy_slice_when, 0);
4726#endif
4727 rb_define_alias(rb_cLazy, "force", "to_a");
4728
4730 rb_define_method(rb_eStopIteration, "result", stop_result, 0);
4731
4732 /* Generator */
4733 rb_cGenerator = rb_define_class_under(rb_cEnumerator, "Generator", rb_cObject);
4734 rb_include_module(rb_cGenerator, rb_mEnumerable);
4735 rb_define_alloc_func(rb_cGenerator, generator_allocate);
4736 rb_define_method(rb_cGenerator, "initialize", generator_initialize, -1);
4737 rb_define_method(rb_cGenerator, "initialize_copy", generator_init_copy, 1);
4738 rb_define_method(rb_cGenerator, "each", generator_each, -1);
4739
4740 /* Yielder */
4741 rb_cYielder = rb_define_class_under(rb_cEnumerator, "Yielder", rb_cObject);
4742 rb_define_alloc_func(rb_cYielder, yielder_allocate);
4743 rb_define_method(rb_cYielder, "initialize", yielder_initialize, 0);
4744 rb_define_method(rb_cYielder, "yield", yielder_yield, -2);
4745 rb_define_method(rb_cYielder, "<<", yielder_yield_push, 1);
4746 rb_define_method(rb_cYielder, "to_proc", yielder_to_proc, 0);
4747
4748 /* Producer */
4749 rb_cEnumProducer = rb_define_class_under(rb_cEnumerator, "Producer", rb_cObject);
4750 rb_define_alloc_func(rb_cEnumProducer, producer_allocate);
4751 rb_define_method(rb_cEnumProducer, "each", producer_each, 0);
4752 rb_define_singleton_method(rb_cEnumerator, "produce", enumerator_s_produce, -1);
4753
4754 /* Chain */
4755 rb_cEnumChain = rb_define_class_under(rb_cEnumerator, "Chain", rb_cEnumerator);
4756 rb_define_alloc_func(rb_cEnumChain, enum_chain_allocate);
4757 rb_define_method(rb_cEnumChain, "initialize", enum_chain_initialize, -2);
4758 rb_define_method(rb_cEnumChain, "initialize_copy", enum_chain_init_copy, 1);
4759 rb_define_method(rb_cEnumChain, "each", enum_chain_each, -1);
4760 rb_define_method(rb_cEnumChain, "size", enum_chain_size, 0);
4761 rb_define_method(rb_cEnumChain, "rewind", enum_chain_rewind, 0);
4762 rb_define_method(rb_cEnumChain, "inspect", enum_chain_inspect, 0);
4763 rb_undef_method(rb_cEnumChain, "feed");
4764 rb_undef_method(rb_cEnumChain, "next");
4765 rb_undef_method(rb_cEnumChain, "next_values");
4766 rb_undef_method(rb_cEnumChain, "peek");
4767 rb_undef_method(rb_cEnumChain, "peek_values");
4768
4769 /* Product */
4770 rb_cEnumProduct = rb_define_class_under(rb_cEnumerator, "Product", rb_cEnumerator);
4771 rb_define_alloc_func(rb_cEnumProduct, enum_product_allocate);
4772 rb_define_method(rb_cEnumProduct, "initialize", enum_product_initialize, -1);
4773 rb_define_method(rb_cEnumProduct, "initialize_copy", enum_product_init_copy, 1);
4774 rb_define_method(rb_cEnumProduct, "each", enum_product_each, 0);
4775 rb_define_method(rb_cEnumProduct, "size", enum_product_size, 0);
4776 rb_define_method(rb_cEnumProduct, "rewind", enum_product_rewind, 0);
4777 rb_define_method(rb_cEnumProduct, "inspect", enum_product_inspect, 0);
4778 rb_undef_method(rb_cEnumProduct, "feed");
4779 rb_undef_method(rb_cEnumProduct, "next");
4780 rb_undef_method(rb_cEnumProduct, "next_values");
4781 rb_undef_method(rb_cEnumProduct, "peek");
4782 rb_undef_method(rb_cEnumProduct, "peek_values");
4783 rb_define_singleton_method(rb_cEnumerator, "product", enumerator_s_product, -1);
4784
4785 /* ArithmeticSequence */
4786 rb_cArithSeq = rb_define_class_under(rb_cEnumerator, "ArithmeticSequence", rb_cEnumerator);
4787 rb_undef_alloc_func(rb_cArithSeq);
4788 rb_undef_method(CLASS_OF(rb_cArithSeq), "new");
4789 rb_define_method(rb_cArithSeq, "begin", arith_seq_begin, 0);
4790 rb_define_method(rb_cArithSeq, "end", arith_seq_end, 0);
4791 rb_define_method(rb_cArithSeq, "exclude_end?", arith_seq_exclude_end, 0);
4792 rb_define_method(rb_cArithSeq, "step", arith_seq_step, 0);
4793 rb_define_method(rb_cArithSeq, "first", arith_seq_first, -1);
4794 rb_define_method(rb_cArithSeq, "last", arith_seq_last, -1);
4795 rb_define_method(rb_cArithSeq, "inspect", arith_seq_inspect, 0);
4796 rb_define_method(rb_cArithSeq, "==", arith_seq_eq, 1);
4797 rb_define_method(rb_cArithSeq, "===", arith_seq_eq, 1);
4798 rb_define_method(rb_cArithSeq, "eql?", arith_seq_eq, 1);
4799 rb_define_method(rb_cArithSeq, "hash", arith_seq_hash, 0);
4800 rb_define_method(rb_cArithSeq, "each", arith_seq_each, 0);
4801 rb_define_method(rb_cArithSeq, "size", arith_seq_size, 0);
4802
4803 rb_provide("enumerator.so"); /* for backward compatibility */
4804}
4805#undef sym
4806
void
Init_Enumerator(void)
{
    /* Intern the method/keyword names used throughout this file so the
     * hot paths (next/peek/rewind, lazy forcing, inspect) can compare and
     * dispatch on pre-computed IDs instead of re-interning C strings. */
    id_rewind = rb_intern_const("rewind");
    id_next = rb_intern_const("next");
    id_result = rb_intern_const("result");        /* StopIteration#result */
    id_receiver = rb_intern_const("receiver");
    id_arguments = rb_intern_const("arguments");
    id_memo = rb_intern_const("memo");
    id_method = rb_intern_const("method");
    id_force = rb_intern_const("force");          /* Lazy#force (alias of to_a) */
    id_to_enum = rb_intern_const("to_enum");
    id_each_entry = rb_intern_const("each_entry");
    /* Pre-built Symbol objects; id_each is interned elsewhere (id.h). */
    sym_each = ID2SYM(id_each);
    sym_yield = ID2SYM(rb_intern_const("yield"));

    /* Per-VM class/method registration lives in InitVM_Enumerator. */
    InitVM(Enumerator);
}
#define rb_define_method(klass, mid, func, arity)
Defines klass#mid.
#define rb_define_singleton_method(klass, mid, func, arity)
Defines klass.mid.
#define rb_define_private_method(klass, mid, func, arity)
Defines klass#mid and makes it private.
void rb_include_module(VALUE klass, VALUE module)
Includes a module to a class.
Definition class.c:1730
VALUE rb_define_class(const char *name, VALUE super)
Defines a top-level class.
Definition class.c:1523
VALUE rb_define_class_under(VALUE outer, const char *name, VALUE super)
Defines a class under the namespace of outer.
Definition class.c:1554
void rb_define_alias(VALUE klass, const char *name1, const char *name2)
Defines an alias of a method.
Definition class.c:2890
void rb_need_block(void)
Declares that the current method needs a block.
Definition eval.c:1039
void rb_undef_method(VALUE klass, const char *name)
Defines an undef of a method.
Definition class.c:2700
int rb_scan_args_kw(int kw_flag, int argc, const VALUE *argv, const char *fmt,...)
Identical to rb_scan_args(), except it also accepts kw_splat.
Definition class.c:3193
int rb_scan_args(int argc, const VALUE *argv, const char *fmt,...)
Retrieves argument from argc and argv to given VALUE references according to the format string.
Definition class.c:3180
int rb_keyword_given_p(void)
Determines if the current method is given a keyword argument.
Definition eval.c:1031
int rb_block_given_p(void)
Determines if the current method is given a block.
Definition eval.c:1018
int rb_get_kwargs(VALUE keyword_hash, const ID *table, int required, int optional, VALUE *values)
Keyword argument deconstructor.
Definition class.c:2969
#define T_COMPLEX
Old name of RUBY_T_COMPLEX.
Definition value_type.h:59
#define RB_INTEGER_TYPE_P
Old name of rb_integer_type_p.
Definition value_type.h:87
#define rb_str_buf_cat2
Old name of rb_usascii_str_new_cstr.
Definition string.h:1683
#define OBJ_INIT_COPY(obj, orig)
Old name of RB_OBJ_INIT_COPY.
Definition object.h:41
#define RFLOAT_VALUE
Old name of rb_float_value.
Definition double.h:28
#define Qundef
Old name of RUBY_Qundef.
#define INT2FIX
Old name of RB_INT2FIX.
Definition long.h:48
#define T_FLOAT
Old name of RUBY_T_FLOAT.
Definition value_type.h:64
#define ID2SYM
Old name of RB_ID2SYM.
Definition symbol.h:44
#define UNREACHABLE_RETURN
Old name of RBIMPL_UNREACHABLE_RETURN.
Definition assume.h:29
#define CLASS_OF
Old name of rb_class_of.
Definition globals.h:205
#define rb_ary_new4
Old name of rb_ary_new_from_values.
Definition array.h:659
#define FIXABLE
Old name of RB_FIXABLE.
Definition fixnum.h:25
#define rb_exc_new2
Old name of rb_exc_new_cstr.
Definition error.h:37
#define LONG2FIX
Old name of RB_INT2FIX.
Definition long.h:49
#define T_RATIONAL
Old name of RUBY_T_RATIONAL.
Definition value_type.h:76
#define T_HASH
Old name of RUBY_T_HASH.
Definition value_type.h:65
#define NUM2DBL
Old name of rb_num2dbl.
Definition double.h:27
#define rb_ary_new3
Old name of rb_ary_new_from_args.
Definition array.h:658
#define LONG2NUM
Old name of RB_LONG2NUM.
Definition long.h:50
#define Qtrue
Old name of RUBY_Qtrue.
#define ST2FIX
Old name of RB_ST2FIX.
Definition st_data_t.h:33
#define Qnil
Old name of RUBY_Qnil.
#define Qfalse
Old name of RUBY_Qfalse.
#define FIX2LONG
Old name of RB_FIX2LONG.
Definition long.h:46
#define T_ARRAY
Old name of RUBY_T_ARRAY.
Definition value_type.h:56
#define NIL_P
Old name of RB_NIL_P.
#define ALLOCV_N
Old name of RB_ALLOCV_N.
Definition memory.h:405
#define POSFIXABLE
Old name of RB_POSFIXABLE.
Definition fixnum.h:29
#define T_SYMBOL
Old name of RUBY_T_SYMBOL.
Definition value_type.h:80
#define DBL2NUM
Old name of rb_float_new.
Definition double.h:29
#define NUM2LONG
Old name of RB_NUM2LONG.
Definition long.h:51
#define FIXNUM_P
Old name of RB_FIXNUM_P.
#define rb_ary_new2
Old name of rb_ary_new_capa.
Definition array.h:657
#define ALLOCV_END
Old name of RB_ALLOCV_END.
Definition memory.h:406
#define SYMBOL_P
Old name of RB_SYMBOL_P.
Definition value_type.h:88
void rb_exc_raise(VALUE mesg)
Raises an exception in the current thread.
Definition eval.c:661
void rb_iter_break(void)
Breaks from a block.
Definition vm.c:2292
VALUE rb_eRangeError
RangeError exception.
Definition error.c:1431
VALUE rb_eTypeError
TypeError exception.
Definition error.c:1427
VALUE rb_eRuntimeError
RuntimeError exception.
Definition error.c:1425
VALUE rb_eStopIteration
StopIteration exception.
Definition enumerator.c:195
void rb_warn(const char *fmt,...)
Identical to rb_warning(), except it reports unless $VERBOSE is nil.
Definition error.c:467
VALUE rb_exc_new_str(VALUE etype, VALUE str)
Identical to rb_exc_new_cstr(), except it takes a Ruby's string instead of C's.
Definition error.c:1478
VALUE rb_eIndexError
IndexError exception.
Definition error.c:1429
VALUE rb_mKernel
Kernel module.
Definition object.c:60
VALUE rb_cObject
Object class.
Definition object.c:61
VALUE rb_mEnumerable
Enumerable module.
Definition enum.c:27
VALUE rb_cEnumerator
Enumerator class.
Definition enumerator.c:178
VALUE rb_obj_hide(VALUE obj)
Make the object invisible from Ruby code.
Definition object.c:95
VALUE rb_cNumeric
Numeric class.
Definition numeric.c:197
VALUE rb_obj_class(VALUE obj)
Queries the class of an object.
Definition object.c:229
VALUE rb_obj_dup(VALUE obj)
Duplicates the given object.
Definition object.c:547
VALUE rb_inspect(VALUE obj)
Generates a human-readable textual representation of the given object.
Definition object.c:651
VALUE rb_cRange
Range class.
Definition range.c:31
VALUE rb_equal(VALUE lhs, VALUE rhs)
This function is an optimised version of calling #==.
Definition object.c:141
VALUE rb_obj_is_kind_of(VALUE obj, VALUE klass)
Queries if the given object is an instance (of possibly descendants) of the given class.
Definition object.c:888
VALUE rb_obj_freeze(VALUE obj)
Just calls rb_obj_freeze_inline() inside.
Definition object.c:1307
VALUE rb_to_int(VALUE val)
Identical to rb_check_to_int(), except it raises in case of conversion mismatch.
Definition object.c:3327
#define RB_OBJ_WRITE(old, slot, young)
Declaration of a "back" pointer.
Definition gc.h:603
VALUE rb_funcall(VALUE recv, ID mid, int n,...)
Calls a method.
Definition vm_eval.c:1120
VALUE rb_funcall_with_block(VALUE recv, ID mid, int argc, const VALUE *argv, VALUE procval)
Identical to rb_funcallv_public(), except you can pass a block.
Definition vm_eval.c:1197
#define rb_funcall2
Definition eval.h:207
VALUE rb_call_super(int argc, const VALUE *argv)
This resembles ruby's super.
Definition vm_eval.c:362
VALUE rb_ary_new_from_values(long n, const VALUE *elts)
Identical to rb_ary_new_from_args(), except how objects are passed.
VALUE rb_ary_dup(VALUE ary)
Duplicates an array.
VALUE rb_ary_cat(VALUE ary, const VALUE *train, long len)
Destructively appends multiple elements at the end of the array.
VALUE rb_check_array_type(VALUE obj)
Try converting an object to its array representation using its to_ary method, if any.
VALUE rb_ary_new(void)
Allocates a new, empty array.
VALUE rb_ary_new_capa(long capa)
Identical to rb_ary_new(), except it additionally specifies how many rooms of objects it should alloc...
VALUE rb_ary_resize(VALUE ary, long len)
Expands or shrinks the passed array to the passed length.
VALUE rb_ary_hidden_new(long capa)
Allocates a hidden (no class) empty array.
VALUE rb_ary_push(VALUE ary, VALUE elem)
Special case of rb_ary_cat() that it adds only one element.
VALUE rb_ary_freeze(VALUE obj)
Freeze an array, preventing further modifications.
VALUE rb_ary_entry(VALUE ary, long off)
Queries an element of an array.
void rb_ary_store(VALUE ary, long key, VALUE val)
Destructively stores the passed value to the passed array's passed index.
#define RETURN_SIZED_ENUMERATOR(obj, argc, argv, size_fn)
This roughly resembles return enum_for(__callee__) unless block_given?.
Definition enumerator.h:208
VALUE rb_enumerator_size_func(VALUE recv, VALUE argv, VALUE eobj)
This is the type of functions that rb_enumeratorize_with_size() expects.
Definition enumerator.h:45
static int rb_check_arity(int argc, int min, int max)
Ensures that the passed integer is in the passed range.
Definition error.h:284
void rb_provide(const char *feature)
Declares that the given feature is already provided by someone else.
Definition load.c:695
VALUE rb_num_coerce_cmp(VALUE lhs, VALUE rhs, ID op)
Identical to rb_num_coerce_bin(), except for return values.
Definition numeric.c:485
VALUE rb_obj_method(VALUE recv, VALUE mid)
Creates a method object.
Definition proc.c:2261
VALUE rb_block_proc(void)
Constructs a Proc object from implicitly passed components.
Definition proc.c:988
VALUE rb_proc_call_with_block(VALUE recv, int argc, const VALUE *argv, VALUE proc)
Identical to rb_proc_call(), except you can additionally pass another proc object,...
Definition proc.c:1169
VALUE rb_proc_call_kw(VALUE recv, VALUE args, int kw_splat)
Identical to rb_proc_call(), except you can specify how to handle the last element of the given array...
Definition proc.c:1130
VALUE rb_obj_is_proc(VALUE recv)
Queries if the given object is a proc.
Definition proc.c:122
int rb_range_values(VALUE range, VALUE *begp, VALUE *endp, int *exclp)
Deconstructs a range into its components.
Definition range.c:1861
#define rb_hash_uint(h, i)
Just another name of st_hash_uint.
Definition string.h:943
#define rb_hash_end(h)
Just another name of st_hash_end.
Definition string.h:946
VALUE rb_str_append(VALUE dst, VALUE src)
Identical to rb_str_buf_append(), except it converts the right hand side before concatenating.
Definition string.c:3818
VALUE rb_str_dup(VALUE str)
Duplicates a string.
Definition string.c:1979
VALUE rb_str_buf_append(VALUE dst, VALUE src)
Identical to rb_str_cat_cstr(), except it takes Ruby's string instead of C's.
Definition string.c:3784
void rb_str_set_len(VALUE str, long len)
Overwrites the length of the string.
Definition string.c:3405
st_index_t rb_hash_start(st_index_t i)
Starts a series of hashing.
Definition random.c:1785
VALUE rb_exec_recursive(VALUE(*f)(VALUE g, VALUE h, int r), VALUE g, VALUE h)
"Recursion" API entry point.
VALUE rb_ivar_set(VALUE obj, ID name, VALUE val)
Identical to rb_iv_set(), except it accepts the name as an ID instead of a C string.
Definition variable.c:2034
VALUE rb_ivar_get(VALUE obj, ID name)
Identical to rb_iv_get(), except it accepts the name as an ID instead of a C string.
Definition variable.c:1502
VALUE rb_class_path(VALUE mod)
Identical to rb_mod_name(), except it returns #<Class: ...> style inspection for anonymous modules.
Definition variable.c:380
int rb_respond_to(VALUE obj, ID mid)
Queries if the object responds to the method.
Definition vm_method.c:3474
void rb_undef_alloc_func(VALUE klass)
Deletes the allocator function of a class.
Definition vm_method.c:1731
VALUE rb_check_funcall(VALUE recv, ID mid, int argc, const VALUE *argv)
Identical to rb_funcallv(), except it returns RUBY_Qundef instead of raising rb_eNoMethodError.
Definition vm_eval.c:689
VALUE rb_check_funcall_kw(VALUE recv, ID mid, int argc, const VALUE *argv, int kw_splat)
Identical to rb_check_funcall(), except you can specify how to handle the last element of the given a...
Definition vm_eval.c:683
void rb_define_alloc_func(VALUE klass, rb_alloc_func_t func)
Sets the allocator function of a class.
static ID rb_intern_const(const char *str)
This is a "tiny optimisation" over rb_intern().
Definition symbol.h:285
VALUE rb_sym2str(VALUE symbol)
Obtain a frozen string representation of a symbol (not including the leading colon).
Definition symbol.c:1024
ID rb_to_id(VALUE str)
Identical to rb_intern_str(), except it tries to convert the parameter object to an instance of rb_cS...
Definition string.c:12693
int len
Length of the buffer.
Definition io.h:8
#define RB_BLOCK_CALL_FUNC_ARGLIST(yielded_arg, callback_arg)
Shim for block function parameters.
Definition iterator.h:58
VALUE rb_yield_values(int n,...)
Identical to rb_yield(), except it takes variadic number of parameters and pass them to the block.
Definition vm_eval.c:1398
VALUE rb_yield_values2(int n, const VALUE *argv)
Identical to rb_yield_values(), except it takes the parameters as a C array instead of variadic argum...
Definition vm_eval.c:1420
VALUE rb_yield(VALUE val)
Yields the block.
Definition vm_eval.c:1375
VALUE rb_yield_values_kw(int n, const VALUE *argv, int kw_splat)
Identical to rb_yield_values2(), except you can specify how to handle the last element of the given a...
Definition vm_eval.c:1426
VALUE rb_block_call_func(RB_BLOCK_CALL_FUNC_ARGLIST(yielded_arg, callback_arg))
This is the type of a function that the interpreter expect for C-backended blocks.
Definition iterator.h:83
VALUE rb_block_call_kw(VALUE obj, ID mid, int argc, const VALUE *argv, rb_block_call_func_t proc, VALUE data2, int kw_splat)
Identical to rb_funcallv_kw(), except it additionally passes a function as a block.
Definition vm_eval.c:1566
#define rb_long2int
Just another name of rb_long2int_inline.
Definition long.h:62
#define MEMCPY(p1, p2, type, n)
Handy macro to call memcpy.
Definition memory.h:372
#define RB_GC_GUARD(v)
Prevents premature destruction of local objects.
Definition memory.h:167
VALUE rb_block_call(VALUE q, ID w, int e, const VALUE *r, type *t, VALUE y)
Call a method with a block.
VALUE rb_proc_new(type *q, VALUE w)
Creates a rb_cProc instance.
VALUE rb_fiber_new(type *q, VALUE w)
Creates a rb_cFiber instance.
void rb_hash_foreach(VALUE q, int_type *w, VALUE e)
Iteration over the given hash.
VALUE rb_rescue2(type *q, VALUE w, type *e, VALUE r,...)
An equivalent of rescue clause.
#define RARRAY_LEN
Just another name of rb_array_len.
Definition rarray.h:51
static int RARRAY_LENINT(VALUE ary)
Identical to rb_array_len(), except it differs for the return type.
Definition rarray.h:281
#define RARRAY_AREF(a, i)
Definition rarray.h:403
#define RARRAY_CONST_PTR
Just another name of rb_array_const_ptr.
Definition rarray.h:52
#define RHASH_EMPTY_P(h)
Checks if the hash is empty.
Definition rhash.h:79
#define RUBY_TYPED_DEFAULT_FREE
This is a value you can set to rb_data_type_struct::dfree.
Definition rtypeddata.h:81
#define RUBY_TYPED_FREE_IMMEDIATELY
Macros to see if each corresponding flag is defined.
Definition rtypeddata.h:122
#define TypedData_Get_Struct(obj, type, data_type, sval)
Obtains a C struct from inside of a wrapper Ruby object.
Definition rtypeddata.h:769
#define TypedData_Make_Struct(klass, type, data_type, sval)
Identical to TypedData_Wrap_Struct, except it allocates a new data region internally instead of takin...
Definition rtypeddata.h:578
#define InitVM(ext)
This macro is for internal use.
Definition ruby.h:231
#define RB_SCAN_ARGS_LAST_HASH_KEYWORDS
Treat a final argument as keywords if it is a hash, and not as keywords otherwise.
Definition scan_args.h:59
#define RB_PASS_CALLED_KEYWORDS
Pass keywords if current method is called with keywords, useful for argument delegation.
Definition scan_args.h:78
#define RB_NO_KEYWORDS
Do not pass keywords.
Definition scan_args.h:69
#define RTEST
This is an old name of RB_TEST.
#define _(args)
This was a transition path from K&R to ANSI.
Definition stdarg.h:35
MEMO.
Definition imemo.h:103
Definition enumerator.c:251
Decomposed Enumerator::ArithmeicSequence.
Definition enumerator.h:53
int exclude_end
Whether the endpoint is open or closed.
Definition enumerator.h:57
VALUE end
"Right" or "highest" endpoint of the sequence.
Definition enumerator.h:55
VALUE step
Step between a sequence.
Definition enumerator.h:56
VALUE begin
"Left" or "lowest" endpoint of the sequence.
Definition enumerator.h:54
This is the struct that holds necessary info for a struct.
Definition rtypeddata.h:229
VALUE flags
Type-specific behavioural characteristics.
Definition rtypeddata.h:343
uintptr_t ID
Type that represents a Ruby identifier such as a variable name.
Definition value.h:52
uintptr_t VALUE
Type that represents a Ruby object.
Definition value.h:40
static bool RB_FLOAT_TYPE_P(VALUE obj)
Queries if the object is an instance of rb_cFloat.
Definition value_type.h:264
static void Check_Type(VALUE v, enum ruby_value_type t)
Identical to RB_TYPE_P(), except it raises exceptions on predication failure.
Definition value_type.h:433
static bool RB_TYPE_P(VALUE obj, enum ruby_value_type t)
Queries if the given object is of given type.
Definition value_type.h:376