Ruby 3.5.0dev (2025-08-09 revision e639e5fd1af51e2462879d6db862ee5320914ba7)
enumerator.c (e639e5fd1af51e2462879d6db862ee5320914ba7)
1/************************************************
2
3 enumerator.c - provides Enumerator class
4
5 $Author$
6
7 Copyright (C) 2001-2003 Akinori MUSHA
8
9 $Idaemons: /home/cvs/rb/enumerator/enumerator.c,v 1.1.1.1 2001/07/15 10:12:48 knu Exp $
10 $RoughId: enumerator.c,v 1.6 2003/07/27 11:03:24 nobu Exp $
11 $Id$
12
13************************************************/
14
15#include "ruby/internal/config.h"
16
17#ifdef HAVE_FLOAT_H
18#include <float.h>
19#endif
20
21#include "id.h"
22#include "internal.h"
23#include "internal/class.h"
24#include "internal/enumerator.h"
25#include "internal/error.h"
26#include "internal/hash.h"
27#include "internal/imemo.h"
28#include "internal/numeric.h"
29#include "internal/range.h"
30#include "internal/rational.h"
31#include "ruby/ruby.h"
32
33/*
34 * Document-class: Enumerator
35 *
36 * A class which allows both internal and external iteration.
37 *
38 * An Enumerator can be created by the following methods.
39 * - Object#to_enum
40 * - Object#enum_for
41 * - Enumerator.new
42 *
43 * Most methods have two forms: a block form where the contents
44 * are evaluated for each item in the enumeration, and a non-block form
45 * which returns a new Enumerator wrapping the iteration.
46 *
47 * enumerator = %w(one two three).each
48 * puts enumerator.class # => Enumerator
49 *
50 * enumerator.each_with_object("foo") do |item, obj|
51 * puts "#{obj}: #{item}"
52 * end
53 *
54 * # foo: one
55 * # foo: two
56 * # foo: three
57 *
58 * enum_with_obj = enumerator.each_with_object("foo")
59 * puts enum_with_obj.class # => Enumerator
60 *
61 * enum_with_obj.each do |item, obj|
62 * puts "#{obj}: #{item}"
63 * end
64 *
65 * # foo: one
66 * # foo: two
67 * # foo: three
68 *
69 * This allows you to chain Enumerators together. For example, you
70 * can map a list's elements to strings containing the index
 71 * and the element via:
72 *
73 * puts %w[foo bar baz].map.with_index { |w, i| "#{i}:#{w}" }
74 * # => ["0:foo", "1:bar", "2:baz"]
75 *
76 * == External Iteration
77 *
78 * An Enumerator can also be used as an external iterator.
79 * For example, Enumerator#next returns the next value of the iterator
80 * or raises StopIteration if the Enumerator is at the end.
81 *
82 * e = [1,2,3].each # returns an enumerator object.
83 * puts e.next # => 1
84 * puts e.next # => 2
85 * puts e.next # => 3
86 * puts e.next # raises StopIteration
87 *
88 * +next+, +next_values+, +peek+, and +peek_values+ are the only methods
 89 * which use external iteration (as does Array#zip when given a non-Array Enumerable argument, since it uses +next+ internally).
 90 *
 91 * These methods do not affect other internal enumeration methods,
 92 * unless the underlying iteration method itself has side effects, e.g. IO#each_line.
 93 *
 94 * FrozenError will be raised if these methods are called on a frozen enumerator.
95 * Since +rewind+ and +feed+ also change state for external iteration,
96 * these methods may raise FrozenError too.
97 *
98 * External iteration differs *significantly* from internal iteration
99 * due to using a Fiber:
100 * - The Fiber adds some overhead compared to internal enumeration.
101 * - The stacktrace will only include the stack from the Enumerator, not above.
102 * - Fiber-local variables are *not* inherited inside the Enumerator Fiber,
103 * which instead starts with no Fiber-local variables.
104 * - Fiber storage variables *are* inherited and are designed
105 * to handle Enumerator Fibers. Assigning to a Fiber storage variable
106 * only affects the current Fiber, so if you want to change state
107 * in the caller Fiber of the Enumerator Fiber, you need to use an
108 * extra indirection (e.g., use some object in the Fiber storage
109 * variable and mutate some ivar of it).
110 *
111 * Concretely:
112 *
113 * Thread.current[:fiber_local] = 1
114 * Fiber[:storage_var] = 1
115 * e = Enumerator.new do |y|
116 * p Thread.current[:fiber_local] # for external iteration: nil, for internal iteration: 1
117 * p Fiber[:storage_var] # => 1, inherited
118 * Fiber[:storage_var] += 1
119 * y << 42
120 * end
121 *
122 * p e.next # => 42
123 * p Fiber[:storage_var] # => 1 (it ran in a different Fiber)
124 *
125 * e.each { p _1 }
126 * p Fiber[:storage_var] # => 2 (it ran in the same Fiber/"stack" as the current Fiber)
127 *
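 * The extra indirection mentioned above can look like this (an illustrative
 * sketch, not part of the original documentation): keep a mutable object in
 * the Fiber storage variable and mutate it instead of reassigning, so the
 * change is visible in the caller Fiber even under external iteration:
 *
 *   Fiber[:counter] = { count: 0 }
 *   e = Enumerator.new { |y| Fiber[:counter][:count] += 1; y << 42 }
 *   p e.next                    # => 42
 *   p Fiber[:counter][:count]   # => 1, the mutation is visible to the caller
 *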
128 * == Convert External Iteration to Internal Iteration
129 *
130 * You can use an external iterator to implement an internal iterator as follows:
131 *
132 * def ext_each(e)
133 * while true
134 * begin
135 * vs = e.next_values
136 * rescue StopIteration
137 * return $!.result
138 * end
139 * y = yield(*vs)
140 * e.feed y
141 * end
142 * end
143 *
144 * o = Object.new
145 *
146 * def o.each
147 * puts yield
148 * puts yield(1)
149 * puts yield(1, 2)
150 * 3
151 * end
152 *
153 * # use o.each as an internal iterator directly.
154 * puts o.each {|*x| puts x; [:b, *x] }
155 * # => [], [:b], [1], [:b, 1], [1, 2], [:b, 1, 2], 3
156 *
157 * # convert o.each to an external iterator for
158 * # implementing an internal iterator.
159 * puts ext_each(o.to_enum) {|*x| puts x; [:b, *x] }
160 * # => [], [:b], [1], [:b, 1], [1, 2], [:b, 1, 2], 3
161 *
162 */
163VALUE rb_cEnumerator;
164static VALUE rb_cLazy;
165static ID id_rewind, id_to_enum, id_each_entry;
166static ID id_next, id_result, id_receiver, id_arguments, id_memo, id_method, id_force;
167static VALUE sym_each, sym_yield;
168
169static VALUE lazy_use_super_method;
170
171extern ID ruby_static_id_cause;
172
173#define id_call idCall
174#define id_cause ruby_static_id_cause
175#define id_each idEach
176#define id_eqq idEqq
177#define id_initialize idInitialize
178#define id_size idSize
179
181
182struct enumerator {
183 VALUE obj;
184 ID meth;
185 VALUE args;
186 VALUE fib;
187 VALUE dst;
188 VALUE lookahead;
189 VALUE feedvalue;
190 VALUE stop_exc;
191 VALUE size;
192 VALUE procs;
193 rb_enumerator_size_func *size_fn;
194 int kw_splat;
195};
196
197RUBY_REFERENCES(enumerator_refs) = {
198 RUBY_REF_EDGE(struct enumerator, obj),
199 RUBY_REF_EDGE(struct enumerator, args),
200 RUBY_REF_EDGE(struct enumerator, fib),
201 RUBY_REF_EDGE(struct enumerator, dst),
202 RUBY_REF_EDGE(struct enumerator, lookahead),
203 RUBY_REF_EDGE(struct enumerator, feedvalue),
204 RUBY_REF_EDGE(struct enumerator, stop_exc),
205 RUBY_REF_EDGE(struct enumerator, size),
206 RUBY_REF_EDGE(struct enumerator, procs),
207 RUBY_REF_END
208};
209
210static VALUE rb_cGenerator, rb_cYielder, rb_cEnumProducer;
211
212struct generator {
213 VALUE proc;
214 VALUE obj;
215};
216
217struct yielder {
218 VALUE proc;
219};
220
221struct producer {
222 VALUE init;
223 VALUE proc;
224};
225
226typedef struct MEMO *lazyenum_proc_func(VALUE, struct MEMO *, VALUE, long);
227typedef VALUE lazyenum_size_func(VALUE, VALUE);
228typedef int lazyenum_precheck_func(VALUE proc_entry);
229typedef struct {
230 lazyenum_proc_func *proc;
231 lazyenum_size_func *size;
232 lazyenum_precheck_func *precheck;
233} lazyenum_funcs;
234
235struct proc_entry {
236 VALUE proc;
237 VALUE memo;
238 const lazyenum_funcs *fn;
239};
240
241static VALUE generator_allocate(VALUE klass);
242static VALUE generator_init(VALUE obj, VALUE proc);
243
244static VALUE rb_cEnumChain;
245
246struct enum_chain {
247 VALUE enums;
248 long pos;
249};
250
251static VALUE rb_cEnumProduct;
252
253struct enum_product {
254 VALUE enums;
255};
256
257VALUE rb_cArithSeq;
258
259static const rb_data_type_t enumerator_data_type = {
260 "enumerator",
261 {
262 RUBY_REFS_LIST_PTR(enumerator_refs),
263 RUBY_TYPED_DEFAULT_FREE,
264 NULL, // Nothing allocated externally, so don't need a memsize function
265 NULL,
266 },
267 0, NULL, RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED | RUBY_TYPED_DECL_MARKING | RUBY_TYPED_EMBEDDABLE
268};
269
270static struct enumerator *
271enumerator_ptr(VALUE obj)
272{
273 struct enumerator *ptr;
274
275 TypedData_Get_Struct(obj, struct enumerator, &enumerator_data_type, ptr);
276 if (!ptr || UNDEF_P(ptr->obj)) {
277 rb_raise(rb_eArgError, "uninitialized enumerator");
278 }
279 return ptr;
280}
281
282static void
283proc_entry_mark_and_move(void *p)
284{
285 struct proc_entry *ptr = p;
286 rb_gc_mark_and_move(&ptr->proc);
287 rb_gc_mark_and_move(&ptr->memo);
288}
289
290static const rb_data_type_t proc_entry_data_type = {
291 "proc_entry",
292 {
293 proc_entry_mark_and_move,
294 RUBY_TYPED_DEFAULT_FREE,
295 NULL, // Nothing allocated externally, so don't need a memsize function
296 proc_entry_mark_and_move,
297 },
298 0, 0, RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED | RUBY_TYPED_EMBEDDABLE
299};
300
301static struct proc_entry *
302proc_entry_ptr(VALUE proc_entry)
303{
304 struct proc_entry *ptr;
305
306 TypedData_Get_Struct(proc_entry, struct proc_entry, &proc_entry_data_type, ptr);
307
308 return ptr;
309}
310
311/*
312 * call-seq:
313 * obj.to_enum(method = :each, *args) -> enum
314 * obj.enum_for(method = :each, *args) -> enum
315 * obj.to_enum(method = :each, *args) {|*args| block} -> enum
316 * obj.enum_for(method = :each, *args){|*args| block} -> enum
317 *
318 * Creates a new Enumerator which will enumerate by calling +method+ on
319 * +obj+, passing +args+ if any. Whatever the method _yields_ becomes
320 * the values of the enumerator.
321 *
322 * If a block is given, it will be used to calculate the size of
323 * the enumerator without the need to iterate it (see Enumerator#size).
324 *
325 * === Examples
326 *
327 * str = "xyz"
328 *
329 * enum = str.enum_for(:each_byte)
330 * enum.each { |b| puts b }
331 * # => 120
332 * # => 121
333 * # => 122
334 *
335 * # protect an array from being modified by some_method
336 * a = [1, 2, 3]
337 * some_method(a.to_enum)
338 *
339 * # String#split in block form is more memory-efficient:
340 * very_large_string.split("|") { |chunk| return chunk if chunk.include?('DATE') }
341 * # This could be rewritten more idiomatically with to_enum:
342 * very_large_string.to_enum(:split, "|").lazy.grep(/DATE/).first
343 *
344 * It is typical to call to_enum when defining methods for
345 * a generic Enumerable, in case no block is passed.
346 *
347 * Here is such an example, with parameter passing and a sizing block:
348 *
349 * module Enumerable
350 * # a generic method to repeat the values of any enumerable
351 * def repeat(n)
352 * raise ArgumentError, "#{n} is negative!" if n < 0
353 * unless block_given?
354 * return to_enum(__method__, n) do # __method__ is :repeat here
355 * sz = size # Call size and multiply by n...
356 * sz * n if sz # but return nil if size itself is nil
357 * end
358 * end
359 * each do |*val|
360 * n.times { yield *val }
361 * end
362 * end
363 * end
364 *
365 * %i[hello world].repeat(2) { |w| puts w }
366 * # => Prints 'hello', 'hello', 'world', 'world'
367 * enum = (1..14).repeat(3)
368 * # => returns an Enumerator when called without a block
369 * enum.first(4) # => [1, 1, 1, 2]
370 * enum.size # => 42
371 */
372static VALUE
373obj_to_enum(int argc, VALUE *argv, VALUE obj)
374{
375 VALUE enumerator, meth = sym_each;
376
377 if (argc > 0) {
378 --argc;
379 meth = *argv++;
380 }
381 enumerator = rb_enumeratorize_with_size(obj, meth, argc, argv, 0);
382 if (rb_block_given_p()) {
383 RB_OBJ_WRITE(enumerator, &enumerator_ptr(enumerator)->size, rb_block_proc());
384 }
385 return enumerator;
386}
387
388static VALUE
389enumerator_allocate(VALUE klass)
390{
391 struct enumerator *ptr;
392 VALUE enum_obj;
393
394 enum_obj = TypedData_Make_Struct(klass, struct enumerator, &enumerator_data_type, ptr);
395 ptr->obj = Qundef;
396
397 return enum_obj;
398}
399
400static VALUE
401enumerator_init(VALUE enum_obj, VALUE obj, VALUE meth, int argc, const VALUE *argv, rb_enumerator_size_func *size_fn, VALUE size, int kw_splat)
402{
403 struct enumerator *ptr;
404
405 rb_check_frozen(enum_obj);
406 TypedData_Get_Struct(enum_obj, struct enumerator, &enumerator_data_type, ptr);
407
408 if (!ptr) {
409 rb_raise(rb_eArgError, "unallocated enumerator");
410 }
411
412 RB_OBJ_WRITE(enum_obj, &ptr->obj, obj);
413 ptr->meth = rb_to_id(meth);
414 if (argc) RB_OBJ_WRITE(enum_obj, &ptr->args, rb_ary_new4(argc, argv));
415 ptr->fib = 0;
416 ptr->dst = Qnil;
417 ptr->lookahead = Qundef;
418 ptr->feedvalue = Qundef;
419 ptr->stop_exc = Qfalse;
420 RB_OBJ_WRITE(enum_obj, &ptr->size, size);
421 ptr->size_fn = size_fn;
422 ptr->kw_splat = kw_splat;
423
424 return enum_obj;
425}
426
427static VALUE
428convert_to_feasible_size_value(VALUE obj)
429{
430 if (NIL_P(obj)) {
431 return obj;
432 }
433 else if (rb_respond_to(obj, id_call)) {
434 return obj;
435 }
436 else if (RB_FLOAT_TYPE_P(obj) && RFLOAT_VALUE(obj) == HUGE_VAL) {
437 return obj;
438 }
439 else {
440 return rb_to_int(obj);
441 }
442}
443
444/*
445 * call-seq:
446 * Enumerator.new(size = nil) { |yielder| ... }
447 *
448 * Creates a new Enumerator object, which can be used as an
449 * Enumerable.
450 *
451 * Iteration is defined by the given block, in
452 * which a "yielder" object, given as block parameter, can be used to
453 * yield a value by calling the +yield+ method (aliased as <code><<</code>):
454 *
455 * fib = Enumerator.new do |y|
456 * a = b = 1
457 * loop do
458 * y << a
459 * a, b = b, a + b
460 * end
461 * end
462 *
463 * fib.take(10) # => [1, 1, 2, 3, 5, 8, 13, 21, 34, 55]
464 *
465 * The optional parameter can be used to specify how to calculate the size
466 * in a lazy fashion (see Enumerator#size). It can either be a value or
467 * a callable object.
468 */
469static VALUE
470enumerator_initialize(int argc, VALUE *argv, VALUE obj)
471{
472 VALUE iter = rb_block_proc();
473 VALUE recv = generator_init(generator_allocate(rb_cGenerator), iter);
474 VALUE arg0 = rb_check_arity(argc, 0, 1) ? argv[0] : Qnil;
475 VALUE size = convert_to_feasible_size_value(arg0);
476
477 return enumerator_init(obj, recv, sym_each, 0, 0, 0, size, false);
478}
479
480/* :nodoc: */
481static VALUE
482enumerator_init_copy(VALUE obj, VALUE orig)
483{
484 struct enumerator *ptr0, *ptr1;
485
486 if (!OBJ_INIT_COPY(obj, orig)) return obj;
487 ptr0 = enumerator_ptr(orig);
488 if (ptr0->fib) {
489 /* Fibers cannot be copied */
490 rb_raise(rb_eTypeError, "can't copy execution context");
491 }
492
493 TypedData_Get_Struct(obj, struct enumerator, &enumerator_data_type, ptr1);
494
495 if (!ptr1) {
496 rb_raise(rb_eArgError, "unallocated enumerator");
497 }
498
499 RB_OBJ_WRITE(obj, &ptr1->obj, ptr0->obj);
500 ptr1->meth = ptr0->meth;
501 RB_OBJ_WRITE(obj, &ptr1->args, ptr0->args);
502 ptr1->fib = 0;
503 ptr1->lookahead = Qundef;
504 ptr1->feedvalue = Qundef;
505 RB_OBJ_WRITE(obj, &ptr1->size, ptr0->size);
506 ptr1->size_fn = ptr0->size_fn;
507
508 return obj;
509}
510
511/*
512 * For backwards compatibility; use rb_enumeratorize_with_size
513 */
514VALUE
515rb_enumeratorize(VALUE obj, VALUE meth, int argc, const VALUE *argv)
516{
517 return rb_enumeratorize_with_size(obj, meth, argc, argv, 0);
518}
519
520static VALUE lazy_to_enum_i(VALUE self, VALUE meth, int argc, const VALUE *argv, rb_enumerator_size_func *size_fn, int kw_splat);
521static int lazy_precheck(VALUE procs);
522
523VALUE
524rb_enumeratorize_with_size_kw(VALUE obj, VALUE meth, int argc, const VALUE *argv, rb_enumerator_size_func *size_fn, int kw_splat)
525{
526 VALUE base_class = rb_cEnumerator;
527
528 if (RTEST(rb_obj_is_kind_of(obj, rb_cLazy))) {
529 base_class = rb_cLazy;
530 }
531 else if (RTEST(rb_obj_is_kind_of(obj, rb_cEnumChain))) {
532 obj = enumerator_init(enumerator_allocate(rb_cEnumerator), obj, sym_each, 0, 0, 0, Qnil, false);
533 }
534
535 return enumerator_init(enumerator_allocate(base_class),
536 obj, meth, argc, argv, size_fn, Qnil, kw_splat);
537}
538
539VALUE
540rb_enumeratorize_with_size(VALUE obj, VALUE meth, int argc, const VALUE *argv, rb_enumerator_size_func *size_fn)
541{
542 return rb_enumeratorize_with_size_kw(obj, meth, argc, argv, size_fn, rb_keyword_given_p());
543}
544
545static VALUE
546enumerator_block_call(VALUE obj, rb_block_call_func *func, VALUE arg)
547{
548 int argc = 0;
549 const VALUE *argv = 0;
550 const struct enumerator *e = enumerator_ptr(obj);
551 ID meth = e->meth;
552
553 VALUE args = e->args;
554 if (args) {
555 argc = RARRAY_LENINT(args);
556 argv = RARRAY_CONST_PTR(args);
557 }
558
559 VALUE ret = rb_block_call_kw(e->obj, meth, argc, argv, func, arg, e->kw_splat);
560
561 RB_GC_GUARD(args);
562
563 return ret;
564}
565
566/*
567 * call-seq:
568 * enum.each { |elm| block } -> obj
569 * enum.each -> enum
570 * enum.each(*appending_args) { |elm| block } -> obj
571 * enum.each(*appending_args) -> an_enumerator
572 *
573 * Iterates over the block according to how this Enumerator was constructed.
574 * If no block and no arguments are given, returns self.
575 *
576 * === Examples
577 *
578 * "Hello, world!".scan(/\w+/) #=> ["Hello", "world"]
579 * "Hello, world!".to_enum(:scan, /\w+/).to_a #=> ["Hello", "world"]
580 * "Hello, world!".to_enum(:scan).each(/\w+/).to_a #=> ["Hello", "world"]
581 *
582 * obj = Object.new
583 *
584 * def obj.each_arg(a, b=:b, *rest)
585 * yield a
586 * yield b
587 * yield rest
588 * :method_returned
589 * end
590 *
591 * enum = obj.to_enum :each_arg, :a, :x
592 *
593 * enum.each.to_a #=> [:a, :x, []]
594 * enum.each.equal?(enum) #=> true
595 * enum.each { |elm| elm } #=> :method_returned
596 *
597 * enum.each(:y, :z).to_a #=> [:a, :x, [:y, :z]]
598 * enum.each(:y, :z).equal?(enum) #=> false
599 * enum.each(:y, :z) { |elm| elm } #=> :method_returned
600 *
601 */
602static VALUE
603enumerator_each(int argc, VALUE *argv, VALUE obj)
604{
605 struct enumerator *e = enumerator_ptr(obj);
606
607 if (argc > 0) {
608 VALUE args = (e = enumerator_ptr(obj = rb_obj_dup(obj)))->args;
609 if (args) {
610#if SIZEOF_INT < SIZEOF_LONG
611 /* check int range overflow */
612 rb_long2int(RARRAY_LEN(args) + argc);
613#endif
614 args = rb_ary_dup(args);
615 rb_ary_cat(args, argv, argc);
616 }
617 else {
618 args = rb_ary_new4(argc, argv);
619 }
620 RB_OBJ_WRITE(obj, &e->args, args);
621 e->size = Qnil;
622 e->size_fn = 0;
623 }
624 if (!rb_block_given_p()) return obj;
625
626 if (!lazy_precheck(e->procs)) return Qnil;
627
628 return enumerator_block_call(obj, 0, obj);
629}
630
631static VALUE
632enumerator_with_index_i(RB_BLOCK_CALL_FUNC_ARGLIST(val, m))
633{
634 struct MEMO *memo = (struct MEMO *)m;
635 VALUE idx = memo->v1;
636 MEMO_V1_SET(memo, rb_int_succ(idx));
637
638 if (argc <= 1)
639 return rb_yield_values(2, val, idx);
640
641 return rb_yield_values(2, rb_ary_new4(argc, argv), idx);
642}
643
644static VALUE
645enumerator_size(VALUE obj);
646
647static VALUE
648enumerator_enum_size(VALUE obj, VALUE args, VALUE eobj)
649{
650 return enumerator_size(obj);
651}
652
653/*
654 * call-seq:
655 * e.with_index(offset = 0) {|(*args), idx| ... }
656 * e.with_index(offset = 0)
657 *
658 * Iterates the given block for each element with an index, which
659 * starts from +offset+. If no block is given, returns a new Enumerator
660 * that includes the index, starting from +offset+.
661 *
662 * +offset+:: the starting index to use
663 *
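 * A short example (illustrative, based on the behavior described above):
 *
 *   e = [:a, :b, :c].each
 *   e.with_index(1).to_a                       #=> [[:a, 1], [:b, 2], [:c, 3]]
 *   e.with_index { |x, i| puts "#{i}: #{x}" }  # prints "0: a", "1: b", "2: c"
 *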
664 */
665static VALUE
666enumerator_with_index(int argc, VALUE *argv, VALUE obj)
667{
668 VALUE memo;
669
670 rb_check_arity(argc, 0, 1);
671 RETURN_SIZED_ENUMERATOR(obj, argc, argv, enumerator_enum_size);
672 memo = (!argc || NIL_P(memo = argv[0])) ? INT2FIX(0) : rb_to_int(memo);
673 return enumerator_block_call(obj, enumerator_with_index_i, (VALUE)MEMO_NEW(memo, 0, 0));
674}
675
676/*
677 * call-seq:
678 * e.each_with_index {|(*args), idx| ... }
679 * e.each_with_index
680 *
681 * Same as Enumerator#with_index(0), i.e. there is no starting offset.
682 *
683 * If no block is given, a new Enumerator is returned that includes the index.
684 *
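 * For instance (an illustrative example):
 *
 *   %w[a b c].each.each_with_index.to_a   #=> [["a", 0], ["b", 1], ["c", 2]]
 *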
685 */
686static VALUE
687enumerator_each_with_index(VALUE obj)
688{
689 return enumerator_with_index(0, NULL, obj);
690}
691
692static VALUE
693enumerator_with_object_i(RB_BLOCK_CALL_FUNC_ARGLIST(val, memo))
694{
695 if (argc <= 1)
696 return rb_yield_values(2, val, memo);
697
698 return rb_yield_values(2, rb_ary_new4(argc, argv), memo);
699}
700
701/*
702 * call-seq:
703 * e.each_with_object(obj) {|(*args), obj| ... }
704 * e.each_with_object(obj)
705 * e.with_object(obj) {|(*args), obj| ... }
706 * e.with_object(obj)
707 *
708 * Iterates the given block for each element with an arbitrary object, +obj+,
709 * and returns +obj+.
710 *
711 * If no block is given, returns a new Enumerator.
712 *
713 * === Example
714 *
715 * to_three = Enumerator.new do |y|
716 * 3.times do |x|
717 * y << x
718 * end
719 * end
720 *
721 * to_three_with_string = to_three.with_object("foo")
722 * to_three_with_string.each do |x,string|
723 * puts "#{string}: #{x}"
724 * end
725 *
726 * # => foo: 0
727 * # => foo: 1
728 * # => foo: 2
729 */
730static VALUE
731enumerator_with_object(VALUE obj, VALUE memo)
732{
733 RETURN_SIZED_ENUMERATOR(obj, 1, &memo, enumerator_enum_size);
734 enumerator_block_call(obj, enumerator_with_object_i, memo);
735
736 return memo;
737}
738
739static VALUE
740next_ii(RB_BLOCK_CALL_FUNC_ARGLIST(i, obj))
741{
742 struct enumerator *e = enumerator_ptr(obj);
743 VALUE feedvalue = Qnil;
744 VALUE args = rb_ary_new4(argc, argv);
745 rb_fiber_yield(1, &args);
746 if (!UNDEF_P(e->feedvalue)) {
747 feedvalue = e->feedvalue;
748 e->feedvalue = Qundef;
749 }
750 return feedvalue;
751}
752
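/*
 * External iteration machinery: next_i runs inside a Fiber created by
 * next_init below. It invokes the enumerator's #each with next_ii as the
 * block, which packs each set of yielded values into an array and hands it
 * to the caller via rb_fiber_yield. When the iteration finishes, a
 * StopIteration exception carrying the return value is stored in e->stop_exc.
 */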
753static VALUE
754next_i(RB_BLOCK_CALL_FUNC_ARGLIST(_, obj))
755{
756 struct enumerator *e = enumerator_ptr(obj);
757 VALUE nil = Qnil;
758 VALUE result;
759
760 result = rb_block_call(obj, id_each, 0, 0, next_ii, obj);
761 RB_OBJ_WRITE(obj, &e->stop_exc, rb_exc_new2(rb_eStopIteration, "iteration reached an end"));
762 rb_ivar_set(e->stop_exc, id_result, result);
763 return rb_fiber_yield(1, &nil);
764}
765
766static void
767next_init(VALUE obj, struct enumerator *e)
768{
769 VALUE curr = rb_fiber_current();
770 RB_OBJ_WRITE(obj, &e->dst, curr);
771 RB_OBJ_WRITE(obj, &e->fib, rb_fiber_new(next_i, obj));
772 e->lookahead = Qundef;
773}
774
775static VALUE
776get_next_values(VALUE obj, struct enumerator *e)
777{
778 VALUE curr, vs;
779
780 if (e->stop_exc) {
781 VALUE exc = e->stop_exc;
782 VALUE result = rb_attr_get(exc, id_result);
783 VALUE mesg = rb_attr_get(exc, idMesg);
784 if (!NIL_P(mesg)) mesg = rb_str_dup(mesg);
785 VALUE stop_exc = rb_exc_new_str(rb_eStopIteration, mesg);
786 rb_ivar_set(stop_exc, id_cause, exc);
787 rb_ivar_set(stop_exc, id_result, result);
788 rb_exc_raise(stop_exc);
789 }
790
791 curr = rb_fiber_current();
792
793 if (!e->fib || !rb_fiber_alive_p(e->fib)) {
794 next_init(obj, e);
795 }
796
797 vs = rb_fiber_resume(e->fib, 1, &curr);
798 if (e->stop_exc) {
799 e->fib = 0;
800 e->dst = Qnil;
801 e->lookahead = Qundef;
802 e->feedvalue = Qundef;
803 rb_exc_raise(e->stop_exc);
804 }
805 return vs;
806}
807
808/*
809 * call-seq:
810 * e.next_values -> array
811 *
812 * Returns the next object in the enumerator as an array, and moves the
813 * internal position forward. When the position reaches the end,
814 * StopIteration is raised.
815 *
816 * See class-level notes about external iterators.
817 *
818 * This method can be used to distinguish <code>yield</code> and <code>yield
819 * nil</code>.
820 *
821 * === Example
822 *
823 * o = Object.new
824 * def o.each
825 * yield
826 * yield 1
827 * yield 1, 2
828 * yield nil
829 * yield [1, 2]
830 * end
831 * e = o.to_enum
832 * p e.next_values
833 * p e.next_values
834 * p e.next_values
835 * p e.next_values
836 * p e.next_values
837 * e = o.to_enum
838 * p e.next
839 * p e.next
840 * p e.next
841 * p e.next
842 * p e.next
843 *
844 * ## yield args next_values next
845 * # yield [] nil
846 * # yield 1 [1] 1
847 * # yield 1, 2 [1, 2] [1, 2]
848 * # yield nil [nil] nil
849 * # yield [1, 2] [[1, 2]] [1, 2]
850 *
851 */
852
853static VALUE
854enumerator_next_values(VALUE obj)
855{
856 struct enumerator *e = enumerator_ptr(obj);
857 VALUE vs;
858
859 rb_check_frozen(obj);
860
861 if (!UNDEF_P(e->lookahead)) {
862 vs = e->lookahead;
863 e->lookahead = Qundef;
864 return vs;
865 }
866
867 return get_next_values(obj, e);
868}
869
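/*
 * Convert a packed values array to the single-value form used by #next and
 * #peek: an empty array becomes nil, a one-element array becomes that element,
 * and longer arrays are returned as-is (duplicated when dup is set, as #peek
 * requires). Non-array values pass through unchanged.
 */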
870static VALUE
871ary2sv(VALUE args, int dup)
872{
873 if (!RB_TYPE_P(args, T_ARRAY))
874 return args;
875
876 switch (RARRAY_LEN(args)) {
877 case 0:
878 return Qnil;
879
880 case 1:
881 return RARRAY_AREF(args, 0);
882
883 default:
884 if (dup)
885 return rb_ary_dup(args);
886 return args;
887 }
888}
889
890/*
891 * call-seq:
892 * e.next -> object
893 *
894 * Returns the next object in the enumerator, and moves the internal position
895 * forward. When the position reaches the end, StopIteration is raised.
896 *
897 * === Example
898 *
899 * a = [1,2,3]
900 * e = a.to_enum
901 * p e.next #=> 1
902 * p e.next #=> 2
903 * p e.next #=> 3
904 * p e.next #raises StopIteration
905 *
906 * See class-level notes about external iterators.
907 *
908 */
909
910static VALUE
911enumerator_next(VALUE obj)
912{
913 VALUE vs = enumerator_next_values(obj);
914 return ary2sv(vs, 0);
915}
916
917static VALUE
918enumerator_peek_values(VALUE obj)
919{
920 struct enumerator *e = enumerator_ptr(obj);
921
922 rb_check_frozen(obj);
923
924 if (UNDEF_P(e->lookahead)) {
925 RB_OBJ_WRITE(obj, &e->lookahead, get_next_values(obj, e));
926 }
927
928 return e->lookahead;
929}
930
931/*
932 * call-seq:
933 * e.peek_values -> array
934 *
935 * Returns the next object as an array, similar to Enumerator#next_values, but
936 * doesn't move the internal position forward. If the position is already at
937 * the end, StopIteration is raised.
938 *
939 * See class-level notes about external iterators.
940 *
941 * === Example
942 *
943 * o = Object.new
944 * def o.each
945 * yield
946 * yield 1
947 * yield 1, 2
948 * end
949 * e = o.to_enum
950 * p e.peek_values #=> []
951 * e.next
952 * p e.peek_values #=> [1]
953 * p e.peek_values #=> [1]
954 * e.next
955 * p e.peek_values #=> [1, 2]
956 * e.next
957 * p e.peek_values # raises StopIteration
958 *
959 */
960
961static VALUE
962enumerator_peek_values_m(VALUE obj)
963{
964 return rb_ary_dup(enumerator_peek_values(obj));
965}
966
967/*
968 * call-seq:
969 * e.peek -> object
970 *
971 * Returns the next object in the enumerator, but doesn't move the internal
972 * position forward. If the position is already at the end, StopIteration
973 * is raised.
974 *
975 * See class-level notes about external iterators.
976 *
977 * === Example
978 *
979 * a = [1,2,3]
980 * e = a.to_enum
981 * p e.next #=> 1
982 * p e.peek #=> 2
983 * p e.peek #=> 2
984 * p e.peek #=> 2
985 * p e.next #=> 2
986 * p e.next #=> 3
987 * p e.peek #raises StopIteration
988 *
989 */
990
991static VALUE
992enumerator_peek(VALUE obj)
993{
994 VALUE vs = enumerator_peek_values(obj);
995 return ary2sv(vs, 1);
996}
997
998/*
999 * call-seq:
1000 * e.feed obj -> nil
1001 *
1002 * Sets the value to be returned by the next yield inside +e+.
1003 *
1004 * If the value is not set, the yield returns nil.
1005 *
1006 * This value is cleared after being yielded.
1007 *
1008 * # Array#map passes the array's elements to "yield" and collects the
1009 * # results of "yield" as an array.
1010 * # The following example shows that "next" returns the passed elements and
1011 * # values passed to "feed" are collected as an array which can be
1012 * # obtained by StopIteration#result.
1013 * e = [1,2,3].map
1014 * p e.next #=> 1
1015 * e.feed "a"
1016 * p e.next #=> 2
1017 * e.feed "b"
1018 * p e.next #=> 3
1019 * e.feed "c"
1020 * begin
1021 * e.next
1022 * rescue StopIteration
1023 * p $!.result #=> ["a", "b", "c"]
1024 * end
1025 *
1026 * o = Object.new
1027 * def o.each
1028 * x = yield # (2) blocks
1029 * p x # (5) => "foo"
1030 * x = yield # (6) blocks
1031 * p x # (8) => nil
1032 * x = yield # (9) blocks
1033 * p x # not reached w/o another e.next
1034 * end
1035 *
1036 * e = o.to_enum
1037 * e.next # (1)
1038 * e.feed "foo" # (3)
1039 * e.next # (4)
1040 * e.next # (7)
1041 * # (10)
1042 */
1043
1044static VALUE
1045enumerator_feed(VALUE obj, VALUE v)
1046{
1047 struct enumerator *e = enumerator_ptr(obj);
1048
1049 rb_check_frozen(obj);
1050
1051 if (!UNDEF_P(e->feedvalue)) {
1052 rb_raise(rb_eTypeError, "feed value already set");
1053 }
1054 RB_OBJ_WRITE(obj, &e->feedvalue, v);
1055
1056 return Qnil;
1057}
1058
1059/*
1060 * call-seq:
1061 * e.rewind -> e
1062 *
1063 * Rewinds the enumeration sequence to the beginning.
1064 *
1065 * If the enclosed object responds to a "rewind" method, it is called.
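 *
 * A short example (illustrative, based on the behavior described above):
 *
 *   e = [1, 2, 3].each
 *   e.next    #=> 1
 *   e.next    #=> 2
 *   e.rewind
 *   e.next    #=> 1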
1066 */
1067
1068static VALUE
1069enumerator_rewind(VALUE obj)
1070{
1071 struct enumerator *e = enumerator_ptr(obj);
1072
1073 rb_check_frozen(obj);
1074
1075 rb_check_funcall(e->obj, id_rewind, 0, 0);
1076
1077 e->fib = 0;
1078 e->dst = Qnil;
1079 e->lookahead = Qundef;
1080 e->feedvalue = Qundef;
1081 e->stop_exc = Qfalse;
1082 return obj;
1083}
1084
1085static struct generator *generator_ptr(VALUE obj);
1086static VALUE append_method(VALUE obj, VALUE str, ID default_method, VALUE default_args);
1087
1088static VALUE
1089inspect_enumerator(VALUE obj, VALUE dummy, int recur)
1090{
1091 struct enumerator *e;
1092 VALUE eobj, str, cname;
1093
1094 TypedData_Get_Struct(obj, struct enumerator, &enumerator_data_type, e);
1095
1096 cname = rb_obj_class(obj);
1097
1098 if (!e || UNDEF_P(e->obj)) {
1099 return rb_sprintf("#<%"PRIsVALUE": uninitialized>", rb_class_path(cname));
1100 }
1101
1102 if (recur) {
1103 str = rb_sprintf("#<%"PRIsVALUE": ...>", rb_class_path(cname));
1104 return str;
1105 }
1106
1107 if (e->procs) {
1108 long i;
1109
1110 eobj = generator_ptr(e->obj)->obj;
1111 /* When procs are chained onto the enumerator, traverse all proc entries manually */
1112 if (rb_obj_class(eobj) == cname) {
1113 str = rb_inspect(eobj);
1114 }
1115 else {
1116 str = rb_sprintf("#<%"PRIsVALUE": %+"PRIsVALUE">", rb_class_path(cname), eobj);
1117 }
1118 for (i = 0; i < RARRAY_LEN(e->procs); i++) {
1119 str = rb_sprintf("#<%"PRIsVALUE": %"PRIsVALUE, cname, str);
1120 append_method(RARRAY_AREF(e->procs, i), str, e->meth, e->args);
1121 rb_str_buf_cat2(str, ">");
1122 }
1123 return str;
1124 }
1125
1126 eobj = rb_attr_get(obj, id_receiver);
1127 if (NIL_P(eobj)) {
1128 eobj = e->obj;
1129 }
1130
1131 /* (1..100).each_cons(2) => "#<Enumerator: 1..100:each_cons(2)>" */
1132 str = rb_sprintf("#<%"PRIsVALUE": %+"PRIsVALUE, rb_class_path(cname), eobj);
1133 append_method(obj, str, e->meth, e->args);
1134
1135 rb_str_buf_cat2(str, ">");
1136
1137 return str;
1138}
1139
1140static int
1141key_symbol_p(VALUE key, VALUE val, VALUE arg)
1142{
1143 if (SYMBOL_P(key)) return ST_CONTINUE;
1144 *(int *)arg = FALSE;
1145 return ST_STOP;
1146}
1147
1148static int
1149kwd_append(VALUE key, VALUE val, VALUE str)
1150{
1151 if (!SYMBOL_P(key)) rb_raise(rb_eRuntimeError, "non-symbol key inserted");
1152 rb_str_catf(str, "% "PRIsVALUE": %"PRIsVALUE", ", key, val);
1153 return ST_CONTINUE;
1154}
1155
1156static VALUE
1157append_method(VALUE obj, VALUE str, ID default_method, VALUE default_args)
1158{
1159 VALUE method, eargs;
1160
1161 method = rb_attr_get(obj, id_method);
1162 if (method != Qfalse) {
1163 if (!NIL_P(method)) {
1164 Check_Type(method, T_SYMBOL);
1165 method = rb_sym2str(method);
1166 }
1167 else {
1168 method = rb_id2str(default_method);
1169 }
1170 rb_str_buf_cat2(str, ":");
1171 rb_str_buf_append(str, method);
1172 }
1173
1174 eargs = rb_attr_get(obj, id_arguments);
1175 if (NIL_P(eargs)) {
1176 eargs = default_args;
1177 }
1178 if (eargs != Qfalse) {
1179 long argc = RARRAY_LEN(eargs);
1180 const VALUE *argv = RARRAY_CONST_PTR(eargs); /* WB: no new reference */
1181
1182 if (argc > 0) {
1183 VALUE kwds = Qnil;
1184
1185 rb_str_buf_cat2(str, "(");
1186
1187 if (RB_TYPE_P(argv[argc-1], T_HASH) && !RHASH_EMPTY_P(argv[argc-1])) {
1188 int all_key = TRUE;
1189 rb_hash_foreach(argv[argc-1], key_symbol_p, (VALUE)&all_key);
1190 if (all_key) kwds = argv[--argc];
1191 }
1192
1193 while (argc--) {
1194 VALUE arg = *argv++;
1195
1196 rb_str_append(str, rb_inspect(arg));
1197 rb_str_buf_cat2(str, ", ");
1198 }
1199 if (!NIL_P(kwds)) {
1200 rb_hash_foreach(kwds, kwd_append, str);
1201 }
1202 rb_str_set_len(str, RSTRING_LEN(str)-2);
1203 rb_str_buf_cat2(str, ")");
1204 }
1205 }
1206
1207 return str;
1208}
1209
1210/*
1211 * call-seq:
1212 * e.inspect -> string
1213 *
1214 * Creates a printable version of <i>e</i>.
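 *
 * For instance (an illustrative example matching the format shown in the
 * implementation below):
 *
 *   (1..100).each_cons(2).inspect   #=> "#<Enumerator: 1..100:each_cons(2)>"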
1215 */
1216
1217static VALUE
1218enumerator_inspect(VALUE obj)
1219{
1220 return rb_exec_recursive(inspect_enumerator, obj, 0);
1221}
1222
1223/*
1224 * call-seq:
1225 * e.size -> int, Float::INFINITY or nil
1226 *
1227 * Returns the size of the enumerator, or +nil+ if it can't be calculated lazily.
1228 *
1229 * (1..100).to_a.permutation(4).size # => 94109400
1230 * loop.size # => Float::INFINITY
1231 * (1..100).drop_while.size # => nil
1232 */
1233
1234static VALUE
1235enumerator_size(VALUE obj)
1236{
1237 struct enumerator *e = enumerator_ptr(obj);
1238 int argc = 0;
1239 const VALUE *argv = NULL;
1240 VALUE size;
1241
1242 if (e->procs) {
1243 struct generator *g = generator_ptr(e->obj);
1244 VALUE receiver = rb_check_funcall(g->obj, id_size, 0, 0);
1245 long i = 0;
1246
1247 for (i = 0; i < RARRAY_LEN(e->procs); i++) {
1248 VALUE proc = RARRAY_AREF(e->procs, i);
1249 struct proc_entry *entry = proc_entry_ptr(proc);
1250 lazyenum_size_func *size_fn = entry->fn->size;
1251 if (!size_fn) {
1252 return Qnil;
1253 }
1254 receiver = (*size_fn)(proc, receiver);
1255 }
1256 return receiver;
1257 }
1258
1259 if (e->size_fn) {
1260 return (*e->size_fn)(e->obj, e->args, obj);
1261 }
1262 if (e->args) {
1263 argc = (int)RARRAY_LEN(e->args);
1264 argv = RARRAY_CONST_PTR(e->args);
1265 }
1266 size = rb_check_funcall_kw(e->size, id_call, argc, argv, e->kw_splat);
1267 if (!UNDEF_P(size)) return size;
1268 return e->size;
1269}
1270
1271/*
1272 * Yielder
1273 */
1274static void
1275yielder_mark_and_move(void *p)
1276{
1277 struct yielder *ptr = p;
1278 rb_gc_mark_and_move(&ptr->proc);
1279}
1280
1281static const rb_data_type_t yielder_data_type = {
1282 "yielder",
1283 {
1284 yielder_mark_and_move,
1285 RUBY_TYPED_DEFAULT_FREE,
1286 NULL,
1287 yielder_mark_and_move,
1288 },
1289 0, 0, RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED | RUBY_TYPED_EMBEDDABLE
1290};
1291
1292static struct yielder *
1293yielder_ptr(VALUE obj)
1294{
1295 struct yielder *ptr;
1296
1297 TypedData_Get_Struct(obj, struct yielder, &yielder_data_type, ptr);
1298 if (!ptr || UNDEF_P(ptr->proc)) {
1299 rb_raise(rb_eArgError, "uninitialized yielder");
1300 }
1301 return ptr;
1302}
1303
1304/* :nodoc: */
1305static VALUE
1306yielder_allocate(VALUE klass)
1307{
1308 struct yielder *ptr;
1309 VALUE obj;
1310
1311 obj = TypedData_Make_Struct(klass, struct yielder, &yielder_data_type, ptr);
1312 ptr->proc = Qundef;
1313
1314 return obj;
1315}
1316
1317static VALUE
1318yielder_init(VALUE obj, VALUE proc)
1319{
1320 struct yielder *ptr;
1321
1322 TypedData_Get_Struct(obj, struct yielder, &yielder_data_type, ptr);
1323
1324 if (!ptr) {
1325 rb_raise(rb_eArgError, "unallocated yielder");
1326 }
1327
1328 RB_OBJ_WRITE(obj, &ptr->proc, proc);
1329
1330 return obj;
1331}
1332
1333/* :nodoc: */
1334static VALUE
1335yielder_initialize(VALUE obj)
1336{
1337 rb_need_block();
1338
1339 return yielder_init(obj, rb_block_proc());
1340}
1341
1342/* :nodoc: */
1343static VALUE
1344yielder_yield(VALUE obj, VALUE args)
1345{
1346 struct yielder *ptr = yielder_ptr(obj);
1347
1348 return rb_proc_call_kw(ptr->proc, args, RB_PASS_CALLED_KEYWORDS);
1349}
1350
1351/* :nodoc: */
1352static VALUE
1353yielder_yield_push(VALUE obj, VALUE arg)
1354{
1355 struct yielder *ptr = yielder_ptr(obj);
1356
1357 rb_proc_call_with_block(ptr->proc, 1, &arg, Qnil);
1358
1359 return obj;
1360}
1361
1362/*
1363 * Returns a Proc object that takes arguments and yields them.
1364 *
1365 * This method is implemented so that a Yielder object can be directly
1366 * passed to another method as a block argument.
1367 *
1368 * enum = Enumerator.new { |y|
1369 * Dir.glob("*.rb") { |file|
1370 * File.open(file) { |f| f.each_line(&y) }
1371 * }
1372 * }
1373 */
1374static VALUE
1375yielder_to_proc(VALUE obj)
1376{
1377 VALUE method = rb_obj_method(obj, sym_yield);
1378
1379 return rb_funcall(method, idTo_proc, 0);
1380}
1381
1382static VALUE
1383yielder_yield_i(RB_BLOCK_CALL_FUNC_ARGLIST(obj, memo))
1384{
1385 return rb_yield_values_kw(argc, argv, RB_PASS_CALLED_KEYWORDS);
1386}
1387
1388static VALUE
1389yielder_new(void)
1390{
1391 return yielder_init(yielder_allocate(rb_cYielder), rb_proc_new(yielder_yield_i, 0));
1392}
1393
1394/*
1395 * Generator
1396 */
1397static void
1398generator_mark_and_move(void *p)
1399{
1400 struct generator *ptr = p;
1401 rb_gc_mark_and_move(&ptr->proc);
1402 rb_gc_mark_and_move(&ptr->obj);
1403}
1404
1405static const rb_data_type_t generator_data_type = {
1406 "generator",
1407 {
1408 generator_mark_and_move,
1409 RUBY_TYPED_DEFAULT_FREE,
1410 NULL,
1411 generator_mark_and_move,
1412 },
1413 0, 0, RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED | RUBY_TYPED_EMBEDDABLE
1414};
1415
1416static struct generator *
1417generator_ptr(VALUE obj)
1418{
1419 struct generator *ptr;
1420
1421 TypedData_Get_Struct(obj, struct generator, &generator_data_type, ptr);
1422 if (!ptr || UNDEF_P(ptr->proc)) {
1423 rb_raise(rb_eArgError, "uninitialized generator");
1424 }
1425 return ptr;
1426}
1427
1428/* :nodoc: */
1429static VALUE
1430generator_allocate(VALUE klass)
1431{
1432 struct generator *ptr;
1433 VALUE obj;
1434
1435 obj = TypedData_Make_Struct(klass, struct generator, &generator_data_type, ptr);
1436 ptr->proc = Qundef;
1437
1438 return obj;
1439}
1440
1441static VALUE
1442generator_init(VALUE obj, VALUE proc)
1443{
1444 struct generator *ptr;
1445
1446 rb_check_frozen(obj);
1447 TypedData_Get_Struct(obj, struct generator, &generator_data_type, ptr);
1448
1449 if (!ptr) {
1450 rb_raise(rb_eArgError, "unallocated generator");
1451 }
1452
1453 RB_OBJ_WRITE(obj, &ptr->proc, proc);
1454
1455 return obj;
1456}
1457
1458/* :nodoc: */
1459static VALUE
1460generator_initialize(int argc, VALUE *argv, VALUE obj)
1461{
1462 VALUE proc;
1463
1464 if (argc == 0) {
1465 rb_need_block();
1466
1467 proc = rb_block_proc();
1468 }
1469 else {
1470 rb_scan_args(argc, argv, "1", &proc);
1471
1472 if (!rb_obj_is_proc(proc))
1473 rb_raise(rb_eTypeError,
1474 "wrong argument type %"PRIsVALUE" (expected Proc)",
1475 rb_obj_class(proc));
1476
1477 if (rb_block_given_p()) {
1478 rb_warn("given block not used");
1479 }
1480 }
1481
1482 return generator_init(obj, proc);
1483}
1484
1485/* :nodoc: */
1486static VALUE
1487generator_init_copy(VALUE obj, VALUE orig)
1488{
1489 struct generator *ptr0, *ptr1;
1490
1491 if (!OBJ_INIT_COPY(obj, orig)) return obj;
1492
1493 ptr0 = generator_ptr(orig);
1494
1495 TypedData_Get_Struct(obj, struct generator, &generator_data_type, ptr1);
1496
1497 if (!ptr1) {
1498 rb_raise(rb_eArgError, "unallocated generator");
1499 }
1500
1501 RB_OBJ_WRITE(obj, &ptr1->proc, ptr0->proc);
1502
1503 return obj;
1504}
1505
1506/* :nodoc: */
1507static VALUE
1508generator_each(int argc, VALUE *argv, VALUE obj)
1509{
1510 struct generator *ptr = generator_ptr(obj);
1511 VALUE args = rb_ary_new2(argc + 1);
1512
1513 rb_ary_push(args, yielder_new());
1514 if (argc > 0) {
1515 rb_ary_cat(args, argv, argc);
1516 }
1517
1518 return rb_proc_call_kw(ptr->proc, args, RB_PASS_CALLED_KEYWORDS);
1519}
1520
1521/* Lazy Enumerator methods */
1522static VALUE
1523enum_size(VALUE self)
1524{
1525 VALUE r = rb_check_funcall(self, id_size, 0, 0);
1526 return UNDEF_P(r) ? Qnil : r;
1527}
1528
1529static VALUE
1530lazyenum_size(VALUE self, VALUE args, VALUE eobj)
1531{
1532 return enum_size(self);
1533}
1534
1535#define lazy_receiver_size lazy_map_size
1536
1537static VALUE
1538lazy_init_iterator(RB_BLOCK_CALL_FUNC_ARGLIST(val, m))
1539{
1540 VALUE result;
1541 if (argc == 1) {
1542 VALUE args[2];
1543 args[0] = m;
1544 args[1] = val;
1545 result = rb_yield_values2(2, args);
1546 }
1547 else {
1548 VALUE args;
1549 int len = rb_long2int((long)argc + 1);
1550 VALUE *nargv = ALLOCV_N(VALUE, args, len);
1551
1552 nargv[0] = m;
1553 if (argc > 0) {
1554 MEMCPY(nargv + 1, argv, VALUE, argc);
1555 }
1556 result = rb_yield_values2(len, nargv);
1557 ALLOCV_END(args);
1558 }
1559 if (UNDEF_P(result)) rb_iter_break();
1560 return Qnil;
1561}
1562
1563static VALUE
1564lazy_init_block_i(RB_BLOCK_CALL_FUNC_ARGLIST(val, m))
1565{
1566 rb_block_call(m, id_each, argc-1, argv+1, lazy_init_iterator, val);
1567 return Qnil;
1568}
1569
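/*
 * Bookkeeping for chained lazy operations: each element travels through the
 * proc chain wrapped in a struct MEMO whose v2 slot (memo_value) holds the
 * current value. LAZY_MEMO_PACKED records that the value is an array packing
 * multiple yielded values, and LAZY_MEMO_BREAK tells lazy_yielder_result to
 * stop the iteration via rb_iter_break().
 */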
1570#define memo_value v2
1571#define memo_flags u3.state
1572#define LAZY_MEMO_BREAK 1
1573#define LAZY_MEMO_PACKED 2
1574#define LAZY_MEMO_BREAK_P(memo) ((memo)->memo_flags & LAZY_MEMO_BREAK)
1575#define LAZY_MEMO_PACKED_P(memo) ((memo)->memo_flags & LAZY_MEMO_PACKED)
1576#define LAZY_MEMO_SET_BREAK(memo) ((memo)->memo_flags |= LAZY_MEMO_BREAK)
1577#define LAZY_MEMO_RESET_BREAK(memo) ((memo)->memo_flags &= ~LAZY_MEMO_BREAK)
1578#define LAZY_MEMO_SET_VALUE(memo, value) MEMO_V2_SET(memo, value)
1579#define LAZY_MEMO_SET_PACKED(memo) ((memo)->memo_flags |= LAZY_MEMO_PACKED)
1580#define LAZY_MEMO_RESET_PACKED(memo) ((memo)->memo_flags &= ~LAZY_MEMO_PACKED)
1581
1582#define LAZY_NEED_BLOCK(func) \
1583 if (!rb_block_given_p()) { \
1584 rb_raise(rb_eArgError, "tried to call lazy " #func " without a block"); \
1585 }
1586
1587static VALUE lazy_yielder_result(struct MEMO *result, VALUE yielder, VALUE procs_array, VALUE memos, long i);
1588
1589static VALUE
1590lazy_init_yielder(RB_BLOCK_CALL_FUNC_ARGLIST(_, m))
1591{
1592 VALUE yielder = RARRAY_AREF(m, 0);
1593 VALUE procs_array = RARRAY_AREF(m, 1);
1594 VALUE memos = rb_attr_get(yielder, id_memo);
1595 struct MEMO *result;
1596
1597 result = MEMO_NEW(m, rb_enum_values_pack(argc, argv),
1598 argc > 1 ? LAZY_MEMO_PACKED : 0);
1599 return lazy_yielder_result(result, yielder, procs_array, memos, 0);
1600}
1601
1602static VALUE
1603lazy_yielder_yield(struct MEMO *result, long memo_index, int argc, const VALUE *argv)
1604{
1605 VALUE m = result->v1;
1606 VALUE yielder = RARRAY_AREF(m, 0);
1607 VALUE procs_array = RARRAY_AREF(m, 1);
1608 VALUE memos = rb_attr_get(yielder, id_memo);
1609 LAZY_MEMO_SET_VALUE(result, rb_enum_values_pack(argc, argv));
1610 if (argc > 1)
1611 LAZY_MEMO_SET_PACKED(result);
1612 else
1613 LAZY_MEMO_RESET_PACKED(result);
1614 return lazy_yielder_result(result, yielder, procs_array, memos, memo_index);
1615}
1616
1617static VALUE
1618lazy_yielder_result(struct MEMO *result, VALUE yielder, VALUE procs_array, VALUE memos, long i)
1619{
1620 int cont = 1;
1621
1622 for (; i < RARRAY_LEN(procs_array); i++) {
1623 VALUE proc = RARRAY_AREF(procs_array, i);
1624 struct proc_entry *entry = proc_entry_ptr(proc);
1625 if (!(*entry->fn->proc)(proc, result, memos, i)) {
1626 cont = 0;
1627 break;
1628 }
1629 }
1630
1631 if (cont) {
1632 rb_funcall2(yielder, idLTLT, 1, &(result->memo_value));
1633 }
1634 if (LAZY_MEMO_BREAK_P(result)) {
1635 rb_iter_break();
1636 }
1637 return result->memo_value;
1638}
1639
1640static VALUE
1641lazy_init_block(RB_BLOCK_CALL_FUNC_ARGLIST(val, m))
1642{
1643 VALUE procs = RARRAY_AREF(m, 1);
1644
1645 rb_ivar_set(val, id_memo, rb_ary_new2(RARRAY_LEN(procs)));
1646 rb_block_call(RARRAY_AREF(m, 0), id_each, 0, 0,
1647 lazy_init_yielder, rb_ary_new3(2, val, procs));
1648 return Qnil;
1649}
1650
1651static VALUE
1652lazy_generator_init(VALUE enumerator, VALUE procs)
1653{
1654 VALUE generator;
1655 VALUE obj;
1656 struct generator *gen_ptr;
1657 struct enumerator *e = enumerator_ptr(enumerator);
1658
1659 if (RARRAY_LEN(procs) > 0) {
1660 struct generator *old_gen_ptr = generator_ptr(e->obj);
1661 obj = old_gen_ptr->obj;
1662 }
1663 else {
1664 obj = enumerator;
1665 }
1666
1667 generator = generator_allocate(rb_cGenerator);
1668
1669 rb_block_call(generator, id_initialize, 0, 0,
1670 lazy_init_block, rb_ary_new3(2, obj, procs));
1671
1672 gen_ptr = generator_ptr(generator);
1673 RB_OBJ_WRITE(generator, &gen_ptr->obj, obj);
1674
1675 return generator;
1676}
1677
1678static int
1679lazy_precheck(VALUE procs)
1680{
1681 if (RTEST(procs)) {
1682 long num_procs = RARRAY_LEN(procs), i = num_procs;
1683 while (i-- > 0) {
1684 VALUE proc = RARRAY_AREF(procs, i);
1685 struct proc_entry *entry = proc_entry_ptr(proc);
1686 lazyenum_precheck_func *precheck = entry->fn->precheck;
1687 if (precheck && !precheck(proc)) return FALSE;
1688 }
1689 }
1690
1691 return TRUE;
1692}
1693
1694/*
1695 * Document-class: Enumerator::Lazy
1696 *
1697 * Enumerator::Lazy is a special type of Enumerator that allows constructing
1698 * chains of operations without evaluating them immediately, evaluating
1699 * values only on an as-needed basis. In order to do so, it redefines most of
1700 * the Enumerable methods so that they just construct another lazy enumerator.
1701 *
1702 * Enumerator::Lazy can be constructed from any Enumerable with the
1703 * Enumerable#lazy method.
1704 *
1705 * lazy = (1..Float::INFINITY).lazy.select(&:odd?).drop(10).take_while { |i| i < 30 }
1706 * # => #<Enumerator::Lazy: #<Enumerator::Lazy: #<Enumerator::Lazy: #<Enumerator::Lazy: 1..Infinity>:select>:drop(10)>:take_while>
1707 *
1708 * The real enumeration is performed when any non-redefined Enumerable method
1709 * is called, like Enumerable#first or Enumerable#to_a (the latter is aliased
1710 * as #force for more semantic code):
1711 *
1712 * lazy.first(2)
1713 * #=> [21, 23]
1714 *
1715 * lazy.force
1716 * #=> [21, 23, 25, 27, 29]
1717 *
1718 * Note that most Enumerable methods that can be called with or without
1719 * a block will, on Enumerator::Lazy, always require a block:
1720 *
1721 * [1, 2, 3].map #=> #<Enumerator: [1, 2, 3]:map>
1722 * [1, 2, 3].lazy.map # ArgumentError: tried to call lazy map without a block
1723 *
1724 * This class allows idiomatic calculations on long or infinite sequences, as well
1725 * as chaining of calculations without constructing intermediate arrays.
1726 *
1727 * Example for working with a slowly calculated sequence:
1728 *
1729 * require 'open-uri'
1730 *
1731 * # This will fetch all URLs before selecting
1732 * # necessary data
1733 * URLS.map { |u| JSON.parse(URI.open(u).read) }
1734 * .select { |data| data.key?('stats') }
1735 * .first(5)
1736 *
1737 * # This will fetch URLs one by one, only until
1738 * # there is enough data to satisfy the condition
1739 * URLS.lazy.map { |u| JSON.parse(URI.open(u).read) }
1740 * .select { |data| data.key?('stats') }
1741 * .first(5)
1742 *
1743 * Ending a chain with ".eager" generates a non-lazy enumerator, which
1744 * is suitable for returning or passing to another method that expects
1745 * a normal enumerator.
1746 *
1747 * def active_items
1748 * groups
1749 * .lazy
1750 * .flat_map(&:items)
1751 * .reject(&:disabled)
1752 * .eager
1753 * end
1754 *
1755 * # This works lazily; if a checked item is found, it stops
1756 * # iteration and does not look into remaining groups.
1757 * first_checked = active_items.find(&:checked)
1758 *
1759 * # This returns an array of items like a normal enumerator does.
1760 * all_checked = active_items.select(&:checked)
1761 *
1762 */
1763
1764/*
1765 * call-seq:
1766 * Lazy.new(obj, size=nil) { |yielder, *values| block }
1767 *
1768 * Creates a new Lazy enumerator. When the enumerator is actually enumerated
1769 * (e.g. by calling #force), +obj+ will be enumerated and each value passed
1770 * to the given block. The block can yield values back using +yielder+.
1771 * For example, to create a "filter+map" enumerator:
1772 *
1773 * def filter_map(sequence)
1774 * Lazy.new(sequence) do |yielder, *values|
1775 * result = yield *values
1776 * yielder << result if result
1777 * end
1778 * end
1779 *
1780 * filter_map(1..Float::INFINITY) {|i| i*i if i.even?}.first(5)
1781 * #=> [4, 16, 36, 64, 100]
1782 */
1783static VALUE
1784lazy_initialize(int argc, VALUE *argv, VALUE self)
1785{
1786 VALUE obj, size = Qnil;
1787 VALUE generator;
1788
1789 rb_check_arity(argc, 1, 2);
1790 LAZY_NEED_BLOCK(new);
1791 obj = argv[0];
1792 if (argc > 1) {
1793 size = argv[1];
1794 }
1795 generator = generator_allocate(rb_cGenerator);
1796 rb_block_call(generator, id_initialize, 0, 0, lazy_init_block_i, obj);
1797 enumerator_init(self, generator, sym_each, 0, 0, 0, size, 0);
1798 rb_ivar_set(self, id_receiver, obj);
1799
1800 return self;
1801}
1802
1803#if 0 /* for RDoc */
1804/*
1805 * call-seq:
1806 * lazy.to_a -> array
1807 * lazy.force -> array
1808 *
1809 * Expands +lazy+ enumerator to an array.
1810 * See Enumerable#to_a.
1811 */
1812static VALUE
1813lazy_to_a(VALUE self)
1814{
1815}
1816#endif
1817
1818static void
1819lazy_set_args(VALUE lazy, VALUE args)
1820{
1821 ID id = rb_frame_this_func();
1822 rb_ivar_set(lazy, id_method, ID2SYM(id));
1823 if (NIL_P(args)) {
1824 /* Qfalse indicates that the arguments are empty */
1825 rb_ivar_set(lazy, id_arguments, Qfalse);
1826 }
1827 else {
1828 rb_ivar_set(lazy, id_arguments, args);
1829 }
1830}
1831
1832#if 0
1833static VALUE
1834lazy_set_method(VALUE lazy, VALUE args, rb_enumerator_size_func *size_fn)
1835{
1836 struct enumerator *e = enumerator_ptr(lazy);
1837 lazy_set_args(lazy, args);
1838 e->size_fn = size_fn;
1839 return lazy;
1840}
1841#endif
1842
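/*
 * Core of lazy method chaining: instead of wrapping the enumerator in yet
 * another enumerator, each lazy adapter (map, select, ...) copies the
 * Enumerator::Lazy and appends a proc_entry describing the operation to its
 * procs array. The generator built by lazy_generator_init replays the source
 * enumeration through every proc entry in order (see lazy_init_yielder above).
 */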
1843static VALUE
1844lazy_add_method(VALUE obj, int argc, VALUE *argv, VALUE args, VALUE memo,
1845 const lazyenum_funcs *fn)
1846{
1847 struct enumerator *new_e;
1848 VALUE new_obj;
1849 VALUE new_generator;
1850 VALUE new_procs;
1851 struct enumerator *e = enumerator_ptr(obj);
1852 struct proc_entry *entry;
1853 VALUE entry_obj = TypedData_Make_Struct(rb_cObject, struct proc_entry,
1854 &proc_entry_data_type, entry);
1855 if (rb_block_given_p()) {
1856 RB_OBJ_WRITE(entry_obj, &entry->proc, rb_block_proc());
1857 }
1858 entry->fn = fn;
1859 RB_OBJ_WRITE(entry_obj, &entry->memo, args);
1860
1861 lazy_set_args(entry_obj, memo);
1862
1863 new_procs = RTEST(e->procs) ? rb_ary_dup(e->procs) : rb_ary_new();
1864 new_generator = lazy_generator_init(obj, new_procs);
1865 rb_ary_push(new_procs, entry_obj);
1866
1867 new_obj = enumerator_init_copy(enumerator_allocate(rb_cLazy), obj);
1868 new_e = RTYPEDDATA_GET_DATA(new_obj);
1869 RB_OBJ_WRITE(new_obj, &new_e->obj, new_generator);
1870 RB_OBJ_WRITE(new_obj, &new_e->procs, new_procs);
1871
1872 if (argc > 0) {
1873 new_e->meth = rb_to_id(*argv++);
1874 --argc;
1875 }
1876 else {
1877 new_e->meth = id_each;
1878 }
1879
1880 RB_OBJ_WRITE(new_obj, &new_e->args, rb_ary_new4(argc, argv));
1881
1882 return new_obj;
1883}
1884
1885/*
1886 * call-seq:
1887 * e.lazy -> lazy_enumerator
1888 *
1889 * Returns an Enumerator::Lazy, which redefines most Enumerable
1890 * methods to postpone enumeration and enumerate values only on an
1891 * as-needed basis.
1892 *
1893 * === Example
1894 *
1895 * The following program finds pythagorean triples:
1896 *
1897 * def pythagorean_triples
1898 * (1..Float::INFINITY).lazy.flat_map {|z|
1899 * (1..z).flat_map {|x|
1900 * (x..z).select {|y|
1901 * x**2 + y**2 == z**2
1902 * }.map {|y|
1903 * [x, y, z]
1904 * }
1905 * }
1906 * }
1907 * end
1908 * # show first ten pythagorean triples
1909 * p pythagorean_triples.take(10).force # take is lazy, so force is needed
1910 * p pythagorean_triples.first(10) # first is eager
1911 * # show pythagorean triples less than 100
1912 * p pythagorean_triples.take_while { |*, z| z < 100 }.force
1913 */
1914static VALUE
1915enumerable_lazy(VALUE obj)
1916{
1917 VALUE result = lazy_to_enum_i(obj, sym_each, 0, 0, lazyenum_size, rb_keyword_given_p());
1918 /* Qfalse indicates that the Enumerator::Lazy has no method name */
1919 rb_ivar_set(result, id_method, Qfalse);
1920 return result;
1921}
1922
1923static VALUE
1924lazy_to_enum_i(VALUE obj, VALUE meth, int argc, const VALUE *argv, rb_enumerator_size_func *size_fn, int kw_splat)
1925{
1926 return enumerator_init(enumerator_allocate(rb_cLazy),
1927 obj, meth, argc, argv, size_fn, Qnil, kw_splat);
1928}
1929
1930/*
1931 * call-seq:
1932 * lzy.to_enum(method = :each, *args) -> lazy_enum
1933 * lzy.enum_for(method = :each, *args) -> lazy_enum
1934 * lzy.to_enum(method = :each, *args) {|*args| block } -> lazy_enum
1935 * lzy.enum_for(method = :each, *args) {|*args| block } -> lazy_enum
1936 *
1937 * Similar to Object#to_enum, except it returns a lazy enumerator.
1938 * This makes it easy to define Enumerable methods that will
1939 * naturally remain lazy if called from a lazy enumerator.
1940 *
1941 * For example, continuing from the example in Object#to_enum:
1942 *
1943 * # See Object#to_enum for the definition of repeat
1944 * r = 1..Float::INFINITY
1945 * r.repeat(2).first(5) # => [1, 1, 2, 2, 3]
1946 * r.repeat(2).class # => Enumerator
1947 * r.repeat(2).map{|n| n ** 2}.first(5) # => endless loop!
1948 * # works naturally on lazy enumerator:
1949 * r.lazy.repeat(2).class # => Enumerator::Lazy
1950 * r.lazy.repeat(2).map{|n| n ** 2}.first(5) # => [1, 1, 4, 4, 9]
1951 */
1952
1953static VALUE
1954lazy_to_enum(int argc, VALUE *argv, VALUE self)
1955{
1956 VALUE lazy, meth = sym_each, super_meth;
1957
1958 if (argc > 0) {
1959 --argc;
1960 meth = *argv++;
1961 }
1962 if (RTEST((super_meth = rb_hash_aref(lazy_use_super_method, meth)))) {
1963 meth = super_meth;
1964 }
1965 lazy = lazy_to_enum_i(self, meth, argc, argv, 0, rb_keyword_given_p());
1966 if (rb_block_given_p()) {
1967 RB_OBJ_WRITE(lazy, &enumerator_ptr(lazy)->size, rb_block_proc());
1968 }
1969 return lazy;
1970}
1971
1972static VALUE
1973lazy_eager_size(VALUE self, VALUE args, VALUE eobj)
1974{
1975 return enum_size(self);
1976}
1977
1978/*
1979 * call-seq:
1980 * lzy.eager -> enum
1981 *
1982 * Returns a non-lazy Enumerator converted from the lazy enumerator.
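 *
 * For example (an illustrative snippet):
 *
 *   lazy = (1..Float::INFINITY).lazy.map { |i| i * 2 }
 *   lazy.eager.class     #=> Enumerator
 *   lazy.eager.first(3)  #=> [2, 4, 6]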
1983 */
1984
1985static VALUE
1986lazy_eager(VALUE self)
1987{
1988 return enumerator_init(enumerator_allocate(rb_cEnumerator),
1989 self, sym_each, 0, 0, lazy_eager_size, Qnil, 0);
1990}
1991
1992static VALUE
1993lazyenum_yield(VALUE proc_entry, struct MEMO *result)
1994{
1995 struct proc_entry *entry = proc_entry_ptr(proc_entry);
1996 return rb_proc_call_with_block(entry->proc, 1, &result->memo_value, Qnil);
1997}
1998
1999static VALUE
2000lazyenum_yield_values(VALUE proc_entry, struct MEMO *result)
2001{
2002 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2003 int argc = 1;
2004 const VALUE *argv = &result->memo_value;
2005 if (LAZY_MEMO_PACKED_P(result)) {
2006 const VALUE args = *argv;
2007 argc = RARRAY_LENINT(args);
2008 argv = RARRAY_CONST_PTR(args);
2009 }
2010 return rb_proc_call_with_block(entry->proc, argc, argv, Qnil);
2011}
2012
2013static struct MEMO *
2014lazy_map_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2015{
2016 VALUE value = lazyenum_yield_values(proc_entry, result);
2017 LAZY_MEMO_SET_VALUE(result, value);
2018 LAZY_MEMO_RESET_PACKED(result);
2019 return result;
2020}
2021
2022static VALUE
2023lazy_map_size(VALUE entry, VALUE receiver)
2024{
2025 return receiver;
2026}
2027
2028static const lazyenum_funcs lazy_map_funcs = {
2029 lazy_map_proc, lazy_map_size,
2030};
2031
2032/*
2033 * call-seq:
2034 * lazy.collect { |obj| block } -> lazy_enumerator
2035 * lazy.map { |obj| block } -> lazy_enumerator
2036 *
2037 * Like Enumerable#map, but chains operation to be lazy-evaluated.
2038 *
2039 * (1..Float::INFINITY).lazy.map {|i| i**2 }
2040 * #=> #<Enumerator::Lazy: #<Enumerator::Lazy: 1..Infinity>:map>
2041 * (1..Float::INFINITY).lazy.map {|i| i**2 }.first(3)
2042 * #=> [1, 4, 9]
2043 */
2044
2045static VALUE
2046lazy_map(VALUE obj)
2047{
2048 LAZY_NEED_BLOCK(map);
2049 return lazy_add_method(obj, 0, 0, Qnil, Qnil, &lazy_map_funcs);
2050}
2051
2052struct flat_map_i_arg {
2053 struct MEMO *result;
2054 long index;
2055};
2056
2057static VALUE
2058lazy_flat_map_i(RB_BLOCK_CALL_FUNC_ARGLIST(i, y))
2059{
2060 struct flat_map_i_arg *arg = (struct flat_map_i_arg *)y;
2061
2062 return lazy_yielder_yield(arg->result, arg->index, argc, argv);
2063}
2064
2065static struct MEMO *
2066lazy_flat_map_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2067{
2068 VALUE value = lazyenum_yield_values(proc_entry, result);
2069 VALUE ary = 0;
2070 const long proc_index = memo_index + 1;
2071 int break_p = LAZY_MEMO_BREAK_P(result);
2072
2073 if (RB_TYPE_P(value, T_ARRAY)) {
2074 ary = value;
2075 }
2076 else if (rb_respond_to(value, id_force) && rb_respond_to(value, id_each)) {
2077 struct flat_map_i_arg arg = {.result = result, .index = proc_index};
2078 LAZY_MEMO_RESET_BREAK(result);
2079 rb_block_call(value, id_each, 0, 0, lazy_flat_map_i, (VALUE)&arg);
2080 if (break_p) LAZY_MEMO_SET_BREAK(result);
2081 return 0;
2082 }
2083
2084 if (ary || !NIL_P(ary = rb_check_array_type(value))) {
2085 long i;
2086 LAZY_MEMO_RESET_BREAK(result);
2087 for (i = 0; i + 1 < RARRAY_LEN(ary); i++) {
2088 const VALUE argv = RARRAY_AREF(ary, i);
2089 lazy_yielder_yield(result, proc_index, 1, &argv);
2090 }
2091 if (break_p) LAZY_MEMO_SET_BREAK(result);
2092 if (i >= RARRAY_LEN(ary)) return 0;
2093 value = RARRAY_AREF(ary, i);
2094 }
2095 LAZY_MEMO_SET_VALUE(result, value);
2096 LAZY_MEMO_RESET_PACKED(result);
2097 return result;
2098}
2099
2100static const lazyenum_funcs lazy_flat_map_funcs = {
2101 lazy_flat_map_proc, 0,
2102};
2103
2104/*
2105 * call-seq:
2106 * lazy.collect_concat { |obj| block } -> a_lazy_enumerator
2107 * lazy.flat_map { |obj| block } -> a_lazy_enumerator
2108 *
2109 * Returns a new lazy enumerator with the concatenated results of running
2110 * +block+ once for every element in the lazy enumerator.
2111 *
2112 * ["foo", "bar"].lazy.flat_map {|i| i.each_char.lazy}.force
2113 * #=> ["f", "o", "o", "b", "a", "r"]
2114 *
2115 * A value +x+ returned by +block+ is decomposed if either of
2116 * the following conditions is true:
2117 *
2118 * * +x+ responds to both each and force, which means that
2119 * +x+ is a lazy enumerator.
2120 * * +x+ is an array or responds to to_ary.
2121 *
2122 * Otherwise, +x+ is contained as-is in the return value.
2123 *
2124 * [{a:1}, {b:2}].lazy.flat_map {|i| i}.force
2125 * #=> [{:a=>1}, {:b=>2}]
2126 */
2127static VALUE
2128lazy_flat_map(VALUE obj)
2129{
2130 LAZY_NEED_BLOCK(flat_map);
2131 return lazy_add_method(obj, 0, 0, Qnil, Qnil, &lazy_flat_map_funcs);
2132}
2133
2134static struct MEMO *
2135lazy_select_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2136{
2137 VALUE chain = lazyenum_yield(proc_entry, result);
2138 if (!RTEST(chain)) return 0;
2139 return result;
2140}
2141
2142static const lazyenum_funcs lazy_select_funcs = {
2143 lazy_select_proc, 0,
2144};
2145
2146/*
2147 * call-seq:
2148 * lazy.find_all { |obj| block } -> lazy_enumerator
2149 * lazy.select { |obj| block } -> lazy_enumerator
2150 * lazy.filter { |obj| block } -> lazy_enumerator
2151 *
2152 * Like Enumerable#select, but chains operation to be lazy-evaluated.
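 *
 * For example, selecting multiples of three from an infinite range
 * (an illustrative usage sketch):
 *
 *    (1..Float::INFINITY).lazy.select { |i| (i % 3).zero? }.first(3)
 *    #=> [3, 6, 9]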
2153 */
2154static VALUE
2155lazy_select(VALUE obj)
2156{
2157 LAZY_NEED_BLOCK(select);
2158 return lazy_add_method(obj, 0, 0, Qnil, Qnil, &lazy_select_funcs);
2159}
2160
2161static struct MEMO *
2162lazy_filter_map_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2163{
2164 VALUE value = lazyenum_yield_values(proc_entry, result);
2165 if (!RTEST(value)) return 0;
2166 LAZY_MEMO_SET_VALUE(result, value);
2167 LAZY_MEMO_RESET_PACKED(result);
2168 return result;
2169}
2170
2171static const lazyenum_funcs lazy_filter_map_funcs = {
2172 lazy_filter_map_proc, 0,
2173};
2174
2175/*
2176 * call-seq:
2177 * lazy.filter_map { |obj| block } -> lazy_enumerator
2178 *
2179 * Like Enumerable#filter_map, but chains operation to be lazy-evaluated.
2180 *
2181 * (1..).lazy.filter_map { |i| i * 2 if i.even? }.first(5)
2182 * #=> [4, 8, 12, 16, 20]
2183 */
2184
2185static VALUE
2186lazy_filter_map(VALUE obj)
2187{
2188 LAZY_NEED_BLOCK(filter_map);
2189 return lazy_add_method(obj, 0, 0, Qnil, Qnil, &lazy_filter_map_funcs);
2190}
2191
2192static struct MEMO *
2193lazy_reject_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2194{
2195 VALUE chain = lazyenum_yield(proc_entry, result);
2196 if (RTEST(chain)) return 0;
2197 return result;
2198}
2199
2200static const lazyenum_funcs lazy_reject_funcs = {
2201 lazy_reject_proc, 0,
2202};
2203
2204/*
2205 * call-seq:
2206 * lazy.reject { |obj| block } -> lazy_enumerator
2207 *
2208 * Like Enumerable#reject, but chains operation to be lazy-evaluated.
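 *
 * For example, rejecting odd numbers from an infinite range
 * (an illustrative usage sketch):
 *
 *    (1..Float::INFINITY).lazy.reject { |i| i.odd? }.first(3)
 *    #=> [2, 4, 6]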
2209 */
2210
2211static VALUE
2212lazy_reject(VALUE obj)
2213{
2214 LAZY_NEED_BLOCK(reject);
2215 return lazy_add_method(obj, 0, 0, Qnil, Qnil, &lazy_reject_funcs);
2216}
2217
2218static struct MEMO *
2219lazy_grep_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2220{
2221 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2222 VALUE chain = rb_funcall(entry->memo, id_eqq, 1, result->memo_value);
2223 if (!RTEST(chain)) return 0;
2224 return result;
2225}
2226
2227static struct MEMO *
2228lazy_grep_iter_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2229{
2230 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2231 VALUE value, chain = rb_funcall(entry->memo, id_eqq, 1, result->memo_value);
2232
2233 if (!RTEST(chain)) return 0;
2234 value = rb_proc_call_with_block(entry->proc, 1, &(result->memo_value), Qnil);
2235 LAZY_MEMO_SET_VALUE(result, value);
2236 LAZY_MEMO_RESET_PACKED(result);
2237
2238 return result;
2239}
2240
2241static const lazyenum_funcs lazy_grep_iter_funcs = {
2242 lazy_grep_iter_proc, 0,
2243};
2244
2245static const lazyenum_funcs lazy_grep_funcs = {
2246 lazy_grep_proc, 0,
2247};
2248
2249/*
2250 * call-seq:
2251 * lazy.grep(pattern) -> lazy_enumerator
2252 * lazy.grep(pattern) { |obj| block } -> lazy_enumerator
2253 *
2254 * Like Enumerable#grep, but chains operation to be lazy-evaluated.
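 *
 * For example, filtering an infinite range against a Range pattern,
 * with and without a block (an illustrative usage sketch):
 *
 *    (1..Float::INFINITY).lazy.grep(20..25).first(3)
 *    #=> [20, 21, 22]
 *    (1..Float::INFINITY).lazy.grep(20..25) { |i| i.to_s }.first(3)
 *    #=> ["20", "21", "22"]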
2255 */
2256
2257static VALUE
2258lazy_grep(VALUE obj, VALUE pattern)
2259{
2260 const lazyenum_funcs *const funcs = rb_block_given_p() ?
2261 &lazy_grep_iter_funcs : &lazy_grep_funcs;
2262 return lazy_add_method(obj, 0, 0, pattern, rb_ary_new3(1, pattern), funcs);
2263}
2264
2265static struct MEMO *
2266lazy_grep_v_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2267{
2268 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2269 VALUE chain = rb_funcall(entry->memo, id_eqq, 1, result->memo_value);
2270 if (RTEST(chain)) return 0;
2271 return result;
2272}
2273
2274static struct MEMO *
2275lazy_grep_v_iter_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2276{
2277 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2278 VALUE value, chain = rb_funcall(entry->memo, id_eqq, 1, result->memo_value);
2279
2280 if (RTEST(chain)) return 0;
2281 value = rb_proc_call_with_block(entry->proc, 1, &(result->memo_value), Qnil);
2282 LAZY_MEMO_SET_VALUE(result, value);
2283 LAZY_MEMO_RESET_PACKED(result);
2284
2285 return result;
2286}
2287
2288static const lazyenum_funcs lazy_grep_v_iter_funcs = {
2289 lazy_grep_v_iter_proc, 0,
2290};
2291
2292static const lazyenum_funcs lazy_grep_v_funcs = {
2293 lazy_grep_v_proc, 0,
2294};
2295
2296/*
2297 * call-seq:
2298 * lazy.grep_v(pattern) -> lazy_enumerator
2299 * lazy.grep_v(pattern) { |obj| block } -> lazy_enumerator
2300 *
2301 * Like Enumerable#grep_v, but chains operation to be lazy-evaluated.
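 *
 * For example, keeping only the elements that do not match the
 * pattern (an illustrative usage sketch):
 *
 *    (1..Float::INFINITY).lazy.grep_v(2..4).first(4)
 *    #=> [1, 5, 6, 7]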
2302 */
2303
2304static VALUE
2305lazy_grep_v(VALUE obj, VALUE pattern)
2306{
2307 const lazyenum_funcs *const funcs = rb_block_given_p() ?
2308 &lazy_grep_v_iter_funcs : &lazy_grep_v_funcs;
2309 return lazy_add_method(obj, 0, 0, pattern, rb_ary_new3(1, pattern), funcs);
2310}
2311
2312static VALUE
2313call_next(VALUE obj)
2314{
2315 return rb_funcall(obj, id_next, 0);
2316}
2317
2318static VALUE
2319next_stopped(VALUE obj, VALUE _)
2320{
2321 return Qnil;
2322}
2323
2324static struct MEMO *
2325lazy_zip_arrays_func(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2326{
2327 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2328 VALUE ary, arrays = entry->memo;
2329 VALUE memo = rb_ary_entry(memos, memo_index);
2330 long i, count = NIL_P(memo) ? 0 : NUM2LONG(memo);
2331
2332 ary = rb_ary_new2(RARRAY_LEN(arrays) + 1);
2333 rb_ary_push(ary, result->memo_value);
2334 for (i = 0; i < RARRAY_LEN(arrays); i++) {
2335 rb_ary_push(ary, rb_ary_entry(RARRAY_AREF(arrays, i), count));
2336 }
2337 LAZY_MEMO_SET_VALUE(result, ary);
2338 rb_ary_store(memos, memo_index, LONG2NUM(++count));
2339 return result;
2340}
2341
2342static struct MEMO *
2343lazy_zip_func(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2344{
2345 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2346 VALUE arg = rb_ary_entry(memos, memo_index);
2347 VALUE zip_args = entry->memo;
2348 VALUE ary, v;
2349 long i;
2350
2351 if (NIL_P(arg)) {
2352 arg = rb_ary_new2(RARRAY_LEN(zip_args));
2353 for (i = 0; i < RARRAY_LEN(zip_args); i++) {
2354 rb_ary_push(arg, rb_funcall(RARRAY_AREF(zip_args, i), id_to_enum, 0));
2355 }
2356 rb_ary_store(memos, memo_index, arg);
2357 }
2358
2359 ary = rb_ary_new2(RARRAY_LEN(arg) + 1);
2360 rb_ary_push(ary, result->memo_value);
2361 for (i = 0; i < RARRAY_LEN(arg); i++) {
2362         v = rb_rescue2(call_next, RARRAY_AREF(arg, i), next_stopped, 0,
2363                        rb_eStopIteration, (VALUE)0);
2364 rb_ary_push(ary, v);
2365 }
2366 LAZY_MEMO_SET_VALUE(result, ary);
2367 return result;
2368}
2369
2370static const lazyenum_funcs lazy_zip_funcs[] = {
2371 {lazy_zip_func, lazy_receiver_size,},
2372 {lazy_zip_arrays_func, lazy_receiver_size,},
2373};
2374
2375/*
2376 * call-seq:
2377 * lazy.zip(arg, ...) -> lazy_enumerator
2378 * lazy.zip(arg, ...) { |arr| block } -> nil
2379 *
2380 * Like Enumerable#zip, but chains operation to be lazy-evaluated.
2381 * However, if a block is given to zip, values are enumerated immediately.
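 *
 * For example, zipping an infinite range with a finite array, which
 * pads with +nil+ once the array is exhausted (an illustrative usage
 * sketch):
 *
 *    (1..Float::INFINITY).lazy.zip(%w[a b c]).first(4)
 *    #=> [[1, "a"], [2, "b"], [3, "c"], [4, nil]]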
2382 */
2383static VALUE
2384lazy_zip(int argc, VALUE *argv, VALUE obj)
2385{
2386 VALUE ary, v;
2387 long i;
2388 const lazyenum_funcs *funcs = &lazy_zip_funcs[1];
2389
2390 if (rb_block_given_p()) {
2391 return rb_call_super(argc, argv);
2392 }
2393
2394 ary = rb_ary_new2(argc);
2395 for (i = 0; i < argc; i++) {
2396 v = rb_check_array_type(argv[i]);
2397 if (NIL_P(v)) {
2398 for (; i < argc; i++) {
2399 if (!rb_respond_to(argv[i], id_each)) {
2400 rb_raise(rb_eTypeError, "wrong argument type %"PRIsVALUE" (must respond to :each)",
2401 rb_obj_class(argv[i]));
2402 }
2403 }
2404 ary = rb_ary_new4(argc, argv);
2405 funcs = &lazy_zip_funcs[0];
2406 break;
2407 }
2408 rb_ary_push(ary, v);
2409 }
2410
2411 return lazy_add_method(obj, 0, 0, ary, ary, funcs);
2412}
2413
2414static struct MEMO *
2415lazy_take_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2416{
2417 long remain;
2418 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2419 VALUE memo = rb_ary_entry(memos, memo_index);
2420
2421 if (NIL_P(memo)) {
2422 memo = entry->memo;
2423 }
2424
2425 remain = NUM2LONG(memo);
2426 if (--remain == 0) LAZY_MEMO_SET_BREAK(result);
2427 rb_ary_store(memos, memo_index, LONG2NUM(remain));
2428 return result;
2429}
2430
2431static VALUE
2432lazy_take_size(VALUE entry, VALUE receiver)
2433{
2434 long len = NUM2LONG(RARRAY_AREF(rb_ivar_get(entry, id_arguments), 0));
2435 if (NIL_P(receiver) || (FIXNUM_P(receiver) && FIX2LONG(receiver) < len))
2436 return receiver;
2437 return LONG2NUM(len);
2438}
2439
2440static int
2441lazy_take_precheck(VALUE proc_entry)
2442{
2443 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2444 return entry->memo != INT2FIX(0);
2445}
2446
2447static const lazyenum_funcs lazy_take_funcs = {
2448 lazy_take_proc, lazy_take_size, lazy_take_precheck,
2449};
2450
2451/*
2452 * call-seq:
2453 * lazy.take(n) -> lazy_enumerator
2454 *
2455 * Like Enumerable#take, but chains operation to be lazy-evaluated.
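 *
 * For example (an illustrative usage sketch):
 *
 *    (1..Float::INFINITY).lazy.take(3).force
 *    #=> [1, 2, 3]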
2456 */
2457
2458static VALUE
2459lazy_take(VALUE obj, VALUE n)
2460{
2461 long len = NUM2LONG(n);
2462
2463 if (len < 0) {
2464 rb_raise(rb_eArgError, "attempt to take negative size");
2465 }
2466
2467 n = LONG2NUM(len); /* no more conversion */
2468
2469 return lazy_add_method(obj, 0, 0, n, rb_ary_new3(1, n), &lazy_take_funcs);
2470}
2471
2472static struct MEMO *
2473lazy_take_while_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2474{
2475 VALUE take = lazyenum_yield_values(proc_entry, result);
2476 if (!RTEST(take)) {
2477 LAZY_MEMO_SET_BREAK(result);
2478 return 0;
2479 }
2480 return result;
2481}
2482
2483static const lazyenum_funcs lazy_take_while_funcs = {
2484 lazy_take_while_proc, 0,
2485};
2486
2487/*
2488 * call-seq:
2489 * lazy.take_while { |obj| block } -> lazy_enumerator
2490 *
2491 * Like Enumerable#take_while, but chains operation to be lazy-evaluated.
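 *
 * For instance (an illustrative usage sketch):
 *
 *    (1..Float::INFINITY).lazy.take_while { |i| i < 4 }.force
 *    #=> [1, 2, 3]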
2492 */
2493
2494static VALUE
2495lazy_take_while(VALUE obj)
2496{
2497 LAZY_NEED_BLOCK(take_while);
2498 return lazy_add_method(obj, 0, 0, Qnil, Qnil, &lazy_take_while_funcs);
2499}
2500
2501static VALUE
2502lazy_drop_size(VALUE proc_entry, VALUE receiver)
2503{
2504 long len = NUM2LONG(RARRAY_AREF(rb_ivar_get(proc_entry, id_arguments), 0));
2505 if (NIL_P(receiver))
2506 return receiver;
2507 if (FIXNUM_P(receiver)) {
2508 len = FIX2LONG(receiver) - len;
2509 return LONG2FIX(len < 0 ? 0 : len);
2510 }
2511 return rb_funcall(receiver, '-', 1, LONG2NUM(len));
2512}
2513
2514static struct MEMO *
2515lazy_drop_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2516{
2517 long remain;
2518 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2519 VALUE memo = rb_ary_entry(memos, memo_index);
2520
2521 if (NIL_P(memo)) {
2522 memo = entry->memo;
2523 }
2524 remain = NUM2LONG(memo);
2525 if (remain > 0) {
2526 --remain;
2527 rb_ary_store(memos, memo_index, LONG2NUM(remain));
2528 return 0;
2529 }
2530
2531 return result;
2532}
2533
2534static const lazyenum_funcs lazy_drop_funcs = {
2535 lazy_drop_proc, lazy_drop_size,
2536};
2537
2538/*
2539 * call-seq:
2540 * lazy.drop(n) -> lazy_enumerator
2541 *
2542 * Like Enumerable#drop, but chains operation to be lazy-evaluated.
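 *
 * For example (an illustrative usage sketch):
 *
 *    (1..Float::INFINITY).lazy.drop(3).first(3)
 *    #=> [4, 5, 6]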
2543 */
2544
2545static VALUE
2546lazy_drop(VALUE obj, VALUE n)
2547{
2548 long len = NUM2LONG(n);
2549 VALUE argv[2];
2550 argv[0] = sym_each;
2551 argv[1] = n;
2552
2553 if (len < 0) {
2554 rb_raise(rb_eArgError, "attempt to drop negative size");
2555 }
2556
2557 return lazy_add_method(obj, 2, argv, n, rb_ary_new3(1, n), &lazy_drop_funcs);
2558}
2559
2560static struct MEMO *
2561lazy_drop_while_proc(VALUE proc_entry, struct MEMO* result, VALUE memos, long memo_index)
2562{
2563 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2564 VALUE memo = rb_ary_entry(memos, memo_index);
2565
2566 if (NIL_P(memo)) {
2567 memo = entry->memo;
2568 }
2569
2570 if (!RTEST(memo)) {
2571 VALUE drop = lazyenum_yield_values(proc_entry, result);
2572 if (RTEST(drop)) return 0;
2573 rb_ary_store(memos, memo_index, Qtrue);
2574 }
2575 return result;
2576}
2577
2578static const lazyenum_funcs lazy_drop_while_funcs = {
2579 lazy_drop_while_proc, 0,
2580};
2581
2582/*
2583 * call-seq:
2584 * lazy.drop_while { |obj| block } -> lazy_enumerator
2585 *
2586 * Like Enumerable#drop_while, but chains operation to be lazy-evaluated.
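 *
 * For instance (an illustrative usage sketch):
 *
 *    (1..Float::INFINITY).lazy.drop_while { |i| i < 4 }.first(3)
 *    #=> [4, 5, 6]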
2587 */
2588
2589static VALUE
2590lazy_drop_while(VALUE obj)
2591{
2592 LAZY_NEED_BLOCK(drop_while);
2593 return lazy_add_method(obj, 0, 0, Qfalse, Qnil, &lazy_drop_while_funcs);
2594}
2595
2596static int
2597lazy_uniq_check(VALUE chain, VALUE memos, long memo_index)
2598{
2599 VALUE hash = rb_ary_entry(memos, memo_index);
2600
2601 if (NIL_P(hash)) {
2602 hash = rb_obj_hide(rb_hash_new());
2603 rb_ary_store(memos, memo_index, hash);
2604 }
2605
2606 return rb_hash_add_new_element(hash, chain, Qfalse);
2607}
2608
2609static struct MEMO *
2610lazy_uniq_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2611{
2612 if (lazy_uniq_check(result->memo_value, memos, memo_index)) return 0;
2613 return result;
2614}
2615
2616static struct MEMO *
2617lazy_uniq_iter_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2618{
2619 VALUE chain = lazyenum_yield(proc_entry, result);
2620
2621 if (lazy_uniq_check(chain, memos, memo_index)) return 0;
2622 return result;
2623}
2624
2625static const lazyenum_funcs lazy_uniq_iter_funcs = {
2626 lazy_uniq_iter_proc, 0,
2627};
2628
2629static const lazyenum_funcs lazy_uniq_funcs = {
2630 lazy_uniq_proc, 0,
2631};
2632
2633/*
2634 * call-seq:
2635 * lazy.uniq -> lazy_enumerator
2636 * lazy.uniq { |item| block } -> lazy_enumerator
2637 *
2638 * Like Enumerable#uniq, but chains operation to be lazy-evaluated.
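 *
 * For example (an illustrative usage sketch):
 *
 *    [1, 1, 2, 2, 3].lazy.uniq.force
 *    #=> [1, 2, 3]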
2639 */
2640
2641static VALUE
2642lazy_uniq(VALUE obj)
2643{
2644 const lazyenum_funcs *const funcs =
2645 rb_block_given_p() ? &lazy_uniq_iter_funcs : &lazy_uniq_funcs;
2646 return lazy_add_method(obj, 0, 0, Qnil, Qnil, funcs);
2647}
2648
2649static struct MEMO *
2650lazy_compact_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2651{
2652 if (NIL_P(result->memo_value)) return 0;
2653 return result;
2654}
2655
2656static const lazyenum_funcs lazy_compact_funcs = {
2657 lazy_compact_proc, 0,
2658};
2659
2660/*
2661 * call-seq:
2662 * lazy.compact -> lazy_enumerator
2663 *
2664 * Like Enumerable#compact, but chains operation to be lazy-evaluated.
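 *
 * For example (an illustrative usage sketch):
 *
 *    [1, nil, 2, nil, 3].lazy.compact.force
 *    #=> [1, 2, 3]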
2665 */
2666
2667static VALUE
2668lazy_compact(VALUE obj)
2669{
2670 return lazy_add_method(obj, 0, 0, Qnil, Qnil, &lazy_compact_funcs);
2671}
2672
2673static struct MEMO *
2674lazy_with_index_proc(VALUE proc_entry, struct MEMO* result, VALUE memos, long memo_index)
2675{
2676 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2677 VALUE memo = rb_ary_entry(memos, memo_index);
2678 VALUE argv[2];
2679
2680 if (NIL_P(memo)) {
2681 memo = entry->memo;
2682 }
2683
2684 argv[0] = result->memo_value;
2685 argv[1] = memo;
2686 if (entry->proc) {
2687 rb_proc_call_with_block(entry->proc, 2, argv, Qnil);
2688 LAZY_MEMO_RESET_PACKED(result);
2689 }
2690 else {
2691 LAZY_MEMO_SET_VALUE(result, rb_ary_new_from_values(2, argv));
2692 LAZY_MEMO_SET_PACKED(result);
2693 }
2694 rb_ary_store(memos, memo_index, LONG2NUM(NUM2LONG(memo) + 1));
2695 return result;
2696}
2697
2698static VALUE
2699lazy_with_index_size(VALUE proc, VALUE receiver)
2700{
2701 return receiver;
2702}
2703
2704static const lazyenum_funcs lazy_with_index_funcs = {
2705 lazy_with_index_proc, lazy_with_index_size,
2706};
2707
2708/*
2709 * call-seq:
2710 * lazy.with_index(offset = 0) {|(*args), idx| block }
2711 * lazy.with_index(offset = 0)
2712 *
2713 * If a block is given, iterates over the given block for each
2714 * element with an index, which starts from +offset+, and returns
2715 * a lazy enumerator that yields the same values (without the
2716 * index).
2717 *
2718 * If a block is not given, returns a new lazy enumerator that
2719 * includes the index, starting from +offset+.
2720 *
2721 * +offset+:: the starting index to use
2722 *
2723 * See Enumerator#with_index.
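 *
 * An illustrative usage sketch of both forms:
 *
 *    ("a".."c").lazy.with_index(1).force
 *    #=> [["a", 1], ["b", 2], ["c", 3]]
 *    ("a".."c").lazy.with_index(1) { |s, i| puts "#{i}: #{s}" }.force
 *    #=> ["a", "b", "c"]   (after printing "1: a", "2: b", "3: c")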
2724 */
2725static VALUE
2726lazy_with_index(int argc, VALUE *argv, VALUE obj)
2727{
2728 VALUE memo;
2729
2730 rb_scan_args(argc, argv, "01", &memo);
2731 if (NIL_P(memo))
2732 memo = LONG2NUM(0);
2733
2734 return lazy_add_method(obj, 0, 0, memo, rb_ary_new_from_values(1, &memo), &lazy_with_index_funcs);
2735}
2736
2737#if 0 /* for RDoc */
2738
2739/*
2740 * call-seq:
2741 * lazy.chunk { |elt| ... } -> lazy_enumerator
2742 *
2743 * Like Enumerable#chunk, but chains operation to be lazy-evaluated.
2744 */
2745static VALUE
2746lazy_chunk(VALUE self)
2747{
2748}
2749
2750/*
2751 * call-seq:
2752 * lazy.chunk_while {|elt_before, elt_after| bool } -> lazy_enumerator
2753 *
2754 * Like Enumerable#chunk_while, but chains operation to be lazy-evaluated.
2755 */
2756static VALUE
2757lazy_chunk_while(VALUE self)
2758{
2759}
2760
2761/*
2762 * call-seq:
2763 * lazy.slice_after(pattern) -> lazy_enumerator
2764 * lazy.slice_after { |elt| bool } -> lazy_enumerator
2765 *
2766 * Like Enumerable#slice_after, but chains operation to be lazy-evaluated.
2767 */
2768static VALUE
2769lazy_slice_after(VALUE self)
2770{
2771}
2772
2773/*
2774 * call-seq:
2775 * lazy.slice_before(pattern) -> lazy_enumerator
2776 * lazy.slice_before { |elt| bool } -> lazy_enumerator
2777 *
2778 * Like Enumerable#slice_before, but chains operation to be lazy-evaluated.
2779 */
2780static VALUE
2781lazy_slice_before(VALUE self)
2782{
2783}
2784
2785/*
2786 * call-seq:
2787 * lazy.slice_when {|elt_before, elt_after| bool } -> lazy_enumerator
2788 *
2789 * Like Enumerable#slice_when, but chains operation to be lazy-evaluated.
2790 */
2791static VALUE
2792lazy_slice_when(VALUE self)
2793{
2794}
2795# endif
2796
2797static VALUE
2798lazy_super(int argc, VALUE *argv, VALUE lazy)
2799{
2800 return enumerable_lazy(rb_call_super(argc, argv));
2801}
2802
2803/*
2804 * call-seq:
2805 * enum.lazy -> lazy_enumerator
2806 *
2807 * Returns self.
2808 */
2809
2810static VALUE
2811lazy_lazy(VALUE obj)
2812{
2813 return obj;
2814}
2815
2816/*
2817 * Document-class: StopIteration
2818 *
2819 * Raised to stop the iteration, in particular by Enumerator#next. It is
2820 * rescued by Kernel#loop.
2821 *
2822 * loop do
2823 * puts "Hello"
2824 * raise StopIteration
2825 * puts "World"
2826 * end
2827 * puts "Done!"
2828 *
2829 * <em>produces:</em>
2830 *
2831 * Hello
2832 * Done!
2833 */
2834
2835/*
2836 * call-seq:
2837 * result -> value
2838 *
2839 * Returns the return value of the iterator.
2840 *
2841 * o = Object.new
2842 * def o.each
2843 * yield 1
2844 * yield 2
2845 * yield 3
2846 * 100
2847 * end
2848 *
2849 * e = o.to_enum
2850 *
2851 * puts e.next #=> 1
2852 * puts e.next #=> 2
2853 * puts e.next #=> 3
2854 *
2855 * begin
2856 * e.next
2857 * rescue StopIteration => ex
2858 * puts ex.result #=> 100
2859 * end
2860 *
2861 */
2862
2863static VALUE
2864stop_result(VALUE self)
2865{
2866 return rb_attr_get(self, id_result);
2867}
2868
2869/*
2870 * Producer
2871 */
2872
2873static void
2874producer_mark_and_move(void *p)
2875{
2876 struct producer *ptr = p;
2877 rb_gc_mark_and_move(&ptr->init);
2878 rb_gc_mark_and_move(&ptr->proc);
2879}
2880
2881#define producer_free RUBY_TYPED_DEFAULT_FREE
2882
2883static size_t
2884producer_memsize(const void *p)
2885{
2886 return sizeof(struct producer);
2887}
2888
2889static const rb_data_type_t producer_data_type = {
2890 "producer",
2891 {
2892 producer_mark_and_move,
2893 producer_free,
2894 producer_memsize,
2895 producer_mark_and_move,
2896 },
2897 0, 0, RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED | RUBY_TYPED_EMBEDDABLE
2898};
2899
2900static struct producer *
2901producer_ptr(VALUE obj)
2902{
2903 struct producer *ptr;
2904
2905 TypedData_Get_Struct(obj, struct producer, &producer_data_type, ptr);
2906 if (!ptr || UNDEF_P(ptr->proc)) {
2907 rb_raise(rb_eArgError, "uninitialized producer");
2908 }
2909 return ptr;
2910}
2911
2912/* :nodoc: */
2913static VALUE
2914producer_allocate(VALUE klass)
2915{
2916 struct producer *ptr;
2917 VALUE obj;
2918
2919 obj = TypedData_Make_Struct(klass, struct producer, &producer_data_type, ptr);
2920 ptr->init = Qundef;
2921 ptr->proc = Qundef;
2922
2923 return obj;
2924}
2925
2926static VALUE
2927producer_init(VALUE obj, VALUE init, VALUE proc)
2928{
2929 struct producer *ptr;
2930
2931 TypedData_Get_Struct(obj, struct producer, &producer_data_type, ptr);
2932
2933 if (!ptr) {
2934 rb_raise(rb_eArgError, "unallocated producer");
2935 }
2936
2937 RB_OBJ_WRITE(obj, &ptr->init, init);
2938 RB_OBJ_WRITE(obj, &ptr->proc, proc);
2939
2940 return obj;
2941}
2942
2943static VALUE
2944producer_each_stop(VALUE dummy, VALUE exc)
2945{
2946 return rb_attr_get(exc, id_result);
2947}
2948
2949NORETURN(static VALUE producer_each_i(VALUE obj));
2950
2951static VALUE
2952producer_each_i(VALUE obj)
2953{
2954 struct producer *ptr;
2955 VALUE init, proc, curr;
2956
2957 ptr = producer_ptr(obj);
2958 init = ptr->init;
2959 proc = ptr->proc;
2960
2961 if (UNDEF_P(init)) {
2962 curr = Qnil;
2963 }
2964 else {
2965 rb_yield(init);
2966 curr = init;
2967 }
2968
2969 for (;;) {
2970 curr = rb_funcall(proc, id_call, 1, curr);
2971 rb_yield(curr);
2972 }
2973
2974     UNREACHABLE_RETURN(Qnil);
2975}
2976
2977/* :nodoc: */
2978static VALUE
2979producer_each(VALUE obj)
2980{
2981 rb_need_block();
2982
2983 return rb_rescue2(producer_each_i, obj, producer_each_stop, (VALUE)0, rb_eStopIteration, (VALUE)0);
2984}
2985
2986static VALUE
2987producer_size(VALUE obj, VALUE args, VALUE eobj)
2988{
2989 return DBL2NUM(HUGE_VAL);
2990}
2991
2992/*
2993 * call-seq:
2994 * Enumerator.produce(initial = nil) { |prev| block } -> enumerator
2995 *
2996 * Creates an infinite enumerator from any block, just called over and
2997 * over. The result of the previous iteration is passed to the next one.
2998 * If +initial+ is provided, it is passed to the first iteration, and
2999 * becomes the first element of the enumerator; if it is not provided,
3000 * the first iteration receives +nil+, and its result becomes the first
3001 * element of the iterator.
3002 *
3003 * Raising StopIteration from the block stops an iteration.
3004 *
3005 * Enumerator.produce(1, &:succ) # => enumerator of 1, 2, 3, 4, ....
3006 *
3007 * Enumerator.produce { rand(10) } # => infinite random number sequence
3008 *
3009 * ancestors = Enumerator.produce(node) { |prev| node = prev.parent or raise StopIteration }
3010 * enclosing_section = ancestors.find { |n| n.type == :section }
3011 *
3012 * Using ::produce together with Enumerable methods like Enumerable#detect,
3013 * Enumerable#slice_after, Enumerable#take_while can provide Enumerator-based alternatives
3014 * for +while+ and +until+ cycles:
3015 *
3016 * # Find next Tuesday
3017 * require "date"
3018 * Enumerator.produce(Date.today, &:succ).detect(&:tuesday?)
3019 *
3020 * # Simple lexer:
3021 * require "strscan"
3022 * scanner = StringScanner.new("7+38/6")
3023 * PATTERN = %r{\d+|[-/+*]}
3024 * Enumerator.produce { scanner.scan(PATTERN) }.slice_after { scanner.eos? }.first
3025 * # => ["7", "+", "38", "/", "6"]
3026 */
3027static VALUE
3028enumerator_s_produce(int argc, VALUE *argv, VALUE klass)
3029{
3030 VALUE init, producer;
3031
3032 if (!rb_block_given_p()) rb_raise(rb_eArgError, "no block given");
3033
3034 if (rb_scan_args(argc, argv, "01", &init) == 0) {
3035 init = Qundef;
3036 }
3037
3038 producer = producer_init(producer_allocate(rb_cEnumProducer), init, rb_block_proc());
3039
3040 return rb_enumeratorize_with_size_kw(producer, sym_each, 0, 0, producer_size, RB_NO_KEYWORDS);
3041}
3042
3043/*
3044 * Document-class: Enumerator::Chain
3045 *
3046 * Enumerator::Chain is a subclass of Enumerator, which represents a
3047 * chain of enumerables that works as a single enumerator.
3048 *
3049 * Objects of this type can be created by Enumerable#chain and
3050 * Enumerator#+.
3051 */
3052
3053static void
3054enum_chain_mark_and_move(void *p)
3055{
3056 struct enum_chain *ptr = p;
3057 rb_gc_mark_and_move(&ptr->enums);
3058}
3059
3060#define enum_chain_free RUBY_TYPED_DEFAULT_FREE
3061
3062static size_t
3063enum_chain_memsize(const void *p)
3064{
3065 return sizeof(struct enum_chain);
3066}
3067
3068static const rb_data_type_t enum_chain_data_type = {
3069 "chain",
3070 {
3071 enum_chain_mark_and_move,
3072 enum_chain_free,
3073 enum_chain_memsize,
3074 enum_chain_mark_and_move,
3075 },
3076 0, 0, RUBY_TYPED_FREE_IMMEDIATELY
3077};
3078
3079static struct enum_chain *
3080enum_chain_ptr(VALUE obj)
3081{
3082 struct enum_chain *ptr;
3083
3084 TypedData_Get_Struct(obj, struct enum_chain, &enum_chain_data_type, ptr);
3085 if (!ptr || UNDEF_P(ptr->enums)) {
3086 rb_raise(rb_eArgError, "uninitialized chain");
3087 }
3088 return ptr;
3089}
3090
3091/* :nodoc: */
3092static VALUE
3093enum_chain_allocate(VALUE klass)
3094{
3095 struct enum_chain *ptr;
3096 VALUE obj;
3097
3098 obj = TypedData_Make_Struct(klass, struct enum_chain, &enum_chain_data_type, ptr);
3099 ptr->enums = Qundef;
3100 ptr->pos = -1;
3101
3102 return obj;
3103}
3104
3105/*
3106 * call-seq:
3107 * Enumerator::Chain.new(*enums) -> enum
3108 *
3109 * Generates a new enumerator object that iterates over the elements
3110 * of given enumerable objects in sequence.
3111 *
3112 * e = Enumerator::Chain.new(1..3, [4, 5])
3113 * e.to_a #=> [1, 2, 3, 4, 5]
3114 * e.size #=> 5
3115 */
3116static VALUE
3117enum_chain_initialize(VALUE obj, VALUE enums)
3118{
3119 struct enum_chain *ptr;
3120
3121 rb_check_frozen(obj);
3122 TypedData_Get_Struct(obj, struct enum_chain, &enum_chain_data_type, ptr);
3123
3124 if (!ptr) rb_raise(rb_eArgError, "unallocated chain");
3125
3126 ptr->enums = rb_ary_freeze(enums);
3127 ptr->pos = -1;
3128
3129 return obj;
3130}
3131
3132static VALUE
3133new_enum_chain(VALUE enums)
3134{
3135 long i;
3136 VALUE obj = enum_chain_initialize(enum_chain_allocate(rb_cEnumChain), enums);
3137
3138 for (i = 0; i < RARRAY_LEN(enums); i++) {
3139 if (RTEST(rb_obj_is_kind_of(RARRAY_AREF(enums, i), rb_cLazy))) {
3140 return enumerable_lazy(obj);
3141 }
3142 }
3143
3144 return obj;
3145}
3146
3147/* :nodoc: */
3148static VALUE
3149enum_chain_init_copy(VALUE obj, VALUE orig)
3150{
3151 struct enum_chain *ptr0, *ptr1;
3152
3153 if (!OBJ_INIT_COPY(obj, orig)) return obj;
3154 ptr0 = enum_chain_ptr(orig);
3155
3156 TypedData_Get_Struct(obj, struct enum_chain, &enum_chain_data_type, ptr1);
3157
3158 if (!ptr1) rb_raise(rb_eArgError, "unallocated chain");
3159
3160 ptr1->enums = ptr0->enums;
3161 ptr1->pos = ptr0->pos;
3162
3163 return obj;
3164}
3165
3166static VALUE
3167enum_chain_total_size(VALUE enums)
3168{
3169 VALUE total = INT2FIX(0);
3170 long i;
3171
3172 for (i = 0; i < RARRAY_LEN(enums); i++) {
3173 VALUE size = enum_size(RARRAY_AREF(enums, i));
3174
3175 if (NIL_P(size) || (RB_FLOAT_TYPE_P(size) && isinf(NUM2DBL(size)))) {
3176 return size;
3177 }
3178 if (!RB_INTEGER_TYPE_P(size)) {
3179 return Qnil;
3180 }
3181
3182 total = rb_funcall(total, '+', 1, size);
3183 }
3184
3185 return total;
3186}
3187
3188/*
3189 * call-seq:
3190 * obj.size -> int, Float::INFINITY or nil
3191 *
3192 * Returns the total size of the enumerator chain calculated by
3193 * summing up the size of each enumerable in the chain. If any of the
3194 * enumerables reports its size as nil or Float::INFINITY, that value
3195 * is returned as the total size.
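 *
 * For example (an illustrative usage sketch):
 *
 *    Enumerator::Chain.new(1..3, [4, 5]).size               #=> 5
 *    Enumerator::Chain.new(1..3, 1..Float::INFINITY).size   #=> Float::INFINITY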
3196 */
3197static VALUE
3198enum_chain_size(VALUE obj)
3199{
3200 return enum_chain_total_size(enum_chain_ptr(obj)->enums);
3201}
3202
3203static VALUE
3204enum_chain_enum_size(VALUE obj, VALUE args, VALUE eobj)
3205{
3206 return enum_chain_size(obj);
3207}
3208
3209static VALUE
3210enum_chain_enum_no_size(VALUE obj, VALUE args, VALUE eobj)
3211{
3212 return Qnil;
3213}
3214
3215/*
3216 * call-seq:
3217 * obj.each(*args) { |...| ... } -> obj
3218 * obj.each(*args) -> enumerator
3219 *
3220 * Iterates over the elements of the first enumerable by calling the
3221 * "each" method on it with the given arguments, then proceeds to the
3222 * following enumerables in sequence until all of the enumerables are
3223 * exhausted.
3224 *
3225 * If no block is given, returns an enumerator.
3226 */
3227static VALUE
3228enum_chain_each(int argc, VALUE *argv, VALUE obj)
3229{
3230 VALUE enums, block;
3231 struct enum_chain *objptr;
3232 long i;
3233
3234 RETURN_SIZED_ENUMERATOR(obj, argc, argv, argc > 0 ? enum_chain_enum_no_size : enum_chain_enum_size);
3235
3236 objptr = enum_chain_ptr(obj);
3237 enums = objptr->enums;
3238 block = rb_block_proc();
3239
3240 for (i = 0; i < RARRAY_LEN(enums); i++) {
3241 objptr->pos = i;
3242 rb_funcall_with_block(RARRAY_AREF(enums, i), id_each, argc, argv, block);
3243 }
3244
3245 return obj;
3246}
3247
3248/*
3249 * call-seq:
3250 * obj.rewind -> obj
3251 *
3252 * Rewinds the enumerator chain by calling the "rewind" method on each
3253 * enumerable in reverse order. Each call is performed only if the
3254 * enumerable responds to the method.
3255 */
3256static VALUE
3257enum_chain_rewind(VALUE obj)
3258{
3259 struct enum_chain *objptr = enum_chain_ptr(obj);
3260 VALUE enums = objptr->enums;
3261 long i;
3262
3263 for (i = objptr->pos; 0 <= i && i < RARRAY_LEN(enums); objptr->pos = --i) {
3264 rb_check_funcall(RARRAY_AREF(enums, i), id_rewind, 0, 0);
3265 }
3266
3267 return obj;
3268}
3269
3270static VALUE
3271inspect_enum_chain(VALUE obj, VALUE dummy, int recur)
3272{
3273 VALUE klass = rb_obj_class(obj);
3274 struct enum_chain *ptr;
3275
3276 TypedData_Get_Struct(obj, struct enum_chain, &enum_chain_data_type, ptr);
3277
3278 if (!ptr || UNDEF_P(ptr->enums)) {
3279 return rb_sprintf("#<%"PRIsVALUE": uninitialized>", rb_class_path(klass));
3280 }
3281
3282 if (recur) {
3283 return rb_sprintf("#<%"PRIsVALUE": ...>", rb_class_path(klass));
3284 }
3285
3286 return rb_sprintf("#<%"PRIsVALUE": %+"PRIsVALUE">", rb_class_path(klass), ptr->enums);
3287}
3288
3289/*
3290 * call-seq:
3291 * obj.inspect -> string
3292 *
3293 * Returns a printable version of the enumerator chain.
3294 */
3295static VALUE
3296enum_chain_inspect(VALUE obj)
3297{
3298 return rb_exec_recursive(inspect_enum_chain, obj, 0);
3299}
3300
3301/*
3302 * call-seq:
3303 * e.chain(*enums) -> enumerator
3304 *
3305 * Returns an enumerator object generated from this enumerator and
3306 * given enumerables.
3307 *
3308 * e = (1..3).chain([4, 5])
3309 * e.to_a #=> [1, 2, 3, 4, 5]
3310 */
3311static VALUE
3312enum_chain(int argc, VALUE *argv, VALUE obj)
3313{
3314 VALUE enums = rb_ary_new_from_values(1, &obj);
3315 rb_ary_cat(enums, argv, argc);
3316 return new_enum_chain(enums);
3317}
3318
3319/*
3320 * call-seq:
3321 * e + enum -> enumerator
3322 *
3323 * Returns an enumerator object generated from this enumerator and a
3324 * given enumerable.
3325 *
3326 * e = (1..3).each + [4, 5]
3327 * e.to_a #=> [1, 2, 3, 4, 5]
3328 */
3329static VALUE
3330enumerator_plus(VALUE obj, VALUE eobj)
3331{
3332 return new_enum_chain(rb_ary_new_from_args(2, obj, eobj));
3333}
3334
3335/*
3336 * Document-class: Enumerator::Product
3337 *
3338 * Enumerator::Product generates a Cartesian product of any number of
3339 * enumerable objects. Iterating over the product of enumerable
3340 * objects is roughly equivalent to nested each_entry loops where the
3341 * loop for the rightmost object is put innermost.
3342 *
3343 * innings = Enumerator::Product.new(1..9, ['top', 'bottom'])
3344 *
3345 * innings.each do |i, h|
3346 * p [i, h]
3347 * end
3348 * # [1, "top"]
3349 * # [1, "bottom"]
3350 * # [2, "top"]
3351 * # [2, "bottom"]
3352 * # [3, "top"]
3353 * # [3, "bottom"]
3354 * # ...
3355 * # [9, "top"]
3356 * # [9, "bottom"]
3357 *
3358 * The method used against each enumerable object is `each_entry`
3359 * instead of `each` so that the product of N enumerable objects
3360 * yields an array of exactly N elements in each iteration.
3361 *
3362 * When no enumerator is given, it calls a given block once yielding
3363 * an empty argument list.
3364 *
3365 * Objects of this type can be created by Enumerator.product.
3366 */
3367
3368static void
3369enum_product_mark_and_move(void *p)
3370{
3371 struct enum_product *ptr = p;
3372 rb_gc_mark_and_move(&ptr->enums);
3373}
3374
3375#define enum_product_free RUBY_TYPED_DEFAULT_FREE
3376
3377static size_t
3378enum_product_memsize(const void *p)
3379{
3380 return sizeof(struct enum_product);
3381}
3382
3383static const rb_data_type_t enum_product_data_type = {
3384 "product",
3385 {
3386 enum_product_mark_and_move,
3387 enum_product_free,
3388 enum_product_memsize,
3389 enum_product_mark_and_move,
3390 },
3391 0, 0, RUBY_TYPED_FREE_IMMEDIATELY
3392};
3393
3394static struct enum_product *
3395enum_product_ptr(VALUE obj)
3396{
3397 struct enum_product *ptr;
3398
3399 TypedData_Get_Struct(obj, struct enum_product, &enum_product_data_type, ptr);
3400 if (!ptr || UNDEF_P(ptr->enums)) {
3401 rb_raise(rb_eArgError, "uninitialized product");
3402 }
3403 return ptr;
3404}
3405
3406/* :nodoc: */
3407static VALUE
3408enum_product_allocate(VALUE klass)
3409{
3410 struct enum_product *ptr;
3411 VALUE obj;
3412
3413 obj = TypedData_Make_Struct(klass, struct enum_product, &enum_product_data_type, ptr);
3414 ptr->enums = Qundef;
3415
3416 return obj;
3417}
3418
3419/*
3420 * call-seq:
3421 * Enumerator::Product.new(*enums) -> enum
3422 *
3423 * Generates a new enumerator object that generates a Cartesian
3424 * product of given enumerable objects.
3425 *
3426 * e = Enumerator::Product.new(1..3, [4, 5])
3427 * e.to_a #=> [[1, 4], [1, 5], [2, 4], [2, 5], [3, 4], [3, 5]]
3428 * e.size #=> 6
3429 */
3430static VALUE
3431enum_product_initialize(int argc, VALUE *argv, VALUE obj)
3432{
3433 struct enum_product *ptr;
3434 VALUE enums = Qnil, options = Qnil;
3435
3436 rb_scan_args(argc, argv, "*:", &enums, &options);
3437
3438 if (!NIL_P(options) && !RHASH_EMPTY_P(options)) {
3439 rb_exc_raise(rb_keyword_error_new("unknown", rb_hash_keys(options)));
3440 }
3441
3442 rb_check_frozen(obj);
3443 TypedData_Get_Struct(obj, struct enum_product, &enum_product_data_type, ptr);
3444
3445 if (!ptr) rb_raise(rb_eArgError, "unallocated product");
3446
3447 ptr->enums = rb_ary_freeze(enums);
3448
3449 return obj;
3450}
3451
3452/* :nodoc: */
3453static VALUE
3454enum_product_init_copy(VALUE obj, VALUE orig)
3455{
3456 struct enum_product *ptr0, *ptr1;
3457
3458 if (!OBJ_INIT_COPY(obj, orig)) return obj;
3459 ptr0 = enum_product_ptr(orig);
3460
3461 TypedData_Get_Struct(obj, struct enum_product, &enum_product_data_type, ptr1);
3462
3463 if (!ptr1) rb_raise(rb_eArgError, "unallocated product");
3464
3465 ptr1->enums = ptr0->enums;
3466
3467 return obj;
3468}
3469
3470static VALUE
3471enum_product_total_size(VALUE enums)
3472{
3473 VALUE total = INT2FIX(1);
3474 VALUE sizes = rb_ary_hidden_new(RARRAY_LEN(enums));
3475 long i;
3476
3477 for (i = 0; i < RARRAY_LEN(enums); i++) {
3478 VALUE size = enum_size(RARRAY_AREF(enums, i));
3479 if (size == INT2FIX(0)) {
3480 rb_ary_resize(sizes, 0);
3481 return size;
3482 }
3483 rb_ary_push(sizes, size);
3484 }
3485 for (i = 0; i < RARRAY_LEN(sizes); i++) {
3486 VALUE size = RARRAY_AREF(sizes, i);
3487
3488 if (NIL_P(size) || (RB_TYPE_P(size, T_FLOAT) && isinf(NUM2DBL(size)))) {
3489 return size;
3490 }
3491 if (!RB_INTEGER_TYPE_P(size)) {
3492 return Qnil;
3493 }
3494
3495 total = rb_funcall(total, '*', 1, size);
3496 }
3497
3498 return total;
3499}
3500
3501/*
3502 * call-seq:
3503 * obj.size -> int, Float::INFINITY or nil
3504 *
3505 * Returns the total size of the enumerator product calculated by
3506 * multiplying the sizes of enumerables in the product. If any of the
3507 * enumerables reports its size as nil or Float::INFINITY, that value
3508 * is returned as the size.
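 *
 * For example (an illustrative usage sketch):
 *
 *    Enumerator::Product.new(1..3, [4, 5]).size   #=> 6
 *    Enumerator::Product.new(1..3, []).size       #=> 0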
3509 */
3510static VALUE
3511enum_product_size(VALUE obj)
3512{
3513 return enum_product_total_size(enum_product_ptr(obj)->enums);
3514}
3515
3516static VALUE
3517enum_product_enum_size(VALUE obj, VALUE args, VALUE eobj)
3518{
3519 return enum_product_size(obj);
3520}
3521
3522 struct product_state {
3523     VALUE obj;
3524 VALUE block;
3525 int argc;
3526 VALUE *argv;
3527 int index;
3528};
3529
3530static VALUE product_each(VALUE, struct product_state *);
3531
3532static VALUE
3533product_each_i(RB_BLOCK_CALL_FUNC_ARGLIST(value, state))
3534{
3535 struct product_state *pstate = (struct product_state *)state;
3536 pstate->argv[pstate->index++] = value;
3537
3538 VALUE val = product_each(pstate->obj, pstate);
3539 pstate->index--;
3540 return val;
3541}
3542
3543static VALUE
3544product_each(VALUE obj, struct product_state *pstate)
3545{
3546 struct enum_product *ptr = enum_product_ptr(obj);
3547 VALUE enums = ptr->enums;
3548
3549 if (pstate->index < pstate->argc) {
3550 VALUE eobj = RARRAY_AREF(enums, pstate->index);
3551
3552 rb_block_call(eobj, id_each_entry, 0, NULL, product_each_i, (VALUE)pstate);
3553 }
3554 else {
3555 rb_funcall(pstate->block, id_call, 1, rb_ary_new_from_values(pstate->argc, pstate->argv));
3556 }
3557
3558 return obj;
3559}
3560
3561static VALUE
3562enum_product_run(VALUE obj, VALUE block)
3563{
3564 struct enum_product *ptr = enum_product_ptr(obj);
3565 int argc = RARRAY_LENINT(ptr->enums);
3566 struct product_state state = {
3567 .obj = obj,
3568 .block = block,
3569 .index = 0,
3570 .argc = argc,
3571 .argv = ALLOCA_N(VALUE, argc),
3572 };
3573
3574 return product_each(obj, &state);
3575}
3576
3577/*
3578 * call-seq:
3579 * obj.each { |...| ... } -> obj
3580 * obj.each -> enumerator
3581 *
3582 * Iterates over the elements of the first enumerable by calling the
3583 * "each_entry" method on it with the given arguments, then proceeds
3584 * to the following enumerables in sequence until all of the
3585 * enumerables are exhausted.
3586 *
3587 * If no block is given, returns an enumerator. Otherwise, returns self.
3588 */
3589static VALUE
3590enum_product_each(VALUE obj)
3591{
3592 RETURN_SIZED_ENUMERATOR(obj, 0, 0, enum_product_enum_size);
3593
3594 return enum_product_run(obj, rb_block_proc());
3595}
3596
3597/*
3598 * call-seq:
3599 * obj.rewind -> obj
3600 *
3601 * Rewinds the product enumerator by calling the "rewind" method on
3602 * each enumerable in reverse order. Each call is performed only if
3603 * the enumerable responds to the method.
3604 */
3605static VALUE
3606enum_product_rewind(VALUE obj)
3607{
3608 struct enum_product *ptr = enum_product_ptr(obj);
3609 VALUE enums = ptr->enums;
3610 long i;
3611
3612 for (i = 0; i < RARRAY_LEN(enums); i++) {
3613 rb_check_funcall(RARRAY_AREF(enums, i), id_rewind, 0, 0);
3614 }
3615
3616 return obj;
3617}
3618
3619static VALUE
3620inspect_enum_product(VALUE obj, VALUE dummy, int recur)
3621{
3622 VALUE klass = rb_obj_class(obj);
3623 struct enum_product *ptr;
3624
3625 TypedData_Get_Struct(obj, struct enum_product, &enum_product_data_type, ptr);
3626
3627 if (!ptr || UNDEF_P(ptr->enums)) {
3628 return rb_sprintf("#<%"PRIsVALUE": uninitialized>", rb_class_path(klass));
3629 }
3630
3631 if (recur) {
3632 return rb_sprintf("#<%"PRIsVALUE": ...>", rb_class_path(klass));
3633 }
3634
3635 return rb_sprintf("#<%"PRIsVALUE": %+"PRIsVALUE">", rb_class_path(klass), ptr->enums);
3636}
3637
3638/*
3639 * call-seq:
3640 * obj.inspect -> string
3641 *
3642 * Returns a printable version of the product enumerator.
3643 */
3644static VALUE
3645enum_product_inspect(VALUE obj)
3646{
3647 return rb_exec_recursive(inspect_enum_product, obj, 0);
3648}
3649
3650/*
3651 * call-seq:
3652 * Enumerator.product(*enums) -> enumerator
3653 * Enumerator.product(*enums) { |elts| ... } -> nil
3654 *
3655 * Generates a new enumerator object that generates a Cartesian
3656 * product of given enumerable objects. This is equivalent to
3657 * Enumerator::Product.new.
3658 *
3659 * e = Enumerator.product(1..3, [4, 5])
3660 * e.to_a #=> [[1, 4], [1, 5], [2, 4], [2, 5], [3, 4], [3, 5]]
3661 * e.size #=> 6
3662 *
3663 * When a block is given, calls the block with each N-element array
3664 * generated and returns +nil+.
3665 */
3666static VALUE
3667enumerator_s_product(int argc, VALUE *argv, VALUE klass)
3668{
3669 VALUE enums = Qnil, options = Qnil, block = Qnil;
3670
3671 rb_scan_args(argc, argv, "*:&", &enums, &options, &block);
3672
3673 if (!NIL_P(options) && !RHASH_EMPTY_P(options)) {
3674 rb_exc_raise(rb_keyword_error_new("unknown", rb_hash_keys(options)));
3675 }
3676
3677 VALUE obj = enum_product_initialize(argc, argv, enum_product_allocate(rb_cEnumProduct));
3678
3679 if (!NIL_P(block)) {
3680 enum_product_run(obj, block);
3681 return Qnil;
3682 }
3683
3684 return obj;
3685}
3686
3687 struct arith_seq {
3688     struct enumerator enumerator;
3689 VALUE begin;
3690 VALUE end;
3691 VALUE step;
3692 bool exclude_end;
3693};
3694
3695RUBY_REFERENCES(arith_seq_refs) = {
3696 RUBY_REF_EDGE(struct enumerator, obj),
3697 RUBY_REF_EDGE(struct enumerator, args),
3698 RUBY_REF_EDGE(struct enumerator, fib),
3699 RUBY_REF_EDGE(struct enumerator, dst),
3700 RUBY_REF_EDGE(struct enumerator, lookahead),
3701 RUBY_REF_EDGE(struct enumerator, feedvalue),
3702 RUBY_REF_EDGE(struct enumerator, stop_exc),
3703 RUBY_REF_EDGE(struct enumerator, size),
3704 RUBY_REF_EDGE(struct enumerator, procs),
3705
3706 RUBY_REF_EDGE(struct arith_seq, begin),
3707 RUBY_REF_EDGE(struct arith_seq, end),
3708 RUBY_REF_EDGE(struct arith_seq, step),
3709 RUBY_REF_END
3710};
3711
3712static const rb_data_type_t arith_seq_data_type = {
3713 "arithmetic_sequence",
3714 {
3715         RUBY_REFS_LIST_PTR(arith_seq_refs),
3716         RUBY_TYPED_DEFAULT_FREE,
3717 NULL, // Nothing allocated externally, so don't need a memsize function
3718 NULL,
3719 },
3720 .parent = &enumerator_data_type,
3721 .flags = RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED | RUBY_TYPED_DECL_MARKING | RUBY_TYPED_EMBEDDABLE
3722};
3723
3724static VALUE
3725arith_seq_allocate(VALUE klass)
3726{
3727 struct arith_seq *ptr;
3728 VALUE enum_obj;
3729
3730 enum_obj = TypedData_Make_Struct(klass, struct arith_seq, &arith_seq_data_type, ptr);
3731 ptr->enumerator.obj = Qundef;
3732
3733 return enum_obj;
3734}
3735
3736/*
3737 * Document-class: Enumerator::ArithmeticSequence
3738 *
3739 * Enumerator::ArithmeticSequence is a subclass of Enumerator
3740 * that represents a sequence of numbers with a common difference.
3741 * Instances of this class can be generated by the Range#step and Numeric#step
3742 * methods.
3743 *
3744 * The class can be used for slicing Array (see Array#slice) or custom
3745 * collections.
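 *
 * A small illustrative sketch:
 *
 *    aseq = (1..10).step(3)   # an Enumerator::ArithmeticSequence
 *    aseq.to_a                #=> [1, 4, 7, 10]
 *    aseq.begin               #=> 1
 *    aseq.step                #=> 3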
3746 */
3747
3748VALUE
3749rb_arith_seq_new(VALUE obj, VALUE meth, int argc, VALUE const *argv,
3750 rb_enumerator_size_func *size_fn,
3751 VALUE beg, VALUE end, VALUE step, int excl)
3752{
3753 VALUE aseq = enumerator_init(arith_seq_allocate(rb_cArithSeq),
3754 obj, meth, argc, argv, size_fn, Qnil, rb_keyword_given_p());
3755 struct arith_seq *ptr;
3756 TypedData_Get_Struct(aseq, struct arith_seq, &enumerator_data_type, ptr);
3757
3758 RB_OBJ_WRITE(aseq, &ptr->begin, beg);
3759 RB_OBJ_WRITE(aseq, &ptr->end, end);
3760 RB_OBJ_WRITE(aseq, &ptr->step, step);
3761 ptr->exclude_end = excl;
3762
3763 return aseq;
3764}
3765
3766/*
3767 * call-seq: aseq.begin -> num or nil
3768 *
3769 * Returns the number that defines the first element of this arithmetic
3770 * sequence.
3771 */
3772static inline VALUE
3773arith_seq_begin(VALUE self)
3774{
3775 struct arith_seq *ptr;
3776 TypedData_Get_Struct(self, struct arith_seq, &enumerator_data_type, ptr);
3777 return ptr->begin;
3778}
3779
3780/*
3781 * call-seq: aseq.end -> num or nil
3782 *
3783 * Returns the number that defines the end of this arithmetic sequence.
3784 */
3785static inline VALUE
3786arith_seq_end(VALUE self)
3787{
3788 struct arith_seq *ptr;
3789 TypedData_Get_Struct(self, struct arith_seq, &enumerator_data_type, ptr);
3790 return ptr->end;
3791}
3792
3793/*
3794 * call-seq: aseq.step -> num
3795 *
3796 * Returns the number that defines the common difference between
3797 * two adjacent elements in this arithmetic sequence.
3798 */
3799static inline VALUE
3800arith_seq_step(VALUE self)
3801{
3802 struct arith_seq *ptr;
3803 TypedData_Get_Struct(self, struct arith_seq, &enumerator_data_type, ptr);
3804 return ptr->step;
3805}
3806
3807/*
3808 * call-seq: aseq.exclude_end? -> true or false
3809 *
3810 * Returns <code>true</code> if this arithmetic sequence excludes its end value.
3811 */
3812static inline VALUE
3813arith_seq_exclude_end(VALUE self)
3814{
3815 struct arith_seq *ptr;
3816 TypedData_Get_Struct(self, struct arith_seq, &enumerator_data_type, ptr);
3817 return RBOOL(ptr->exclude_end);
3818}
3819
3820static inline int
3821arith_seq_exclude_end_p(VALUE self)
3822{
3823 struct arith_seq *ptr;
3824 TypedData_Get_Struct(self, struct arith_seq, &enumerator_data_type, ptr);
3825 return ptr->exclude_end;
3826}
3827
3828int
3829rb_arithmetic_sequence_extract(VALUE obj, rb_arithmetic_sequence_components_t *component)
3830{
3831 if (rb_obj_is_kind_of(obj, rb_cArithSeq)) {
3832 component->begin = arith_seq_begin(obj);
3833 component->end = arith_seq_end(obj);
3834 component->step = arith_seq_step(obj);
3835 component->exclude_end = arith_seq_exclude_end_p(obj);
3836 return 1;
3837 }
3838 else if (rb_range_values(obj, &component->begin, &component->end, &component->exclude_end)) {
3839 component->step = INT2FIX(1);
3840 return 1;
3841 }
3842
3843 return 0;
3844}
3845
3846VALUE
3847rb_arithmetic_sequence_beg_len_step(VALUE obj, long *begp, long *lenp, long *stepp, long len, int err)
3848{
3849 RBIMPL_NONNULL_ARG(begp);
3850 RBIMPL_NONNULL_ARG(lenp);
3851 RBIMPL_NONNULL_ARG(stepp);
3852
3853     rb_arithmetic_sequence_components_t aseq;
3854 if (!rb_arithmetic_sequence_extract(obj, &aseq)) {
3855 return Qfalse;
3856 }
3857
3858 long step = NIL_P(aseq.step) ? 1 : NUM2LONG(aseq.step);
3859 *stepp = step;
3860
3861 if (step < 0) {
3862 if (aseq.exclude_end && !NIL_P(aseq.end)) {
3863 /* Handle exclusion before range reversal */
3864 aseq.end = LONG2NUM(NUM2LONG(aseq.end) + 1);
3865
3866 /* Don't exclude the previous beginning */
3867 aseq.exclude_end = 0;
3868 }
3869 VALUE tmp = aseq.begin;
3870 aseq.begin = aseq.end;
3871 aseq.end = tmp;
3872 }
3873
3874 if (err == 0 && (step < -1 || step > 1)) {
3875 if (rb_range_component_beg_len(aseq.begin, aseq.end, aseq.exclude_end, begp, lenp, len, 1) == Qtrue) {
3876 if (*begp > len)
3877 goto out_of_range;
3878 if (*lenp > len)
3879 goto out_of_range;
3880 return Qtrue;
3881 }
3882 }
3883 else {
3884 return rb_range_component_beg_len(aseq.begin, aseq.end, aseq.exclude_end, begp, lenp, len, err);
3885 }
3886
3887 out_of_range:
3888 rb_raise(rb_eRangeError, "%+"PRIsVALUE" out of range", obj);
3889 return Qnil;
3890}
3891
3892static VALUE
3893arith_seq_take(VALUE self, VALUE num)
3894{
3895 VALUE b, e, s, ary;
3896 long n;
3897 int x;
3898
3899 n = NUM2LONG(num);
3900 if (n < 0) {
3901 rb_raise(rb_eArgError, "attempt to take negative size");
3902 }
3903 if (n == 0) {
3904 return rb_ary_new_capa(0);
3905 }
3906
3907 b = arith_seq_begin(self);
3908 e = arith_seq_end(self);
3909 s = arith_seq_step(self);
3910 x = arith_seq_exclude_end_p(self);
3911
3912 if (FIXNUM_P(b) && NIL_P(e) && FIXNUM_P(s)) {
3913 long i = FIX2LONG(b), unit = FIX2LONG(s);
3914 ary = rb_ary_new_capa(n);
3915 while (n > 0 && FIXABLE(i)) {
3916 rb_ary_push(ary, LONG2FIX(i));
3917 i += unit; // FIXABLE + FIXABLE never overflow;
3918 --n;
3919 }
3920 if (n > 0) {
3921 b = LONG2NUM(i);
3922 while (n > 0) {
3923 rb_ary_push(ary, b);
3924 b = rb_big_plus(b, s);
3925 --n;
3926 }
3927 }
3928 return ary;
3929 }
3930 else if (FIXNUM_P(b) && FIXNUM_P(e) && FIXNUM_P(s)) {
3931 long i = FIX2LONG(b);
3932 long end = FIX2LONG(e);
3933 long unit = FIX2LONG(s);
3934 long len;
3935
3936 if (unit >= 0) {
3937 if (!x) end += 1;
3938
3939 len = end - i;
3940 if (len < 0) len = 0;
3941 ary = rb_ary_new_capa((n < len) ? n : len);
3942 while (n > 0 && i < end) {
3943 rb_ary_push(ary, LONG2FIX(i));
3944 if (i + unit < i) break;
3945 i += unit;
3946 --n;
3947 }
3948 }
3949 else {
3950 if (!x) end -= 1;
3951
3952 len = i - end;
3953 if (len < 0) len = 0;
3954 ary = rb_ary_new_capa((n < len) ? n : len);
3955 while (n > 0 && i > end) {
3956 rb_ary_push(ary, LONG2FIX(i));
3957 if (i + unit > i) break;
3958 i += unit;
3959 --n;
3960 }
3961 }
3962 return ary;
3963 }
3964 else if (RB_FLOAT_TYPE_P(b) || RB_FLOAT_TYPE_P(e) || RB_FLOAT_TYPE_P(s)) {
3965 /* generate values like ruby_float_step */
3966
3967 double unit = NUM2DBL(s);
3968 double beg = NUM2DBL(b);
3969 double end = NIL_P(e) ? (unit < 0 ? -1 : 1)*HUGE_VAL : NUM2DBL(e);
3970 double len = ruby_float_step_size(beg, end, unit, x);
3971 long i;
3972
3973 if (n > len)
3974 n = (long)len;
3975
3976 if (isinf(unit)) {
3977 if (len > 0) {
3978 ary = rb_ary_new_capa(1);
3979 rb_ary_push(ary, DBL2NUM(beg));
3980 }
3981 else {
3982 ary = rb_ary_new_capa(0);
3983 }
3984 }
3985 else if (unit == 0) {
3986 VALUE val = DBL2NUM(beg);
3987 ary = rb_ary_new_capa(n);
3988 for (i = 0; i < len; ++i) {
3989 rb_ary_push(ary, val);
3990 }
3991 }
3992 else {
3993 ary = rb_ary_new_capa(n);
3994 for (i = 0; i < n; ++i) {
3995 double d = i*unit+beg;
3996 if (unit >= 0 ? end < d : d < end) d = end;
3997 rb_ary_push(ary, DBL2NUM(d));
3998 }
3999 }
4000
4001 return ary;
4002 }
4003
4004 {
4005 VALUE argv[1];
4006 argv[0] = num;
4007 return rb_call_super(1, argv);
4008 }
4009}
4010
4011/*
4012 * call-seq:
4013 * aseq.first -> num or nil
4014 * aseq.first(n) -> an_array
4015 *
4016 * Returns the first number in this arithmetic sequence,
4017 * or an array of the first +n+ elements.
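 *
 * For example (an illustrative usage sketch):
 *
 *    (1..10).step(3).first      #=> 1
 *    (1..10).step(3).first(3)   #=> [1, 4, 7]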
4018 */
4019static VALUE
4020arith_seq_first(int argc, VALUE *argv, VALUE self)
4021{
4022 VALUE b, e, s;
4023
4024 rb_check_arity(argc, 0, 1);
4025
4026 b = arith_seq_begin(self);
4027 e = arith_seq_end(self);
4028 s = arith_seq_step(self);
4029 if (argc == 0) {
4030 if (NIL_P(b)) {
4031 return Qnil;
4032 }
4033 if (!NIL_P(e)) {
4034 VALUE zero = INT2FIX(0);
4035 int r = rb_cmpint(rb_num_coerce_cmp(s, zero, idCmp), s, zero);
4036 if (r > 0 && RTEST(rb_funcall(b, '>', 1, e))) {
4037 return Qnil;
4038 }
4039 if (r < 0 && RTEST(rb_funcall(b, '<', 1, e))) {
4040 return Qnil;
4041 }
4042 }
4043 return b;
4044 }
4045
4046 return arith_seq_take(self, argv[0]);
4047}
4048
4049static inline VALUE
4050num_plus(VALUE a, VALUE b)
4051{
4052 if (RB_INTEGER_TYPE_P(a)) {
4053 return rb_int_plus(a, b);
4054 }
4055 else if (RB_FLOAT_TYPE_P(a)) {
4056 return rb_float_plus(a, b);
4057 }
4058 else if (RB_TYPE_P(a, T_RATIONAL)) {
4059 return rb_rational_plus(a, b);
4060 }
4061 else {
4062 return rb_funcallv(a, '+', 1, &b);
4063 }
4064}
4065
4066static inline VALUE
4067num_minus(VALUE a, VALUE b)
4068{
4069 if (RB_INTEGER_TYPE_P(a)) {
4070 return rb_int_minus(a, b);
4071 }
4072 else if (RB_FLOAT_TYPE_P(a)) {
4073 return rb_float_minus(a, b);
4074 }
4075 else if (RB_TYPE_P(a, T_RATIONAL)) {
4076 return rb_rational_minus(a, b);
4077 }
4078 else {
4079 return rb_funcallv(a, '-', 1, &b);
4080 }
4081}
4082
4083static inline VALUE
4084num_mul(VALUE a, VALUE b)
4085{
4086 if (RB_INTEGER_TYPE_P(a)) {
4087 return rb_int_mul(a, b);
4088 }
4089 else if (RB_FLOAT_TYPE_P(a)) {
4090 return rb_float_mul(a, b);
4091 }
4092 else if (RB_TYPE_P(a, T_RATIONAL)) {
4093 return rb_rational_mul(a, b);
4094 }
4095 else {
4096 return rb_funcallv(a, '*', 1, &b);
4097 }
4098}
4099
4100static inline VALUE
4101num_idiv(VALUE a, VALUE b)
4102{
4103 VALUE q;
4104 if (RB_INTEGER_TYPE_P(a)) {
4105 q = rb_int_idiv(a, b);
4106 }
4107 else if (RB_FLOAT_TYPE_P(a)) {
4108 q = rb_float_div(a, b);
4109 }
4110 else if (RB_TYPE_P(a, T_RATIONAL)) {
4111 q = rb_rational_div(a, b);
4112 }
4113 else {
4114 q = rb_funcallv(a, idDiv, 1, &b);
4115 }
4116
4117 if (RB_INTEGER_TYPE_P(q)) {
4118 return q;
4119 }
4120 else if (RB_FLOAT_TYPE_P(q)) {
4121 return rb_float_floor(q, 0);
4122 }
4123 else if (RB_TYPE_P(q, T_RATIONAL)) {
4124 return rb_rational_floor(q, 0);
4125 }
4126 else {
4127 return rb_funcall(q, rb_intern("floor"), 0);
4128 }
4129}
4130
4131/*
4132 * call-seq:
4133 * aseq.last -> num or nil
4134 * aseq.last(n) -> an_array
4135 *
4136 * Returns the last number in this arithmetic sequence,
4137 * or an array of the last +n+ elements.
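 *
 * For example (an illustrative usage sketch):
 *
 *    (1..10).step(3).last       #=> 10
 *    (1..10).step(3).last(2)    #=> [7, 10]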
4138 */
4139static VALUE
4140arith_seq_last(int argc, VALUE *argv, VALUE self)
4141{
4142 VALUE b, e, s, len_1, len, last, nv, ary;
4143 int last_is_adjusted;
4144 long n;
4145
4146 e = arith_seq_end(self);
4147 if (NIL_P(e)) {
4148 rb_raise(rb_eRangeError,
4149 "cannot get the last element of endless arithmetic sequence");
4150 }
4151
4152 b = arith_seq_begin(self);
4153 s = arith_seq_step(self);
4154
4155 len_1 = num_idiv(num_minus(e, b), s);
4156 if (rb_num_negative_int_p(len_1)) {
4157 if (argc == 0) {
4158 return Qnil;
4159 }
4160 return rb_ary_new_capa(0);
4161 }
4162
4163 last = num_plus(b, num_mul(s, len_1));
4164 if ((last_is_adjusted = arith_seq_exclude_end_p(self) && rb_equal(last, e))) {
4165 last = num_minus(last, s);
4166 }
4167
4168 if (argc == 0) {
4169 return last;
4170 }
4171
4172 if (last_is_adjusted) {
4173 len = len_1;
4174 }
4175 else {
4176 len = rb_int_plus(len_1, INT2FIX(1));
4177 }
4178
4179 rb_scan_args(argc, argv, "1", &nv);
4180 if (!RB_INTEGER_TYPE_P(nv)) {
4181 nv = rb_to_int(nv);
4182 }
4183 if (RTEST(rb_int_gt(nv, len))) {
4184 nv = len;
4185 }
4186 n = NUM2LONG(nv);
4187 if (n < 0) {
4188 rb_raise(rb_eArgError, "negative array size");
4189 }
4190
4191 ary = rb_ary_new_capa(n);
4192 b = rb_int_minus(last, rb_int_mul(s, nv));
4193 while (n) {
4194 b = rb_int_plus(b, s);
4195 rb_ary_push(ary, b);
4196 --n;
4197 }
4198
4199 return ary;
4200}
4201
4202/*
4203 * call-seq:
4204 * aseq.inspect -> string
4205 *
4206 * Convert this arithmetic sequence to a printable form.
4207 */
4208static VALUE
4209arith_seq_inspect(VALUE self)
4210{
4211 struct enumerator *e;
4212 VALUE eobj, str, eargs;
4213 int range_p;
4214
4215 TypedData_Get_Struct(self, struct enumerator, &enumerator_data_type, e);
4216
4217 eobj = rb_attr_get(self, id_receiver);
4218 if (NIL_P(eobj)) {
4219 eobj = e->obj;
4220 }
4221
4222 range_p = RTEST(rb_obj_is_kind_of(eobj, rb_cRange));
4223 str = rb_sprintf("(%s%"PRIsVALUE"%s.", range_p ? "(" : "", eobj, range_p ? ")" : "");
4224
4225 rb_str_buf_append(str, rb_id2str(e->meth));
4226
4227 eargs = rb_attr_get(eobj, id_arguments);
4228 if (NIL_P(eargs)) {
4229 eargs = e->args;
4230 }
4231 if (eargs != Qfalse) {
4232 long argc = RARRAY_LEN(eargs);
4233 const VALUE *argv = RARRAY_CONST_PTR(eargs); /* WB: no new reference */
4234
4235 if (argc > 0) {
4236 VALUE kwds = Qnil;
4237
4238 rb_str_buf_cat2(str, "(");
4239
4240 if (RB_TYPE_P(argv[argc-1], T_HASH)) {
4241 int all_key = TRUE;
4242 rb_hash_foreach(argv[argc-1], key_symbol_p, (VALUE)&all_key);
4243 if (all_key) kwds = argv[--argc];
4244 }
4245
4246 while (argc--) {
4247 VALUE arg = *argv++;
4248
4249 rb_str_append(str, rb_inspect(arg));
4250 rb_str_buf_cat2(str, ", ");
4251 }
4252 if (!NIL_P(kwds)) {
4253 rb_hash_foreach(kwds, kwd_append, str);
4254 }
4255 rb_str_set_len(str, RSTRING_LEN(str)-2); /* drop the last ", " */
4256 rb_str_buf_cat2(str, ")");
4257 }
4258 }
4259
4260 rb_str_buf_cat2(str, ")");
4261
4262 return str;
4263}
4264
4265/*
4266 * call-seq:
4267 * aseq == obj -> true or false
4268 *
4269 * Returns <code>true</code> only if +obj+ is an Enumerator::ArithmeticSequence
4270 * with equivalent begin, end, step, and exclude_end? settings.
4271 */
4272static VALUE
4273arith_seq_eq(VALUE self, VALUE other)
4274{
4275 if (!RTEST(rb_obj_is_kind_of(other, rb_cArithSeq))) {
4276 return Qfalse;
4277 }
4278
4279 if (!rb_equal(arith_seq_begin(self), arith_seq_begin(other))) {
4280 return Qfalse;
4281 }
4282
4283 if (!rb_equal(arith_seq_end(self), arith_seq_end(other))) {
4284 return Qfalse;
4285 }
4286
4287 if (!rb_equal(arith_seq_step(self), arith_seq_step(other))) {
4288 return Qfalse;
4289 }
4290
4291 if (arith_seq_exclude_end_p(self) != arith_seq_exclude_end_p(other)) {
4292 return Qfalse;
4293 }
4294
4295 return Qtrue;
4296}
4297
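/* Illustrative sketch (not part of the original source): arith_seq_eq()
 * compares the four defining components of two sequences.  The same idea
 * with a plain C struct; struct aseq and aseq_eq are hypothetical names. */
#include <stdbool.h>
#include <stdio.h>

struct aseq { long begin, end, step; bool exclude_end; };

static bool
aseq_eq(const struct aseq *a, const struct aseq *b)
{
    return a->begin == b->begin && a->end == b->end
        && a->step == b->step && a->exclude_end == b->exclude_end;
}

int
main(void)
{
    struct aseq x = {1, 10, 2, false}, y = {1, 10, 2, false};
    printf("%d\n", aseq_eq(&x, &y));   /* => 1 */
    return 0;
}
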
4298/*
4299 * call-seq:
4300 * aseq.hash -> integer
4301 *
4302 * Computes a hash value for this arithmetic sequence.
4303 * Two arithmetic sequences with the same begin, end, step, and exclude_end?
4304 * values will generate the same hash value.
4305 *
4306 * See also Object#hash.
4307 */
4308static VALUE
4309arith_seq_hash(VALUE self)
4310{
4311 st_index_t hash;
4312 VALUE v;
4313
4314 hash = rb_hash_start(arith_seq_exclude_end_p(self));
4315 v = rb_hash(arith_seq_begin(self));
4316 hash = rb_hash_uint(hash, NUM2LONG(v));
4317 v = rb_hash(arith_seq_end(self));
4318 hash = rb_hash_uint(hash, NUM2LONG(v));
4319 v = rb_hash(arith_seq_step(self));
4320 hash = rb_hash_uint(hash, NUM2LONG(v));
4321 hash = rb_hash_end(hash);
4322
4323 return ST2FIX(hash);
4324}
4325
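/* Illustrative sketch (not part of the original source): the hashing pattern
 * used by arith_seq_hash() above; seed with the exclude_end? flag, fold in
 * the hash of each component, then finalize.  mix() is a simple stand-in;
 * the real code uses Ruby's rb_hash_start()/rb_hash_uint()/rb_hash_end(),
 * and the component values below are hypothetical. */
#include <stdio.h>
#include <stdint.h>

static uint64_t
mix(uint64_t h, uint64_t v)
{
    h ^= v + 0x9e3779b97f4a7c15ULL + (h << 6) + (h >> 2);  /* simple combiner */
    return h;
}

int
main(void)
{
    uint64_t h = 1;            /* seed: exclude_end? as 0 or 1 */
    h = mix(h, 1234);          /* hash(begin) */
    h = mix(h, 5678);          /* hash(end)   */
    h = mix(h, 91011);         /* hash(step)  */
    printf("%llu\n", (unsigned long long)h);
    return 0;
}
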
4326#define NUM_GE(x, y) RTEST(rb_num_coerce_relop((x), (y), idGE))
4327
4328struct arith_seq_gen {
4329 VALUE current;
4330 VALUE end;
4331 VALUE step;
4332 int excl;
4333};
4334
4335/*
4336 * call-seq:
4337 * aseq.each {|i| block } -> aseq
4338 * aseq.each -> aseq
4339 */
4340static VALUE
4341arith_seq_each(VALUE self)
4342{
4343 VALUE c, e, s, len_1, last;
4344 int x;
4345
4346 if (!rb_block_given_p()) return self;
4347
4348 c = arith_seq_begin(self);
4349 e = arith_seq_end(self);
4350 s = arith_seq_step(self);
4351 x = arith_seq_exclude_end_p(self);
4352
4353 if (!RB_TYPE_P(s, T_COMPLEX) && ruby_float_step(c, e, s, x, TRUE)) {
4354 return self;
4355 }
4356
4357 if (NIL_P(e)) {
4358 while (1) {
4359 rb_yield(c);
4360 c = rb_int_plus(c, s);
4361 }
4362
4363 return self;
4364 }
4365
4366 if (rb_equal(s, INT2FIX(0))) {
4367 while (1) {
4368 rb_yield(c);
4369 }
4370
4371 return self;
4372 }
4373
4374 len_1 = num_idiv(num_minus(e, c), s);
4375 last = num_plus(c, num_mul(s, len_1));
4376 if (x && rb_equal(last, e)) {
4377 last = num_minus(last, s);
4378 }
4379
4380 if (rb_num_negative_int_p(s)) {
4381 while (NUM_GE(c, last)) {
4382 rb_yield(c);
4383 c = num_plus(c, s);
4384 }
4385 }
4386 else {
4387 while (NUM_GE(last, c)) {
4388 rb_yield(c);
4389 c = num_plus(c, s);
4390 }
4391 }
4392
4393 return self;
4394}
4395
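/* Illustrative sketch (not part of the original source): the generic-numeric
 * loop of arith_seq_each() reduced to C longs.  The final element is
 * computed first, then the current value is stepped until it passes it,
 * mirroring the NUM_GE() comparisons above.  Endless, zero-step and Float
 * sequences take the earlier branches and are omitted; aseq_each and
 * print_long are hypothetical names. */
#include <stdio.h>

static void
aseq_each(long c, long e, long s, int exclude_end, void (*yield)(long))
{
    long last;
    if (s == 0) return;                          /* handled by an earlier branch above */
    if ((s > 0 && c > e) || (s < 0 && c < e)) return;   /* empty sequence */
    last = c + s * ((e - c) / s);                /* final element (quotient >= 0 here) */
    if (exclude_end && last == e) last -= s;
    if (s < 0)
        for (; c >= last; c += s) yield(c);
    else
        for (; c <= last; c += s) yield(c);
}

static void print_long(long v) { printf("%ld ", v); }

int
main(void)
{
    aseq_each(10, 1, -3, 0, print_long);         /* 10.step(1, -3) => 10 7 4 1 */
    printf("\n");
    return 0;
}
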
4396/*
4397 * call-seq:
4398 * aseq.size -> num or nil
4399 *
4400 * Returns the number of elements in this arithmetic sequence if it is a finite
4401 * sequence. Otherwise, returns <code>Float::INFINITY</code>.
4402 */
4403static VALUE
4404arith_seq_size(VALUE self)
4405{
4406 VALUE b, e, s, len_1, len, last;
4407 int x;
4408
4409 b = arith_seq_begin(self);
4410 e = arith_seq_end(self);
4411 s = arith_seq_step(self);
4412 x = arith_seq_exclude_end_p(self);
4413
4414 if (RB_FLOAT_TYPE_P(b) || RB_FLOAT_TYPE_P(e) || RB_FLOAT_TYPE_P(s)) {
4415 double ee, n;
4416
4417 if (NIL_P(e)) {
4418 if (rb_num_negative_int_p(s)) {
4419 ee = -HUGE_VAL;
4420 }
4421 else {
4422 ee = HUGE_VAL;
4423 }
4424 }
4425 else {
4426 ee = NUM2DBL(e);
4427 }
4428
4429 n = ruby_float_step_size(NUM2DBL(b), ee, NUM2DBL(s), x);
4430 if (isinf(n)) return DBL2NUM(n);
4431 if (POSFIXABLE(n)) return LONG2FIX((long)n);
4432 return rb_dbl2big(n);
4433 }
4434
4435 if (NIL_P(e)) {
4436 return DBL2NUM(HUGE_VAL);
4437 }
4438
4439 if (!rb_obj_is_kind_of(s, rb_cNumeric)) {
4440 s = rb_to_int(s);
4441 }
4442
4443 if (rb_equal(s, INT2FIX(0))) {
4444 return DBL2NUM(HUGE_VAL);
4445 }
4446
4447 len_1 = rb_int_idiv(rb_int_minus(e, b), s);
4448 if (rb_num_negative_int_p(len_1)) {
4449 return INT2FIX(0);
4450 }
4451
4452 last = rb_int_plus(b, rb_int_mul(s, len_1));
4453 if (x && rb_equal(last, e)) {
4454 len = len_1;
4455 }
4456 else {
4457 len = rb_int_plus(len_1, INT2FIX(1));
4458 }
4459
4460 return len;
4461}
4462
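/* Illustrative sketch (not part of the original source): a naive
 * double-precision version of the size computation used in the Float branch
 * above.  The real ruby_float_step_size() additionally compensates for
 * floating-point rounding error near the endpoint, which this sketch
 * deliberately omits; aseq_size_f is a hypothetical name. */
#include <stdio.h>
#include <math.h>

static double
aseq_size_f(double beg, double end, double step, int exclude_end)
{
    double n = floor((end - beg) / step);
    if (n < 0) return 0.0;
    /* drop the endpoint when it is excluded and hit exactly */
    if (exclude_end && beg + n * step == end) return n;
    return n + 1.0;
}

int
main(void)
{
    printf("%g\n", aseq_size_f(1.0, 10.0, 2.5, 0));   /* (1.0..10.0).step(2.5) => 4 */
    printf("%g\n", aseq_size_f(1.0, 10.0, 3.0, 1));   /* (1.0...10.0).step(3)  => 3 */
    return 0;
}
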
4463#define sym(name) ID2SYM(rb_intern_const(name))
4464void
4465InitVM_Enumerator(void)
4466{
4467 ID id_private = rb_intern_const("private");
4468
4469 rb_define_method(rb_mKernel, "to_enum", obj_to_enum, -1);
4470 rb_define_method(rb_mKernel, "enum_for", obj_to_enum, -1);
4471
4472 rb_cEnumerator = rb_define_class("Enumerator", rb_cObject);
4474
4475 rb_define_alloc_func(rb_cEnumerator, enumerator_allocate);
4476 rb_define_method(rb_cEnumerator, "initialize", enumerator_initialize, -1);
4477 rb_define_method(rb_cEnumerator, "initialize_copy", enumerator_init_copy, 1);
4478 rb_define_method(rb_cEnumerator, "each", enumerator_each, -1);
4479 rb_define_method(rb_cEnumerator, "each_with_index", enumerator_each_with_index, 0);
4480 rb_define_method(rb_cEnumerator, "each_with_object", enumerator_with_object, 1);
4481 rb_define_method(rb_cEnumerator, "with_index", enumerator_with_index, -1);
4482 rb_define_method(rb_cEnumerator, "with_object", enumerator_with_object, 1);
4483 rb_define_method(rb_cEnumerator, "next_values", enumerator_next_values, 0);
4484 rb_define_method(rb_cEnumerator, "peek_values", enumerator_peek_values_m, 0);
4485 rb_define_method(rb_cEnumerator, "next", enumerator_next, 0);
4486 rb_define_method(rb_cEnumerator, "peek", enumerator_peek, 0);
4487 rb_define_method(rb_cEnumerator, "feed", enumerator_feed, 1);
4488 rb_define_method(rb_cEnumerator, "rewind", enumerator_rewind, 0);
4489 rb_define_method(rb_cEnumerator, "inspect", enumerator_inspect, 0);
4490 rb_define_method(rb_cEnumerator, "size", enumerator_size, 0);
4491 rb_define_method(rb_cEnumerator, "+", enumerator_plus, 1);
4493
4494 /* Lazy */
4495 rb_cLazy = rb_define_class_under(rb_cEnumerator, "Lazy", rb_cEnumerator);
4496 rb_define_method(rb_mEnumerable, "lazy", enumerable_lazy, 0);
4497
4498 rb_define_alias(rb_cLazy, "_enumerable_map", "map");
4499 rb_define_alias(rb_cLazy, "_enumerable_collect", "collect");
4500 rb_define_alias(rb_cLazy, "_enumerable_flat_map", "flat_map");
4501 rb_define_alias(rb_cLazy, "_enumerable_collect_concat", "collect_concat");
4502 rb_define_alias(rb_cLazy, "_enumerable_select", "select");
4503 rb_define_alias(rb_cLazy, "_enumerable_find_all", "find_all");
4504 rb_define_alias(rb_cLazy, "_enumerable_filter", "filter");
4505 rb_define_alias(rb_cLazy, "_enumerable_filter_map", "filter_map");
4506 rb_define_alias(rb_cLazy, "_enumerable_reject", "reject");
4507 rb_define_alias(rb_cLazy, "_enumerable_grep", "grep");
4508 rb_define_alias(rb_cLazy, "_enumerable_grep_v", "grep_v");
4509 rb_define_alias(rb_cLazy, "_enumerable_zip", "zip");
4510 rb_define_alias(rb_cLazy, "_enumerable_take", "take");
4511 rb_define_alias(rb_cLazy, "_enumerable_take_while", "take_while");
4512 rb_define_alias(rb_cLazy, "_enumerable_drop", "drop");
4513 rb_define_alias(rb_cLazy, "_enumerable_drop_while", "drop_while");
4514 rb_define_alias(rb_cLazy, "_enumerable_uniq", "uniq");
4515 rb_define_private_method(rb_cLazy, "_enumerable_with_index", enumerator_with_index, -1);
4516
4517 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_map"));
4518 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_collect"));
4519 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_flat_map"));
4520 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_collect_concat"));
4521 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_select"));
4522 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_find_all"));
4523 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_filter"));
4524 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_filter_map"));
4525 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_reject"));
4526 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_grep"));
4527 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_grep_v"));
4528 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_zip"));
4529 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_take"));
4530 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_take_while"));
4531 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_drop"));
4532 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_drop_while"));
4533 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_uniq"));
4534
4535 rb_define_method(rb_cLazy, "initialize", lazy_initialize, -1);
4536 rb_define_method(rb_cLazy, "to_enum", lazy_to_enum, -1);
4537 rb_define_method(rb_cLazy, "enum_for", lazy_to_enum, -1);
4538 rb_define_method(rb_cLazy, "eager", lazy_eager, 0);
4539 rb_define_method(rb_cLazy, "map", lazy_map, 0);
4540 rb_define_method(rb_cLazy, "collect", lazy_map, 0);
4541 rb_define_method(rb_cLazy, "flat_map", lazy_flat_map, 0);
4542 rb_define_method(rb_cLazy, "collect_concat", lazy_flat_map, 0);
4543 rb_define_method(rb_cLazy, "select", lazy_select, 0);
4544 rb_define_method(rb_cLazy, "find_all", lazy_select, 0);
4545 rb_define_method(rb_cLazy, "filter", lazy_select, 0);
4546 rb_define_method(rb_cLazy, "filter_map", lazy_filter_map, 0);
4547 rb_define_method(rb_cLazy, "reject", lazy_reject, 0);
4548 rb_define_method(rb_cLazy, "grep", lazy_grep, 1);
4549 rb_define_method(rb_cLazy, "grep_v", lazy_grep_v, 1);
4550 rb_define_method(rb_cLazy, "zip", lazy_zip, -1);
4551 rb_define_method(rb_cLazy, "take", lazy_take, 1);
4552 rb_define_method(rb_cLazy, "take_while", lazy_take_while, 0);
4553 rb_define_method(rb_cLazy, "drop", lazy_drop, 1);
4554 rb_define_method(rb_cLazy, "drop_while", lazy_drop_while, 0);
4555 rb_define_method(rb_cLazy, "lazy", lazy_lazy, 0);
4556 rb_define_method(rb_cLazy, "chunk", lazy_super, -1);
4557 rb_define_method(rb_cLazy, "slice_before", lazy_super, -1);
4558 rb_define_method(rb_cLazy, "slice_after", lazy_super, -1);
4559 rb_define_method(rb_cLazy, "slice_when", lazy_super, -1);
4560 rb_define_method(rb_cLazy, "chunk_while", lazy_super, -1);
4561 rb_define_method(rb_cLazy, "uniq", lazy_uniq, 0);
4562 rb_define_method(rb_cLazy, "compact", lazy_compact, 0);
4563 rb_define_method(rb_cLazy, "with_index", lazy_with_index, -1);
4564
4565 lazy_use_super_method = rb_hash_new_with_size(18);
4566 rb_hash_aset(lazy_use_super_method, sym("map"), sym("_enumerable_map"));
4567 rb_hash_aset(lazy_use_super_method, sym("collect"), sym("_enumerable_collect"));
4568 rb_hash_aset(lazy_use_super_method, sym("flat_map"), sym("_enumerable_flat_map"));
4569 rb_hash_aset(lazy_use_super_method, sym("collect_concat"), sym("_enumerable_collect_concat"));
4570 rb_hash_aset(lazy_use_super_method, sym("select"), sym("_enumerable_select"));
4571 rb_hash_aset(lazy_use_super_method, sym("find_all"), sym("_enumerable_find_all"));
4572 rb_hash_aset(lazy_use_super_method, sym("filter"), sym("_enumerable_filter"));
4573 rb_hash_aset(lazy_use_super_method, sym("filter_map"), sym("_enumerable_filter_map"));
4574 rb_hash_aset(lazy_use_super_method, sym("reject"), sym("_enumerable_reject"));
4575 rb_hash_aset(lazy_use_super_method, sym("grep"), sym("_enumerable_grep"));
4576 rb_hash_aset(lazy_use_super_method, sym("grep_v"), sym("_enumerable_grep_v"));
4577 rb_hash_aset(lazy_use_super_method, sym("zip"), sym("_enumerable_zip"));
4578 rb_hash_aset(lazy_use_super_method, sym("take"), sym("_enumerable_take"));
4579 rb_hash_aset(lazy_use_super_method, sym("take_while"), sym("_enumerable_take_while"));
4580 rb_hash_aset(lazy_use_super_method, sym("drop"), sym("_enumerable_drop"));
4581 rb_hash_aset(lazy_use_super_method, sym("drop_while"), sym("_enumerable_drop_while"));
4582 rb_hash_aset(lazy_use_super_method, sym("uniq"), sym("_enumerable_uniq"));
4583 rb_hash_aset(lazy_use_super_method, sym("with_index"), sym("_enumerable_with_index"));
4584 rb_obj_freeze(lazy_use_super_method);
4585 rb_vm_register_global_object(lazy_use_super_method);
4586
4587#if 0 /* for RDoc */
4588 rb_define_method(rb_cLazy, "to_a", lazy_to_a, 0);
4589 rb_define_method(rb_cLazy, "chunk", lazy_chunk, 0);
4590 rb_define_method(rb_cLazy, "chunk_while", lazy_chunk_while, 0);
4591 rb_define_method(rb_cLazy, "slice_after", lazy_slice_after, 0);
4592 rb_define_method(rb_cLazy, "slice_before", lazy_slice_before, 0);
4593 rb_define_method(rb_cLazy, "slice_when", lazy_slice_when, 0);
4594#endif
4595 rb_define_alias(rb_cLazy, "force", "to_a");
4596
4597 rb_eStopIteration = rb_define_class("StopIteration", rb_eIndexError);
4598 rb_define_method(rb_eStopIteration, "result", stop_result, 0);
4599
4600 /* Generator */
4601 rb_cGenerator = rb_define_class_under(rb_cEnumerator, "Generator", rb_cObject);
4602 rb_include_module(rb_cGenerator, rb_mEnumerable);
4603 rb_define_alloc_func(rb_cGenerator, generator_allocate);
4604 rb_define_method(rb_cGenerator, "initialize", generator_initialize, -1);
4605 rb_define_method(rb_cGenerator, "initialize_copy", generator_init_copy, 1);
4606 rb_define_method(rb_cGenerator, "each", generator_each, -1);
4607
4608 /* Yielder */
4609 rb_cYielder = rb_define_class_under(rb_cEnumerator, "Yielder", rb_cObject);
4610 rb_define_alloc_func(rb_cYielder, yielder_allocate);
4611 rb_define_method(rb_cYielder, "initialize", yielder_initialize, 0);
4612 rb_define_method(rb_cYielder, "yield", yielder_yield, -2);
4613 rb_define_method(rb_cYielder, "<<", yielder_yield_push, 1);
4614 rb_define_method(rb_cYielder, "to_proc", yielder_to_proc, 0);
4615
4616 /* Producer */
4617 rb_cEnumProducer = rb_define_class_under(rb_cEnumerator, "Producer", rb_cObject);
4618 rb_define_alloc_func(rb_cEnumProducer, producer_allocate);
4619 rb_define_method(rb_cEnumProducer, "each", producer_each, 0);
4620 rb_define_singleton_method(rb_cEnumerator, "produce", enumerator_s_produce, -1);
4621
4622 /* Chain */
4623 rb_cEnumChain = rb_define_class_under(rb_cEnumerator, "Chain", rb_cEnumerator);
4624 rb_define_alloc_func(rb_cEnumChain, enum_chain_allocate);
4625 rb_define_method(rb_cEnumChain, "initialize", enum_chain_initialize, -2);
4626 rb_define_method(rb_cEnumChain, "initialize_copy", enum_chain_init_copy, 1);
4627 rb_define_method(rb_cEnumChain, "each", enum_chain_each, -1);
4628 rb_define_method(rb_cEnumChain, "size", enum_chain_size, 0);
4629 rb_define_method(rb_cEnumChain, "rewind", enum_chain_rewind, 0);
4630 rb_define_method(rb_cEnumChain, "inspect", enum_chain_inspect, 0);
4631 rb_undef_method(rb_cEnumChain, "feed");
4632 rb_undef_method(rb_cEnumChain, "next");
4633 rb_undef_method(rb_cEnumChain, "next_values");
4634 rb_undef_method(rb_cEnumChain, "peek");
4635 rb_undef_method(rb_cEnumChain, "peek_values");
4636
4637 /* Product */
4638 rb_cEnumProduct = rb_define_class_under(rb_cEnumerator, "Product", rb_cEnumerator);
4639 rb_define_alloc_func(rb_cEnumProduct, enum_product_allocate);
4640 rb_define_method(rb_cEnumProduct, "initialize", enum_product_initialize, -1);
4641 rb_define_method(rb_cEnumProduct, "initialize_copy", enum_product_init_copy, 1);
4642 rb_define_method(rb_cEnumProduct, "each", enum_product_each, 0);
4643 rb_define_method(rb_cEnumProduct, "size", enum_product_size, 0);
4644 rb_define_method(rb_cEnumProduct, "rewind", enum_product_rewind, 0);
4645 rb_define_method(rb_cEnumProduct, "inspect", enum_product_inspect, 0);
4646 rb_undef_method(rb_cEnumProduct, "feed");
4647 rb_undef_method(rb_cEnumProduct, "next");
4648 rb_undef_method(rb_cEnumProduct, "next_values");
4649 rb_undef_method(rb_cEnumProduct, "peek");
4650 rb_undef_method(rb_cEnumProduct, "peek_values");
4651 rb_define_singleton_method(rb_cEnumerator, "product", enumerator_s_product, -1);
4652
4653 /* ArithmeticSequence */
4654 rb_cArithSeq = rb_define_class_under(rb_cEnumerator, "ArithmeticSequence", rb_cEnumerator);
4655 rb_undef_alloc_func(rb_cArithSeq);
4656 rb_undef_method(CLASS_OF(rb_cArithSeq), "new");
4657 rb_define_method(rb_cArithSeq, "begin", arith_seq_begin, 0);
4658 rb_define_method(rb_cArithSeq, "end", arith_seq_end, 0);
4659 rb_define_method(rb_cArithSeq, "exclude_end?", arith_seq_exclude_end, 0);
4660 rb_define_method(rb_cArithSeq, "step", arith_seq_step, 0);
4661 rb_define_method(rb_cArithSeq, "first", arith_seq_first, -1);
4662 rb_define_method(rb_cArithSeq, "last", arith_seq_last, -1);
4663 rb_define_method(rb_cArithSeq, "inspect", arith_seq_inspect, 0);
4664 rb_define_method(rb_cArithSeq, "==", arith_seq_eq, 1);
4665 rb_define_method(rb_cArithSeq, "===", arith_seq_eq, 1);
4666 rb_define_method(rb_cArithSeq, "eql?", arith_seq_eq, 1);
4667 rb_define_method(rb_cArithSeq, "hash", arith_seq_hash, 0);
4668 rb_define_method(rb_cArithSeq, "each", arith_seq_each, 0);
4669 rb_define_method(rb_cArithSeq, "size", arith_seq_size, 0);
4670
4671 rb_provide("enumerator.so"); /* for backward compatibility */
4672}
4673#undef sym
4674
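/* Illustrative sketch (not part of the original source): the arity argument
 * passed to rb_define_method() throughout InitVM_Enumerator() selects the C
 * calling convention -- N >= 0 means N fixed VALUE arguments after self,
 * -1 means (int argc, VALUE *argv, VALUE self), and -2 means the arguments
 * arrive as one Ruby Array.  A hypothetical extension using the same pattern
 * (Demo, demo_sum and Init_demo are made-up names) might look like this: */
#include "ruby.h"

static VALUE
demo_sum(int argc, VALUE *argv, VALUE self)
{
    VALUE total = INT2FIX(0);
    int i;
    for (i = 0; i < argc; i++) {
        total = rb_funcall(total, rb_intern("+"), 1, argv[i]);
    }
    return total;
}

void
Init_demo(void)
{
    VALUE cDemo = rb_define_class("Demo", rb_cObject);
    rb_define_method(cDemo, "sum", demo_sum, -1);   /* variadic, like "to_enum" above */
}
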
4675void
4676Init_Enumerator(void)
4677{
4678 id_rewind = rb_intern_const("rewind");
4679 id_next = rb_intern_const("next");
4680 id_result = rb_intern_const("result");
4681 id_receiver = rb_intern_const("receiver");
4682 id_arguments = rb_intern_const("arguments");
4683 id_memo = rb_intern_const("memo");
4684 id_method = rb_intern_const("method");
4685 id_force = rb_intern_const("force");
4686 id_to_enum = rb_intern_const("to_enum");
4687 id_each_entry = rb_intern_const("each_entry");
4688 sym_each = ID2SYM(id_each);
4689 sym_yield = ID2SYM(rb_intern_const("yield"));
4690
4691 InitVM(Enumerator);
4692}