Ruby 4.1.0dev (2026-03-01 revision d68e4be1873e364c5ee24ed112bce4bc86e3a406)
enumerator.c (d68e4be1873e364c5ee24ed112bce4bc86e3a406)
1/************************************************
2
3 enumerator.c - provides Enumerator class
4
5 $Author$
6
7 Copyright (C) 2001-2003 Akinori MUSHA
8
9 $Idaemons: /home/cvs/rb/enumerator/enumerator.c,v 1.1.1.1 2001/07/15 10:12:48 knu Exp $
10 $RoughId: enumerator.c,v 1.6 2003/07/27 11:03:24 nobu Exp $
11 $Id$
12
13************************************************/
14
15#include "ruby/internal/config.h"
16
17#ifdef HAVE_FLOAT_H
18#include <float.h>
19#endif
20
21#include <limits.h>
22#include "id.h"
23#include "internal.h"
24#include "internal/class.h"
25#include "internal/enumerator.h"
26#include "internal/error.h"
27#include "internal/hash.h"
28#include "internal/imemo.h"
29#include "internal/numeric.h"
30#include "internal/range.h"
31#include "internal/rational.h"
32#include "ruby/ruby.h"
33
34/*
35 * Document-class: Enumerator
36 *
37 * \Class \Enumerator supports:
38 *
39 * - {External iteration}[rdoc-ref:Enumerator@External+Iteration].
40 * - {Internal iteration}[rdoc-ref:Enumerator@Internal+Iteration].
41 *
42 * An \Enumerator may be created by the following methods:
43 *
44 * - Object#to_enum.
45 * - Object#enum_for.
46 * - Enumerator.new.
47 *
48 * In addition, certain Ruby methods return \Enumerator objects:
49 * a Ruby iterator method that accepts a block
50 * may return an \Enumerator if no block is given.
51 * There are many such methods, for example, in classes Array and Hash.
52 * (In the documentation for those classes, search for `new_enumerator`.)
53 *
54 * == Internal Iteration
55 *
56 * In _internal iteration_, an iterator method drives the iteration
57 * and the caller's block handles the processing;
58 * this example uses method #each_with_index:
59 *
60 * words = %w[foo bar baz] # => ["foo", "bar", "baz"]
61 * enumerator = words.each # => #<Enumerator: ...>
62 * enumerator.each_with_index {|word, i| puts "#{i}: #{word}" }
63 * 0: foo
64 * 1: bar
65 * 2: baz
66 *
67 * Iterator methods in class \Enumerator include:
68 *
69 * - #each:
70 * passes each item to the block.
71 * - #each_with_index:
72 * passes each item and its index to the block.
73 * - #each_with_object (aliased as #with_object):
74 * passes each item and a given object to the block.
75 * - #with_index:
76 * like #each_with_index, but starting at a given offset (instead of zero).
77 *
78 * \Class \Enumerator includes module Enumerable,
79 * which provides many more iterator methods.
80 *
81 * == External Iteration
82 *
83 * In _external iteration_, the user's program both drives the iteration
84 * and handles the processing in stream-like fashion;
85 * this example uses method #next:
86 *
87 * words = %w[foo bar baz]
88 * enumerator = words.each
89 * enumerator.next # => "foo"
90 * enumerator.next # => "bar"
91 * enumerator.next # => "baz"
92 * enumerator.next # Raises StopIteration: iteration reached an end
93 *
94 * External iteration methods in class \Enumerator include:
95 *
96 * - #feed:
97 * sets the value that is next to be returned.
98 * - #next:
99 * returns the next value and increments the position.
100 * - #next_values:
101 * returns the next value in a 1-element array and increments the position.
102 * - #peek:
103 * returns the next value but does not increment the position.
104 * - #peek_values:
105 * returns the next value in a 1-element array but does not increment the position.
106 * - #rewind:
107 * sets the position to zero.
108 *
109 * Each of these methods raises FrozenError if called from a frozen \Enumerator.
110 *
111 * == External Iteration and \Fiber
112 *
113 * External iteration that uses Fiber differs *significantly* from internal iteration:
114 *
115 * - Using \Fiber adds some overhead compared to internal enumeration.
116 * - The stacktrace will only include the stack from the \Enumerator, not above.
117 * - \Fiber-local variables are *not* inherited inside the \Enumerator \Fiber,
118 * which instead starts with no \Fiber-local variables.
119 * - \Fiber storage variables *are* inherited and are designed
120 * to handle \Enumerator Fibers. Assigning to a \Fiber storage variable
121 * only affects the current \Fiber, so if you want to change state
122 * in the caller \Fiber of the \Enumerator \Fiber, you need to use an
123 * extra indirection (e.g., use some object in the \Fiber storage
124 * variable and mutate some ivar of it).
125 *
126 * Concretely:
127 *
128 * Thread.current[:fiber_local] = 1
129 * Fiber[:storage_var] = 1
130 * e = Enumerator.new do |y|
131 * p Thread.current[:fiber_local] # for external iteration: nil, for internal iteration: 1
132 * p Fiber[:storage_var] # => 1, inherited
133 * Fiber[:storage_var] += 1
134 * y << 42
135 * end
136 *
137 * p e.next # => 42
138 * p Fiber[:storage_var] # => 1 (it ran in a different Fiber)
139 *
140 * e.each { p _1 }
141 * p Fiber[:storage_var] # => 2 (it ran in the same Fiber/"stack" as the current Fiber)
142 *
143 * == Converting External Iteration to Internal Iteration
144 *
145 * You can use an external iterator to implement an internal iterator as follows:
146 *
147 * def ext_each(e)
148 * while true
149 * begin
150 * vs = e.next_values
151 * rescue StopIteration
152 * return $!.result
153 * end
154 * y = yield(*vs)
155 * e.feed y
156 * end
157 * end
158 *
159 * o = Object.new
160 *
161 * def o.each
162 * puts yield
163 * puts yield(1)
164 * puts yield(1, 2)
165 * 3
166 * end
167 *
168 * # use o.each as an internal iterator directly.
169 * puts o.each {|*x| puts x; [:b, *x] }
170 * # => [], [:b], [1], [:b, 1], [1, 2], [:b, 1, 2], 3
171 *
172 * # convert o.each to an external iterator for
173 * # implementing an internal iterator.
174 * puts ext_each(o.to_enum) {|*x| puts x; [:b, *x] }
175 * # => [], [:b], [1], [:b, 1], [1, 2], [:b, 1, 2], 3
176 *
177 */
179static VALUE rb_cLazy;
180static ID id_rewind, id_to_enum, id_each_entry;
181static ID id_next, id_result, id_receiver, id_arguments, id_memo, id_method, id_force;
182static VALUE sym_each, sym_yield;
183
184static VALUE lazy_use_super_method;
185
186extern ID ruby_static_id_cause;
187
188#define id_call idCall
189#define id_cause ruby_static_id_cause
190#define id_each idEach
191#define id_eqq idEqq
192#define id_initialize idInitialize
193#define id_size idSize
194
196
198 VALUE obj;
199 ID meth;
200 VALUE args;
201 VALUE fib;
202 VALUE dst;
203 VALUE lookahead;
204 VALUE feedvalue;
205 VALUE stop_exc;
206 VALUE size;
207 VALUE procs;
209 int kw_splat;
210};
211
/* GC edge list for struct enumerator: declares every VALUE field that
 * may reference a heap object, consumed by the declarative marking
 * support (RUBY_TYPED_DECL_MARKING) so the GC can mark and update these
 * references during compaction. Non-VALUE fields (meth, size_fn,
 * kw_splat) are intentionally absent. */
RUBY_REFERENCES(enumerator_refs) = {
    RUBY_REF_EDGE(struct enumerator, obj),
    RUBY_REF_EDGE(struct enumerator, args),
    RUBY_REF_EDGE(struct enumerator, fib),
    RUBY_REF_EDGE(struct enumerator, dst),
    RUBY_REF_EDGE(struct enumerator, lookahead),
    RUBY_REF_EDGE(struct enumerator, feedvalue),
    RUBY_REF_EDGE(struct enumerator, stop_exc),
    RUBY_REF_EDGE(struct enumerator, size),
    RUBY_REF_EDGE(struct enumerator, procs),
    RUBY_REF_END
};
224
225static VALUE rb_cGenerator, rb_cYielder, rb_cEnumProducer;
226
/* Internal state of an Enumerator::Generator: `proc` is the generation
 * block. `obj` is read back by inspect_enumerator for lazy chains;
 * NOTE(review): its exact role depends on generator_init callers not
 * fully visible in this chunk — confirm before relying on it. */
struct generator {
    VALUE proc;
    VALUE obj;
};
231
/* Internal state of an Enumerator::Yielder: the proc that receives
 * each value handed to the yielder. */
struct yielder {
    VALUE proc;
};
235
/* Internal state backing Enumerator.produce (Enumerator::Producer):
 * the initial value, the producing proc, and the reported size. */
struct producer {
    VALUE init;
    VALUE proc;
    VALUE size;
};
241
242typedef struct MEMO *lazyenum_proc_func(VALUE, struct MEMO *, VALUE, long);
243typedef VALUE lazyenum_size_func(VALUE, VALUE);
244typedef int lazyenum_precheck_func(VALUE proc_entry);
245typedef struct {
246 lazyenum_proc_func *proc;
247 lazyenum_size_func *size;
248 lazyenum_precheck_func *precheck;
250
252 VALUE proc;
253 VALUE memo;
254 const lazyenum_funcs *fn;
255};
256
257static VALUE generator_allocate(VALUE klass);
258static VALUE generator_init(VALUE obj, VALUE proc);
259
260static VALUE rb_cEnumChain;
261
263 VALUE enums;
264 long pos;
265};
266
267static VALUE rb_cEnumProduct;
268
270 VALUE enums;
271};
272
273VALUE rb_cArithSeq;
274
/* TypedData type for Enumerator instances. Marking is declarative
 * (RUBY_TYPED_DECL_MARKING) via the enumerator_refs edge list; no
 * dfree/dsize callbacks are needed because nothing is allocated
 * outside the embedded struct. */
static const rb_data_type_t enumerator_data_type = {
    "enumerator",
    {
        RUBY_REFS_LIST_PTR(enumerator_refs),
        NULL, // Nothing allocated externally, so don't need a memsize function
        NULL,
    },
    0, NULL, RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED | RUBY_TYPED_DECL_MARKING | RUBY_TYPED_EMBEDDABLE
};
285
286static struct enumerator *
287enumerator_ptr(VALUE obj)
288{
289 struct enumerator *ptr;
290
291 TypedData_Get_Struct(obj, struct enumerator, &enumerator_data_type, ptr);
292 if (!ptr || UNDEF_P(ptr->obj)) {
293 rb_raise(rb_eArgError, "uninitialized enumerator");
294 }
295 return ptr;
296}
297
/* GC callback for struct proc_entry: marks (and, during compaction,
 * updates) its two VALUE fields. Registered as both dmark and dcompact
 * in proc_entry_data_type. */
static void
proc_entry_mark_and_move(void *p)
{
    struct proc_entry *ptr = p;
    rb_gc_mark_and_move(&ptr->proc);
    rb_gc_mark_and_move(&ptr->memo);
}
305
/* TypedData type for lazy-chain proc entries; the same callback serves
 * as both dmark and dcompact. */
static const rb_data_type_t proc_entry_data_type = {
    "proc_entry",
    {
        proc_entry_mark_and_move,
        NULL, // Nothing allocated externally, so don't need a memsize function
        proc_entry_mark_and_move,
    },
    0, 0, RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED | RUBY_TYPED_EMBEDDABLE
};
316
317static struct proc_entry *
318proc_entry_ptr(VALUE proc_entry)
319{
320 struct proc_entry *ptr;
321
322 TypedData_Get_Struct(proc_entry, struct proc_entry, &proc_entry_data_type, ptr);
323
324 return ptr;
325}
326
327/*
328 * call-seq:
329 * obj.to_enum(method = :each, *args) -> enum
330 * obj.enum_for(method = :each, *args) -> enum
331 * obj.to_enum(method = :each, *args) {|*args| block} -> enum
332 * obj.enum_for(method = :each, *args){|*args| block} -> enum
333 *
334 * Creates a new Enumerator which will enumerate by calling +method+ on
335 * +obj+, passing +args+ if any. What was _yielded_ by method becomes
336 * values of enumerator.
337 *
338 * If a block is given, it will be used to calculate the size of
339 * the enumerator without the need to iterate it (see Enumerator#size).
340 *
341 * === Examples
342 *
343 * str = "xyz"
344 *
345 * enum = str.enum_for(:each_byte)
346 * enum.each { |b| puts b }
347 * # => 120
348 * # => 121
349 * # => 122
350 *
351 * # protect an array from being modified by some_method
352 * a = [1, 2, 3]
353 * some_method(a.to_enum)
354 *
355 * # String#split in block form is more memory-effective:
356 * very_large_string.split("|") { |chunk| return chunk if chunk.include?('DATE') }
357 * # This could be rewritten more idiomatically with to_enum:
358 * very_large_string.to_enum(:split, "|").lazy.grep(/DATE/).first
359 *
360 * It is typical to call to_enum when defining methods for
361 * a generic Enumerable, in case no block is passed.
362 *
363 * Here is such an example, with parameter passing and a sizing block:
364 *
365 * module Enumerable
366 * # a generic method to repeat the values of any enumerable
367 * def repeat(n)
368 * raise ArgumentError, "#{n} is negative!" if n < 0
369 * unless block_given?
370 * return to_enum(__method__, n) do # __method__ is :repeat here
371 * sz = size # Call size and multiply by n...
372 * sz * n if sz # but return nil if size itself is nil
373 * end
374 * end
375 * each do |*val|
376 * n.times { yield *val }
377 * end
378 * end
379 * end
380 *
381 * %i[hello world].repeat(2) { |w| puts w }
382 * # => Prints 'hello', 'hello', 'world', 'world'
383 * enum = (1..14).repeat(3)
384 * # => returns an Enumerator when called without a block
385 * enum.first(4) # => [1, 1, 1, 2]
386 * enum.size # => 42
387 */
/* Implementation of Kernel#to_enum / #enum_for. */
static VALUE
obj_to_enum(int argc, VALUE *argv, VALUE obj)
{
    VALUE enumerator, meth = sym_each;

    if (argc > 0) {
        /* first argument names the method; the rest are passed through */
        --argc;
        meth = *argv++;
    }
    enumerator = rb_enumeratorize_with_size(obj, meth, argc, argv, 0);
    if (rb_block_given_p()) {
        /* the block, if given, lazily computes Enumerator#size */
        RB_OBJ_WRITE(enumerator, &enumerator_ptr(enumerator)->size, rb_block_proc());
    }
    return enumerator;
}
403
404static VALUE
405enumerator_allocate(VALUE klass)
406{
407 struct enumerator *ptr;
408 VALUE enum_obj;
409
410 enum_obj = TypedData_Make_Struct(klass, struct enumerator, &enumerator_data_type, ptr);
411 ptr->obj = Qundef;
412
413 return enum_obj;
414}
415
/* Fill in an allocated Enumerator: it will iterate by invoking +meth+
 * on +obj+ with the +argc+/+argv+ arguments. Either +size_fn+
 * (computed on demand) or +size+ (a fixed value or callable) backs
 * Enumerator#size; +kw_splat+ records whether the trailing argument is
 * to be treated as keywords. Returns +enum_obj+. */
static VALUE
enumerator_init(VALUE enum_obj, VALUE obj, VALUE meth, int argc, const VALUE *argv, rb_enumerator_size_func *size_fn, VALUE size, int kw_splat)
{
    struct enumerator *ptr;

    rb_check_frozen(enum_obj);
    TypedData_Get_Struct(enum_obj, struct enumerator, &enumerator_data_type, ptr);

    if (!ptr) {
        rb_raise(rb_eArgError, "unallocated enumerator");
    }

    RB_OBJ_WRITE(enum_obj, &ptr->obj, obj);
    ptr->meth = rb_to_id(meth);
    if (argc) RB_OBJ_WRITE(enum_obj, &ptr->args, rb_ary_new4(argc, argv));
    /* external-iteration state starts out empty */
    ptr->fib = 0;
    ptr->dst = Qnil;
    ptr->lookahead = Qundef;
    ptr->feedvalue = Qundef;
    ptr->stop_exc = Qfalse;
    RB_OBJ_WRITE(enum_obj, &ptr->size, size);
    ptr->size_fn = size_fn;
    ptr->kw_splat = kw_splat;

    return enum_obj;
}
442
443static VALUE
444convert_to_feasible_size_value(VALUE obj)
445{
446 if (NIL_P(obj)) {
447 return obj;
448 }
449 else if (rb_respond_to(obj, id_call)) {
450 return obj;
451 }
452 else if (RB_FLOAT_TYPE_P(obj) && RFLOAT_VALUE(obj) == HUGE_VAL) {
453 return obj;
454 }
455 else {
456 return rb_to_int(obj);
457 }
458}
459
460/*
461 * call-seq:
462 * Enumerator.new(size = nil) {|yielder| ... }
463 *
464 * Returns a new \Enumerator object that can be used for iteration.
465 *
466 * The given block defines the iteration;
467 * it is called with a "yielder" object that can yield an object
468 * via a call to method <tt>yielder.yield</tt>:
469 *
470 * fib = Enumerator.new do |yielder|
471 * n = next_n = 1
472 * while true do
473 * yielder.yield(n)
474 * n, next_n = next_n, n + next_n
475 * end
476 * end
477 *
478 * fib.take(10) # => [1, 1, 2, 3, 5, 8, 13, 21, 34, 55]
479 *
480 * Parameter +size+ specifies how the size is to be calculated (see #size);
481 * it can either be a value or a callable object:
482 *
483 * Enumerator.new{}.size # => nil
484 * Enumerator.new(42){}.size # => 42
485 * Enumerator.new(-> {42}){}.size # => 42
486 *
487 */
static VALUE
enumerator_initialize(int argc, VALUE *argv, VALUE obj)
{
    /* the user's block becomes a Generator whose #each drives iteration */
    VALUE iter = rb_block_proc();
    VALUE recv = generator_init(generator_allocate(rb_cGenerator), iter);
    VALUE arg0 = rb_check_arity(argc, 0, 1) ? argv[0] : Qnil;
    /* size may be nil, a callable, Float::INFINITY, or an Integer */
    VALUE size = convert_to_feasible_size_value(arg0);

    return enumerator_init(obj, recv, sym_each, 0, 0, 0, size, false);
}
498
499/* :nodoc: */
500static VALUE
501enumerator_init_copy(VALUE obj, VALUE orig)
502{
503 struct enumerator *ptr0, *ptr1;
504
505 if (!OBJ_INIT_COPY(obj, orig)) return obj;
506 ptr0 = enumerator_ptr(orig);
507 if (ptr0->fib) {
508 /* Fibers cannot be copied */
509 rb_raise(rb_eTypeError, "can't copy execution context");
510 }
511
512 TypedData_Get_Struct(obj, struct enumerator, &enumerator_data_type, ptr1);
513
514 if (!ptr1) {
515 rb_raise(rb_eArgError, "unallocated enumerator");
516 }
517
518 RB_OBJ_WRITE(obj, &ptr1->obj, ptr0->obj);
519 ptr1->meth = ptr0->meth;
520 RB_OBJ_WRITE(obj, &ptr1->args, ptr0->args);
521 ptr1->fib = 0;
522 ptr1->lookahead = Qundef;
523 ptr1->feedvalue = Qundef;
524 RB_OBJ_WRITE(obj, &ptr1->size, ptr0->size);
525 ptr1->size_fn = ptr0->size_fn;
526
527 return obj;
528}
529
530/*
531 * For backwards compatibility; use rb_enumeratorize_with_size
532 */
VALUE
rb_enumeratorize(VALUE obj, VALUE meth, int argc, const VALUE *argv)
{
    /* no size function: Enumerator#size will report nil */
    return rb_enumeratorize_with_size(obj, meth, argc, argv, 0);
}
538
539static VALUE lazy_to_enum_i(VALUE self, VALUE meth, int argc, const VALUE *argv, rb_enumerator_size_func *size_fn, int kw_splat);
540static int lazy_precheck(VALUE procs);
541
/* Build an Enumerator enumerating +meth+ on +obj+ with the given
 * arguments, size function, and keyword-splat mode. */
VALUE
rb_enumeratorize_with_size_kw(VALUE obj, VALUE meth, int argc, const VALUE *argv, rb_enumerator_size_func *size_fn, int kw_splat)
{
    VALUE base_class = rb_cEnumerator;

    if (RTEST(rb_obj_is_kind_of(obj, rb_cLazy))) {
        /* enumerating a lazy enumerator stays lazy */
        base_class = rb_cLazy;
    }
    else if (RTEST(rb_obj_is_kind_of(obj, rb_cEnumChain))) {
        /* wrap a chain in a plain Enumerator first so the result is a
         * regular Enumerator rather than an Enumerator::Chain */
        obj = enumerator_init(enumerator_allocate(rb_cEnumerator), obj, sym_each, 0, 0, 0, Qnil, false);
    }

    return enumerator_init(enumerator_allocate(base_class),
                           obj, meth, argc, argv, size_fn, Qnil, kw_splat);
}
557
/* As rb_enumeratorize_with_size_kw, taking the keyword-splat mode from
 * the current method call. */
VALUE
rb_enumeratorize_with_size(VALUE obj, VALUE meth, int argc, const VALUE *argv, rb_enumerator_size_func *size_fn)
{
    return rb_enumeratorize_with_size_kw(obj, meth, argc, argv, size_fn, rb_keyword_given_p());
}
563
/* Invoke the enumerator's stored method (e->obj.e->meth(*e->args)),
 * with +func+ as the C block — or, when func is 0, passing the
 * caller's Ruby block through. Honors the keyword-splat flag captured
 * when the enumerator was created. */
static VALUE
enumerator_block_call(VALUE obj, rb_block_call_func *func, VALUE arg)
{
    int argc = 0;
    const VALUE *argv = 0;
    const struct enumerator *e = enumerator_ptr(obj);
    ID meth = e->meth;

    VALUE args = e->args;
    if (args) {
        argc = RARRAY_LENINT(args);
        argv = RARRAY_CONST_PTR(args);
    }

    VALUE ret = rb_block_call_kw(e->obj, meth, argc, argv, func, arg, e->kw_splat);

    /* keep args (and thus the argv pointer) alive across the call */
    RB_GC_GUARD(args);

    return ret;
}
584
585/*
586 * call-seq:
587 * enum.each { |elm| block } -> obj
588 * enum.each -> enum
589 * enum.each(*appending_args) { |elm| block } -> obj
590 * enum.each(*appending_args) -> an_enumerator
591 *
592 * Iterates over the block according to how this Enumerator was constructed.
593 * If no block and no arguments are given, returns self.
594 *
595 * === Examples
596 *
597 * "Hello, world!".scan(/\w+/) #=> ["Hello", "world"]
598 * "Hello, world!".to_enum(:scan, /\w+/).to_a #=> ["Hello", "world"]
599 * "Hello, world!".to_enum(:scan).each(/\w+/).to_a #=> ["Hello", "world"]
600 *
601 * obj = Object.new
602 *
603 * def obj.each_arg(a, b=:b, *rest)
604 * yield a
605 * yield b
606 * yield rest
607 * :method_returned
608 * end
609 *
610 * enum = obj.to_enum :each_arg, :a, :x
611 *
612 * enum.each.to_a #=> [:a, :x, []]
613 * enum.each.equal?(enum) #=> true
614 * enum.each { |elm| elm } #=> :method_returned
615 *
616 * enum.each(:y, :z).to_a #=> [:a, :x, [:y, :z]]
617 * enum.each(:y, :z).equal?(enum) #=> false
618 * enum.each(:y, :z) { |elm| elm } #=> :method_returned
619 *
620 */
static VALUE
enumerator_each(int argc, VALUE *argv, VALUE obj)
{
    struct enumerator *e = enumerator_ptr(obj);

    if (argc > 0) {
        /* each(*args) appends to the stored arguments, operating on a
         * duplicate so the receiver itself is left untouched */
        VALUE args = (e = enumerator_ptr(obj = rb_obj_dup(obj)))->args;
        if (args) {
#if SIZEOF_INT < SIZEOF_LONG
            /* check int range overflow */
            rb_long2int(RARRAY_LEN(args) + argc);
#endif
            args = rb_ary_dup(args);
            rb_ary_cat(args, argv, argc);
        }
        else {
            args = rb_ary_new4(argc, argv);
        }
        RB_OBJ_WRITE(obj, &e->args, args);
        /* extra arguments invalidate any previously known size */
        e->size = Qnil;
        e->size_fn = 0;
    }
    if (!rb_block_given_p()) return obj;

    /* a lazy chain may be known in advance to yield nothing */
    if (!lazy_precheck(e->procs)) return Qnil;

    return enumerator_block_call(obj, 0, obj);
}
649
650static VALUE
651enumerator_with_index_i(RB_BLOCK_CALL_FUNC_ARGLIST(val, m))
652{
653 struct MEMO *memo = (struct MEMO *)m;
654 VALUE idx = memo->v1;
655 MEMO_V1_SET(memo, rb_int_succ(idx));
656
657 if (argc <= 1)
658 return rb_yield_values(2, val, idx);
659
660 return rb_yield_values(2, rb_ary_new4(argc, argv), idx);
661}
662
663static VALUE
664enumerator_size(VALUE obj);
665
/* Size function for enumerators returned by #with_index and friends:
 * the derived enumerator has the same size as the receiver. */
static VALUE
enumerator_enum_size(VALUE obj, VALUE args, VALUE eobj)
{
    return enumerator_size(obj);
}
671
672/*
673 * call-seq:
674 * e.with_index(offset = 0) {|(*args), idx| ... }
675 * e.with_index(offset = 0)
676 *
677 * Iterates the given block for each element with an index, which
678 * starts from +offset+. If no block is given, returns a new Enumerator
679 * that includes the index, starting from +offset+
680 *
681 * +offset+:: the starting index to use
682 *
683 */
static VALUE
enumerator_with_index(int argc, VALUE *argv, VALUE obj)
{
    VALUE memo;

    rb_check_arity(argc, 0, 1);
    RETURN_SIZED_ENUMERATOR(obj, argc, argv, enumerator_enum_size);
    /* nil or missing offset defaults to 0; otherwise coerce to Integer */
    memo = (!argc || NIL_P(memo = argv[0])) ? INT2FIX(0) : rb_to_int(memo);
    /* the MEMO carries the running index across block calls */
    return enumerator_block_call(obj, enumerator_with_index_i, (VALUE)rb_imemo_memo_new(memo, 0, 0));
}
694
695/*
696 * call-seq:
697 * e.each_with_index {|(*args), idx| ... }
698 * e.each_with_index
699 *
700 * Same as Enumerator#with_index(0), i.e. there is no starting offset.
701 *
702 * If no block is given, a new Enumerator is returned that includes the index.
703 *
704 */
/* Enumerator#each_with_index: #with_index with no starting offset. */
static VALUE
enumerator_each_with_index(VALUE obj)
{
    return enumerator_with_index(0, NULL, obj);
}
710
711static VALUE
712enumerator_with_object_i(RB_BLOCK_CALL_FUNC_ARGLIST(val, memo))
713{
714 if (argc <= 1)
715 return rb_yield_values(2, val, memo);
716
717 return rb_yield_values(2, rb_ary_new4(argc, argv), memo);
718}
719
720/*
721 * call-seq:
722 * e.each_with_object(obj) {|(*args), obj| ... }
723 * e.each_with_object(obj)
724 * e.with_object(obj) {|(*args), obj| ... }
725 * e.with_object(obj)
726 *
727 * Iterates the given block for each element with an arbitrary object, +obj+,
728 * and returns +obj+
729 *
730 * If no block is given, returns a new Enumerator.
731 *
732 * === Example
733 *
734 * to_three = Enumerator.new do |y|
735 * 3.times do |x|
736 * y << x
737 * end
738 * end
739 *
740 * to_three_with_string = to_three.with_object("foo")
741 * to_three_with_string.each do |x,string|
742 * puts "#{string}: #{x}"
743 * end
744 *
745 * # => foo: 0
746 * # => foo: 1
747 * # => foo: 2
748 */
static VALUE
enumerator_with_object(VALUE obj, VALUE memo)
{
    RETURN_SIZED_ENUMERATOR(obj, 1, &memo, enumerator_enum_size);
    enumerator_block_call(obj, enumerator_with_object_i, memo);

    /* always return the memo object, not the iteration's own result */
    return memo;
}
757
/* Runs inside the enumerator's fiber: receives each set of yielded
 * values, hands them (packed into an array) back to the consumer via
 * rb_fiber_yield, and returns the value set by #feed (or nil) as the
 * result of the underlying `yield`. */
static VALUE
next_ii(RB_BLOCK_CALL_FUNC_ARGLIST(i, obj))
{
    struct enumerator *e = enumerator_ptr(obj);
    VALUE feedvalue = Qnil;
    VALUE args = rb_ary_new4(argc, argv);
    rb_fiber_yield(1, &args);
    if (!UNDEF_P(e->feedvalue)) {
        /* the feed value is consumed exactly once */
        feedvalue = e->feedvalue;
        e->feedvalue = Qundef;
    }
    return feedvalue;
}
771
/* Fiber body for external iteration: runs the whole `each` loop, then
 * records a StopIteration (carrying the iteration's return value in
 * its `result` ivar) and parks the fiber with a final nil. */
static VALUE
next_i(RB_BLOCK_CALL_FUNC_ARGLIST(_, obj))
{
    struct enumerator *e = enumerator_ptr(obj);
    VALUE nil = Qnil;
    VALUE result;

    result = rb_block_call(obj, id_each, 0, 0, next_ii, obj);
    RB_OBJ_WRITE(obj, &e->stop_exc, rb_exc_new2(rb_eStopIteration, "iteration reached an end"));
    rb_ivar_set(e->stop_exc, id_result, result);
    return rb_fiber_yield(1, &nil);
}
784
/* (Re)create the iteration fiber and clear the lookahead cache;
 * e->dst records the fiber that consumes the values. */
static void
next_init(VALUE obj, struct enumerator *e)
{
    VALUE curr = rb_fiber_current();
    RB_OBJ_WRITE(obj, &e->dst, curr);
    RB_OBJ_WRITE(obj, &e->fib, rb_fiber_new(next_i, obj));
    e->lookahead = Qundef;
}
793
/* Resume the iteration fiber and return the next set of yielded values
 * (the array built by next_ii). Once iteration has finished, every
 * further call raises a fresh StopIteration chained (via `cause`) to
 * the recorded one, so the original `result` is preserved while no
 * stored exception object is ever re-raised (its backtrace would be
 * mutated otherwise). */
static VALUE
get_next_values(VALUE obj, struct enumerator *e)
{
    VALUE curr, vs;

    if (e->stop_exc) {
        /* already finished: raise a copy chained to the original */
        VALUE exc = e->stop_exc;
        VALUE result = rb_attr_get(exc, id_result);
        VALUE mesg = rb_attr_get(exc, idMesg);
        if (!NIL_P(mesg)) mesg = rb_str_dup(mesg);
        VALUE stop_exc = rb_exc_new_str(rb_eStopIteration, mesg);
        rb_ivar_set(stop_exc, id_cause, exc);
        rb_ivar_set(stop_exc, id_result, result);
        rb_exc_raise(stop_exc);
    }

    curr = rb_fiber_current();

    /* lazily (re)create the fiber on first use or after it died */
    if (!e->fib || !rb_fiber_alive_p(e->fib)) {
        next_init(obj, e);
    }

    vs = rb_fiber_resume(e->fib, 1, &curr);
    if (e->stop_exc) {
        /* iteration ended during this resume: drop all fiber state */
        e->fib = 0;
        e->dst = Qnil;
        e->lookahead = Qundef;
        e->feedvalue = Qundef;
        rb_exc_raise(e->stop_exc);
    }
    return vs;
}
826
827/*
828 * call-seq:
829 * e.next_values -> array
830 *
 * Returns the next object as an array in the enumerator, and moves the
 * internal position forward. When the position reaches the end,
 * StopIteration is raised.
834 *
835 * See class-level notes about external iterators.
836 *
837 * This method can be used to distinguish <code>yield</code> and <code>yield
838 * nil</code>.
839 *
840 * === Example
841 *
842 * o = Object.new
843 * def o.each
844 * yield
845 * yield 1
846 * yield 1, 2
847 * yield nil
848 * yield [1, 2]
849 * end
850 * e = o.to_enum
851 * p e.next_values
852 * p e.next_values
853 * p e.next_values
854 * p e.next_values
855 * p e.next_values
856 * e = o.to_enum
857 * p e.next
858 * p e.next
859 * p e.next
860 * p e.next
861 * p e.next
862 *
863 * ## yield args next_values next
864 * # yield [] nil
865 * # yield 1 [1] 1
866 * # yield 1, 2 [1, 2] [1, 2]
867 * # yield nil [nil] nil
868 * # yield [1, 2] [[1, 2]] [1, 2]
869 *
870 */
871
static VALUE
enumerator_next_values(VALUE obj)
{
    struct enumerator *e = enumerator_ptr(obj);
    VALUE vs;

    rb_check_frozen(obj);

    /* a pending #peek result satisfies the next #next_values */
    if (!UNDEF_P(e->lookahead)) {
        vs = e->lookahead;
        e->lookahead = Qundef;
        return vs;
    }

    return get_next_values(obj, e);
}
888
889static VALUE
890ary2sv(VALUE args, int dup)
891{
892 if (!RB_TYPE_P(args, T_ARRAY))
893 return args;
894
895 switch (RARRAY_LEN(args)) {
896 case 0:
897 return Qnil;
898
899 case 1:
900 return RARRAY_AREF(args, 0);
901
902 default:
903 if (dup)
904 return rb_ary_dup(args);
905 return args;
906 }
907}
908
909/*
910 * call-seq:
911 * e.next -> object
912 *
 * Returns the next object in the enumerator, and moves the internal position
 * forward. When the position reaches the end, StopIteration is raised.
915 *
916 * === Example
917 *
918 * a = [1,2,3]
919 * e = a.to_enum
920 * p e.next #=> 1
921 * p e.next #=> 2
922 * p e.next #=> 3
923 * p e.next #raises StopIteration
924 *
925 * See class-level notes about external iterators.
926 *
927 */
928
929static VALUE
930enumerator_next(VALUE obj)
931{
932 VALUE vs = enumerator_next_values(obj);
933 return ary2sv(vs, 0);
934}
935
static VALUE
enumerator_peek_values(VALUE obj)
{
    struct enumerator *e = enumerator_ptr(obj);

    rb_check_frozen(obj);

    /* fill the one-element lookahead cache on first peek */
    if (UNDEF_P(e->lookahead)) {
        RB_OBJ_WRITE(obj, &e->lookahead, get_next_values(obj, e));
    }

    return e->lookahead;
}
949
950/*
951 * call-seq:
952 * e.peek_values -> array
953 *
954 * Returns the next object as an array, similar to Enumerator#next_values, but
955 * doesn't move the internal position forward. If the position is already at
956 * the end, StopIteration is raised.
957 *
958 * See class-level notes about external iterators.
959 *
960 * === Example
961 *
962 * o = Object.new
963 * def o.each
964 * yield
965 * yield 1
966 * yield 1, 2
967 * end
968 * e = o.to_enum
969 * p e.peek_values #=> []
970 * e.next
971 * p e.peek_values #=> [1]
972 * p e.peek_values #=> [1]
973 * e.next
974 * p e.peek_values #=> [1, 2]
975 * e.next
976 * p e.peek_values # raises StopIteration
977 *
978 */
979
/* Enumerator#peek_values: returns a copy so callers cannot mutate the
 * internal lookahead array. */
static VALUE
enumerator_peek_values_m(VALUE obj)
{
    return rb_ary_dup(enumerator_peek_values(obj));
}
985
986/*
987 * call-seq:
988 * e.peek -> object
989 *
990 * Returns the next object in the enumerator, but doesn't move the internal
991 * position forward. If the position is already at the end, StopIteration
992 * is raised.
993 *
994 * See class-level notes about external iterators.
995 *
996 * === Example
997 *
998 * a = [1,2,3]
999 * e = a.to_enum
1000 * p e.next #=> 1
1001 * p e.peek #=> 2
1002 * p e.peek #=> 2
1003 * p e.peek #=> 2
1004 * p e.next #=> 2
1005 * p e.next #=> 3
1006 * p e.peek #raises StopIteration
1007 *
1008 */
1009
1010static VALUE
1011enumerator_peek(VALUE obj)
1012{
1013 VALUE vs = enumerator_peek_values(obj);
1014 return ary2sv(vs, 1);
1015}
1016
1017/*
1018 * call-seq:
1019 * e.feed obj -> nil
1020 *
1021 * Sets the value to be returned by the next yield inside +e+.
1022 *
1023 * If the value is not set, the yield returns nil.
1024 *
1025 * This value is cleared after being yielded.
1026 *
1027 * # Array#map passes the array's elements to "yield" and collects the
1028 * # results of "yield" as an array.
1029 * # Following example shows that "next" returns the passed elements and
1030 * # values passed to "feed" are collected as an array which can be
1031 * # obtained by StopIteration#result.
1032 * e = [1,2,3].map
1033 * p e.next #=> 1
1034 * e.feed "a"
1035 * p e.next #=> 2
1036 * e.feed "b"
1037 * p e.next #=> 3
1038 * e.feed "c"
1039 * begin
1040 * e.next
1041 * rescue StopIteration
1042 * p $!.result #=> ["a", "b", "c"]
1043 * end
1044 *
1045 * o = Object.new
1046 * def o.each
1047 * x = yield # (2) blocks
1048 * p x # (5) => "foo"
1049 * x = yield # (6) blocks
1050 * p x # (8) => nil
1051 * x = yield # (9) blocks
1052 * p x # not reached w/o another e.next
1053 * end
1054 *
1055 * e = o.to_enum
1056 * e.next # (1)
1057 * e.feed "foo" # (3)
1058 * e.next # (4)
1059 * e.next # (7)
1060 * # (10)
1061 */
1062
static VALUE
enumerator_feed(VALUE obj, VALUE v)
{
    struct enumerator *e = enumerator_ptr(obj);

    rb_check_frozen(obj);

    /* only one pending feed value is allowed between two #next calls */
    if (!UNDEF_P(e->feedvalue)) {
        rb_raise(rb_eTypeError, "feed value already set");
    }
    RB_OBJ_WRITE(obj, &e->feedvalue, v);

    return Qnil;
}
1077
1078/*
1079 * call-seq:
1080 * e.rewind -> e
1081 *
1082 * Rewinds the enumeration sequence to the beginning.
1083 *
1084 * If the enclosed object responds to a "rewind" method, it is called.
1085 */
1086
static VALUE
enumerator_rewind(VALUE obj)
{
    struct enumerator *e = enumerator_ptr(obj);

    rb_check_frozen(obj);

    /* give the wrapped object a chance to rewind itself (optional) */
    rb_check_funcall(e->obj, id_rewind, 0, 0);

    /* drop all external-iteration state; a fresh fiber is created
     * lazily by the next call to #next/#peek */
    e->fib = 0;
    e->dst = Qnil;
    e->lookahead = Qundef;
    e->feedvalue = Qundef;
    e->stop_exc = Qfalse;
    return obj;
}
1103
1104static struct generator *generator_ptr(VALUE obj);
1105static VALUE append_method(VALUE obj, VALUE str, ID default_method, VALUE default_args);
1106
/* Recursion-safe worker for Enumerator#inspect; +recur+ is nonzero
 * when rb_exec_recursive detects self-reference. */
static VALUE
inspect_enumerator(VALUE obj, VALUE dummy, int recur)
{
    struct enumerator *e;
    VALUE eobj, str, cname;

    TypedData_Get_Struct(obj, struct enumerator, &enumerator_data_type, e);

    cname = rb_obj_class(obj);

    if (!e || UNDEF_P(e->obj)) {
        return rb_sprintf("#<%"PRIsVALUE": uninitialized>", rb_class_path(cname));
    }

    if (recur) {
        str = rb_sprintf("#<%"PRIsVALUE": ...>", rb_class_path(cname));
        return str;
    }

    if (e->procs) {
        long i;

        eobj = generator_ptr(e->obj)->obj;
        /* In case procs chained enumerator traversing all proc entries manually */
        if (rb_obj_class(eobj) == cname) {
            str = rb_inspect(eobj);
        }
        else {
            str = rb_sprintf("#<%"PRIsVALUE": %+"PRIsVALUE">", rb_class_path(cname), eobj);
        }
        /* wrap the description once per chained lazy proc entry */
        for (i = 0; i < RARRAY_LEN(e->procs); i++) {
            str = rb_sprintf("#<%"PRIsVALUE": %"PRIsVALUE, cname, str);
            append_method(RARRAY_AREF(e->procs, i), str, e->meth, e->args);
            rb_str_buf_cat2(str, ">");
        }
        return str;
    }

    /* prefer the `receiver` ivar (set by to_enum) over the raw obj */
    eobj = rb_attr_get(obj, id_receiver);
    if (NIL_P(eobj)) {
        eobj = e->obj;
    }

    /* (1..100).each_cons(2) => "#<Enumerator: 1..100:each_cons(2)>" */
    str = rb_sprintf("#<%"PRIsVALUE": %+"PRIsVALUE, rb_class_path(cname), eobj);
    append_method(obj, str, e->meth, e->args);

    rb_str_buf_cat2(str, ">");

    return str;
}
1158
1159static int
1160key_symbol_p(VALUE key, VALUE val, VALUE arg)
1161{
1162 if (SYMBOL_P(key)) return ST_CONTINUE;
1163 *(int *)arg = FALSE;
1164 return ST_STOP;
1165}
1166
1167static int
1168kwd_append(VALUE key, VALUE val, VALUE str)
1169{
1170 if (!SYMBOL_P(key)) rb_raise(rb_eRuntimeError, "non-symbol key inserted");
1171 rb_str_catf(str, "% "PRIsVALUE": %"PRIsVALUE", ", key, val);
1172 return ST_CONTINUE;
1173}
1174
/* Append ":method(arg, ..., key: val, ...)" to +str+ for inspect
 * output. The `method`/`arguments` ivars (when present) take
 * precedence over +default_method+/+default_args+; a Qfalse value
 * suppresses the corresponding part entirely. Returns +str+. */
static VALUE
append_method(VALUE obj, VALUE str, ID default_method, VALUE default_args)
{
    VALUE method, eargs;

    method = rb_attr_get(obj, id_method);
    if (method != Qfalse) {
        if (!NIL_P(method)) {
            Check_Type(method, T_SYMBOL);
            method = rb_sym2str(method);
        }
        else {
            method = rb_id2str(default_method);
        }
        rb_str_buf_cat2(str, ":");
        rb_str_buf_append(str, method);
    }

    eargs = rb_attr_get(obj, id_arguments);
    if (NIL_P(eargs)) {
        eargs = default_args;
    }
    if (eargs != Qfalse) {
        long argc = RARRAY_LEN(eargs);
        const VALUE *argv = RARRAY_CONST_PTR(eargs); /* WB: no new reference */

        if (argc > 0) {
            VALUE kwds = Qnil;

            rb_str_buf_cat2(str, "(");

            /* a trailing hash with all-Symbol keys is shown as keywords */
            if (RB_TYPE_P(argv[argc-1], T_HASH) && !RHASH_EMPTY_P(argv[argc-1])) {
                int all_key = TRUE;
                rb_hash_foreach(argv[argc-1], key_symbol_p, (VALUE)&all_key);
                if (all_key) kwds = argv[--argc];
            }

            while (argc--) {
                VALUE arg = *argv++;

                rb_str_append(str, rb_inspect(arg));
                rb_str_buf_cat2(str, ", ");
            }
            if (!NIL_P(kwds)) {
                rb_hash_foreach(kwds, kwd_append, str);
            }
            /* every appended item left a trailing ", "; chop it off */
            rb_str_set_len(str, RSTRING_LEN(str)-2);
            rb_str_buf_cat2(str, ")");
        }
    }

    return str;
}
1228
1229/*
1230 * call-seq:
1231 * e.inspect -> string
1232 *
1233 * Creates a printable version of <i>e</i>.
1234 */
1235
static VALUE
enumerator_inspect(VALUE obj)
{
    /* rb_exec_recursive guards against enumerators that (indirectly)
     * contain themselves, which would otherwise recurse forever. */
    return rb_exec_recursive(inspect_enumerator, obj, 0);
}
1241
1242/*
1243 * call-seq:
1244 * e.size -> int, Float::INFINITY or nil
1245 *
1246 * Returns the size of the enumerator, or +nil+ if it can't be calculated lazily.
1247 *
1248 * (1..100).to_a.permutation(4).size # => 94109400
1249 * loop.size # => Float::INFINITY
1250 * (1..100).drop_while.size # => nil
1251 *
1252 * Note that enumerator size might be inaccurate, and should be rather treated as a hint.
1253 * For example, there is no check that the size provided to ::new is accurate:
1254 *
1255 * e = Enumerator.new(5) { |y| 2.times { y << it} }
1256 * e.size # => 5
1257 * e.to_a.size # => 2
1258 *
1259 * Another example is an enumerator created by ::produce without a +size+ argument.
1260 * Such enumerators return +Infinity+ for size, but this is inaccurate if the passed
1261 * block raises StopIteration:
1262 *
1263 * e = Enumerator.produce(1) { it + 1 }
1264 * e.size # => Infinity
1265 *
1266 * e = Enumerator.produce(1) { it > 3 ? raise(StopIteration) : it + 1 }
1267 * e.size # => Infinity
1268 * e.to_a.size # => 4
1269 */
1270
static VALUE
enumerator_size(VALUE obj)
{
    struct enumerator *e = enumerator_ptr(obj);
    int argc = 0;
    const VALUE *argv = NULL;
    VALUE size;

    if (e->procs) {
        /* Lazy chain: start from the original receiver's #size and let each
         * chained entry transform it (e.g. take(n) caps it).  An entry
         * without a size function makes the size unknown. */
        struct generator *g = generator_ptr(e->obj);
        VALUE receiver = rb_check_funcall(g->obj, id_size, 0, 0);
        long i = 0;

        for (i = 0; i < RARRAY_LEN(e->procs); i++) {
            VALUE proc = RARRAY_AREF(e->procs, i);
            struct proc_entry *entry = proc_entry_ptr(proc);
            lazyenum_size_func *size_fn = entry->fn->size;
            if (!size_fn) {
                return Qnil;
            }
            receiver = (*size_fn)(proc, receiver);
        }
        return receiver;
    }

    /* C-level size function takes precedence over a stored size value */
    if (e->size_fn) {
        return (*e->size_fn)(e->obj, e->args, obj);
    }
    if (e->args) {
        argc = (int)RARRAY_LEN(e->args);
        argv = RARRAY_CONST_PTR(e->args);
    }
    /* e->size may be a callable (from Enumerator.new's size argument);
     * otherwise it is returned as-is (Integer, Float::INFINITY or nil). */
    size = rb_check_funcall_kw(e->size, id_call, argc, argv, e->kw_splat);
    if (!UNDEF_P(size)) return size;
    return e->size;
}
1307
1308/*
1309 * Yielder
1310 */
/* GC mark/compact callback: keeps the yielder's proc alive and updates the
 * reference when the object is moved by compaction. */
static void
yielder_mark_and_move(void *p)
{
    struct yielder *ptr = p;
    rb_gc_mark_and_move(&ptr->proc);
}
1317
1318static const rb_data_type_t yielder_data_type = {
1319 "yielder",
1320 {
1321 yielder_mark_and_move,
1323 NULL,
1324 yielder_mark_and_move,
1325 },
1326 0, 0, RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED | RUBY_TYPED_EMBEDDABLE
1327};
1328
/* Unwraps a Yielder VALUE to its struct pointer; raises ArgumentError if
 * the object was allocated but never initialized (proc still Qundef). */
static struct yielder *
yielder_ptr(VALUE obj)
{
    struct yielder *ptr;

    TypedData_Get_Struct(obj, struct yielder, &yielder_data_type, ptr);
    if (!ptr || UNDEF_P(ptr->proc)) {
        rb_raise(rb_eArgError, "uninitialized yielder");
    }
    return ptr;
}
1340
1341/* :nodoc: */
1342static VALUE
1343yielder_allocate(VALUE klass)
1344{
1345 struct yielder *ptr;
1346 VALUE obj;
1347
1348 obj = TypedData_Make_Struct(klass, struct yielder, &yielder_data_type, ptr);
1349 ptr->proc = Qundef;
1350
1351 return obj;
1352}
1353
/* Stores +proc+ as the target of this yielder's #yield; returns +obj+.
 * Accepts a not-yet-initialized yielder (unlike yielder_ptr). */
static VALUE
yielder_init(VALUE obj, VALUE proc)
{
    struct yielder *ptr;

    TypedData_Get_Struct(obj, struct yielder, &yielder_data_type, ptr);

    if (!ptr) {
        rb_raise(rb_eArgError, "unallocated yielder");
    }

    RB_OBJ_WRITE(obj, &ptr->proc, proc);

    return obj;
}
1369
/* :nodoc: */
static VALUE
yielder_initialize(VALUE obj)
{
    /* the mandatory block becomes the proc invoked by Yielder#yield */
    rb_need_block();

    return yielder_init(obj, rb_block_proc());
}
1378
/* :nodoc: */
/* Yielder#yield: forwards all arguments (keywords included) to the stored
 * proc and returns the proc's result. */
static VALUE
yielder_yield(VALUE obj, VALUE args)
{
    struct yielder *ptr = yielder_ptr(obj);

    return rb_proc_call_kw(ptr->proc, args, RB_PASS_CALLED_KEYWORDS);
}
1387
/* :nodoc: */
/* Yielder#<<: yields a single value, ignoring the proc's result and
 * returning self so that calls chain (y << a << b). */
static VALUE
yielder_yield_push(VALUE obj, VALUE arg)
{
    struct yielder *ptr = yielder_ptr(obj);

    rb_proc_call_with_block(ptr->proc, 1, &arg, Qnil);

    return obj;
}
1398
1399/*
1400 * Returns a Proc object that takes arguments and yields them.
1401 *
1402 * This method is implemented so that a Yielder object can be directly
1403 * passed to another method as a block argument.
1404 *
1405 * enum = Enumerator.new { |y|
1406 * Dir.glob("*.rb") { |file|
1407 * File.open(file) { |f| f.each_line(&y) }
1408 * }
1409 * }
1410 */
1411static VALUE
1412yielder_to_proc(VALUE obj)
1413{
1414 VALUE method = rb_obj_method(obj, sym_yield);
1415
1416 return rb_funcall(method, idTo_proc, 0);
1417}
1418
/* Proc body for yielder_new: re-yields its arguments to the block of the
 * iteration currently in progress. */
static VALUE
yielder_yield_i(RB_BLOCK_CALL_FUNC_ARGLIST(obj, memo))
{
    return rb_yield_values_kw(argc, argv, RB_PASS_CALLED_KEYWORDS);
}
1424
/* Builds the internal yielder handed to a generator's proc; everything
 * pushed into it is forwarded to the consumer's block. */
static VALUE
yielder_new(void)
{
    return yielder_init(yielder_allocate(rb_cYielder), rb_proc_new(yielder_yield_i, 0));
}
1430
1431/*
1432 * Generator
1433 */
/* GC mark/compact callback: keeps the generator's proc and receiver alive
 * and updates both references during compaction. */
static void
generator_mark_and_move(void *p)
{
    struct generator *ptr = p;
    rb_gc_mark_and_move(&ptr->proc);
    rb_gc_mark_and_move(&ptr->obj);
}
1441
1442static const rb_data_type_t generator_data_type = {
1443 "generator",
1444 {
1445 generator_mark_and_move,
1447 NULL,
1448 generator_mark_and_move,
1449 },
1450 0, 0, RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED | RUBY_TYPED_EMBEDDABLE
1451};
1452
/* Unwraps a Generator VALUE to its struct pointer; raises ArgumentError if
 * the object was allocated but never initialized (proc still Qundef). */
static struct generator *
generator_ptr(VALUE obj)
{
    struct generator *ptr;

    TypedData_Get_Struct(obj, struct generator, &generator_data_type, ptr);
    if (!ptr || UNDEF_P(ptr->proc)) {
        rb_raise(rb_eArgError, "uninitialized generator");
    }
    return ptr;
}
1464
1465/* :nodoc: */
1466static VALUE
1467generator_allocate(VALUE klass)
1468{
1469 struct generator *ptr;
1470 VALUE obj;
1471
1472 obj = TypedData_Make_Struct(klass, struct generator, &generator_data_type, ptr);
1473 ptr->proc = Qundef;
1474
1475 return obj;
1476}
1477
/* Stores +proc+ as the generator's iteration body; returns +obj+.
 * Rejects frozen receivers and accepts a not-yet-initialized generator. */
static VALUE
generator_init(VALUE obj, VALUE proc)
{
    struct generator *ptr;

    rb_check_frozen(obj);
    TypedData_Get_Struct(obj, struct generator, &generator_data_type, ptr);

    if (!ptr) {
        rb_raise(rb_eArgError, "unallocated generator");
    }

    RB_OBJ_WRITE(obj, &ptr->proc, proc);

    return obj;
}
1494
1495/* :nodoc: */
1496static VALUE
1497generator_initialize(int argc, VALUE *argv, VALUE obj)
1498{
1499 VALUE proc;
1500
1501 if (argc == 0) {
1502 rb_need_block();
1503
1504 proc = rb_block_proc();
1505 }
1506 else {
1507 rb_scan_args(argc, argv, "1", &proc);
1508
1509 if (!rb_obj_is_proc(proc))
1510 rb_raise(rb_eTypeError,
1511 "wrong argument type %"PRIsVALUE" (expected Proc)",
1512 rb_obj_class(proc));
1513
1514 if (rb_block_given_p()) {
1515 rb_warn("given block not used");
1516 }
1517 }
1518
1519 return generator_init(obj, proc);
1520}
1521
/* :nodoc: */
/* dup/clone hook: copies the source generator's proc into the copy.
 * Uses generator_ptr on the source (must be initialized) but a raw
 * TypedData fetch on the copy (may be freshly allocated). */
static VALUE
generator_init_copy(VALUE obj, VALUE orig)
{
    struct generator *ptr0, *ptr1;

    if (!OBJ_INIT_COPY(obj, orig)) return obj;

    ptr0 = generator_ptr(orig);

    TypedData_Get_Struct(obj, struct generator, &generator_data_type, ptr1);

    if (!ptr1) {
        rb_raise(rb_eArgError, "unallocated generator");
    }

    RB_OBJ_WRITE(obj, &ptr1->proc, ptr0->proc);

    return obj;
}
1542
1543/* :nodoc: */
1544static VALUE
1545generator_each(int argc, VALUE *argv, VALUE obj)
1546{
1547 struct generator *ptr = generator_ptr(obj);
1548 VALUE args = rb_ary_new2(argc + 1);
1549
1550 rb_ary_push(args, yielder_new());
1551 if (argc > 0) {
1552 rb_ary_cat(args, argv, argc);
1553 }
1554
1555 return rb_proc_call_kw(ptr->proc, args, RB_PASS_CALLED_KEYWORDS);
1556}
1557
1558/* Lazy Enumerator methods */
1559static VALUE
1560enum_size(VALUE self)
1561{
1562 VALUE r = rb_check_funcall(self, id_size, 0, 0);
1563 return UNDEF_P(r) ? Qnil : r;
1564}
1565
/* rb_enumerator_size_func adapter that simply delegates to the receiver's
 * own #size (args and eobj are unused). */
static VALUE
lazyenum_size(VALUE self, VALUE args, VALUE eobj)
{
    return enum_size(self);
}
1571
1572#define lazy_receiver_size lazy_map_size
1573
/* Inner iterator for Lazy.new's generator: prepends the yielder +m+ to the
 * yielded values and passes them to the user-supplied block.  A Qundef
 * result signals the enclosing loop to break. */
static VALUE
lazy_init_iterator(RB_BLOCK_CALL_FUNC_ARGLIST(val, m))
{
    VALUE result;
    if (argc == 1) {
        /* fast path: no heap allocation for the common single-value yield */
        VALUE args[2];
        args[0] = m;
        args[1] = val;
        result = rb_yield_values2(2, args);
    }
    else {
        /* ALLOCV keeps the temporary argv alive/GC-visible via +args+ */
        VALUE args;
        int len = rb_long2int((long)argc + 1);
        VALUE *nargv = ALLOCV_N(VALUE, args, len);

        nargv[0] = m;
        if (argc > 0) {
            MEMCPY(nargv + 1, argv, VALUE, argc);
        }
        result = rb_yield_values2(len, nargv);
        ALLOCV_END(args);
    }
    if (UNDEF_P(result)) rb_iter_break();
    return Qnil;
}
1599
/* Generator body for Lazy.new: iterates argv[0] (the wrapped object) with
 * lazy_init_iterator; the yielder travels in +val+/argv[0] position. */
static VALUE
lazy_init_block_i(RB_BLOCK_CALL_FUNC_ARGLIST(val, m))
{
    rb_block_call(m, id_each, argc-1, argv+1, lazy_init_iterator, val);
    return Qnil;
}
1606
/* Aliases for the struct MEMO fields used to thread lazy-chain state */
#define memo_value v2
#define memo_flags u3.state
/* flag bits kept in memo_flags */
#define LAZY_MEMO_BREAK 1
#define LAZY_MEMO_PACKED 2
#define LAZY_MEMO_BREAK_P(memo) ((memo)->memo_flags & LAZY_MEMO_BREAK)
#define LAZY_MEMO_PACKED_P(memo) ((memo)->memo_flags & LAZY_MEMO_PACKED)
#define LAZY_MEMO_SET_BREAK(memo) ((memo)->memo_flags |= LAZY_MEMO_BREAK)
#define LAZY_MEMO_RESET_BREAK(memo) ((memo)->memo_flags &= ~LAZY_MEMO_BREAK)
#define LAZY_MEMO_SET_VALUE(memo, value) MEMO_V2_SET(memo, value)
#define LAZY_MEMO_SET_PACKED(memo) ((memo)->memo_flags |= LAZY_MEMO_PACKED)
#define LAZY_MEMO_RESET_PACKED(memo) ((memo)->memo_flags &= ~LAZY_MEMO_PACKED)

/* Raises ArgumentError unless the caller gave a block; used by the lazy
 * methods that, unlike their eager counterparts, always require one. */
#define LAZY_NEED_BLOCK(func) \
    if (!rb_block_given_p()) { \
        rb_raise(rb_eArgError, "tried to call lazy " #func " without a block"); \
    }

static VALUE lazy_yielder_result(struct MEMO *result, VALUE yielder, VALUE procs_array, VALUE memos, long i);
1625
/* Entry point for each value produced by the source enumeration: packs the
 * yielded values into a MEMO and runs the whole proc chain from index 0.
 * m = [yielder, procs_array]. */
static VALUE
lazy_init_yielder(RB_BLOCK_CALL_FUNC_ARGLIST(_, m))
{
    VALUE yielder = RARRAY_AREF(m, 0);
    VALUE procs_array = RARRAY_AREF(m, 1);
    VALUE memos = rb_attr_get(yielder, id_memo);
    struct MEMO *result;

    /* PACKED marks a multi-value yield whose values were folded into one array */
    result = rb_imemo_memo_new(m, rb_enum_values_pack(argc, argv),
                               argc > 1 ? LAZY_MEMO_PACKED : 0);
    return lazy_yielder_result(result, yielder, procs_array, memos, 0);
}
1638
/* Re-enters the proc chain at +memo_index+ with a new value; used when one
 * entry (e.g. flat_map) expands into multiple values that must each flow
 * through the remaining entries. */
static VALUE
lazy_yielder_yield(struct MEMO *result, long memo_index, int argc, const VALUE *argv)
{
    VALUE m = result->v1;
    VALUE yielder = RARRAY_AREF(m, 0);
    VALUE procs_array = RARRAY_AREF(m, 1);
    VALUE memos = rb_attr_get(yielder, id_memo);
    LAZY_MEMO_SET_VALUE(result, rb_enum_values_pack(argc, argv));
    if (argc > 1)
        LAZY_MEMO_SET_PACKED(result);
    else
        LAZY_MEMO_RESET_PACKED(result);
    return lazy_yielder_result(result, yielder, procs_array, memos, memo_index);
}
1653
/* Runs the chained proc entries from index +i+ on the current value.  An
 * entry returning NULL drops the value (filtered out or handled by the
 * entry itself); otherwise the final value is pushed into the yielder.
 * The BREAK flag (set by e.g. take) stops the whole iteration. */
static VALUE
lazy_yielder_result(struct MEMO *result, VALUE yielder, VALUE procs_array, VALUE memos, long i)
{
    int cont = 1;

    for (; i < RARRAY_LEN(procs_array); i++) {
        VALUE proc = RARRAY_AREF(procs_array, i);
        struct proc_entry *entry = proc_entry_ptr(proc);
        if (!(*entry->fn->proc)(proc, result, memos, i)) {
            cont = 0;
            break;
        }
    }

    if (cont) {
        rb_funcall2(yielder, idLTLT, 1, &(result->memo_value));
    }
    if (LAZY_MEMO_BREAK_P(result)) {
        rb_iter_break();
    }
    return result->memo_value;
}
1676
/* Generator body for a chained lazy enumerator: prepares per-entry memo
 * storage on the yielder, then iterates the source object, feeding every
 * value through lazy_init_yielder.  m = [source, procs]. */
static VALUE
lazy_init_block(RB_BLOCK_CALL_FUNC_ARGLIST(val, m))
{
    VALUE procs = RARRAY_AREF(m, 1);

    rb_ivar_set(val, id_memo, rb_ary_new2(RARRAY_LEN(procs)));
    rb_block_call(RARRAY_AREF(m, 0), id_each, 0, 0,
                  lazy_init_yielder, rb_ary_new3(2, val, procs));
    return Qnil;
}
1687
1688static VALUE
1689lazy_generator_init(VALUE enumerator, VALUE procs)
1690{
1692 VALUE obj;
1693 struct generator *gen_ptr;
1694 struct enumerator *e = enumerator_ptr(enumerator);
1695
1696 if (RARRAY_LEN(procs) > 0) {
1697 struct generator *old_gen_ptr = generator_ptr(e->obj);
1698 obj = old_gen_ptr->obj;
1699 }
1700 else {
1701 obj = enumerator;
1702 }
1703
1704 generator = generator_allocate(rb_cGenerator);
1705
1706 rb_block_call(generator, id_initialize, 0, 0,
1707 lazy_init_block, rb_ary_new3(2, obj, procs));
1708
1709 gen_ptr = generator_ptr(generator);
1710 RB_OBJ_WRITE(generator, &gen_ptr->obj, obj);
1711
1712 return generator;
1713}
1714
1715static int
1716lazy_precheck(VALUE procs)
1717{
1718 if (RTEST(procs)) {
1719 long num_procs = RARRAY_LEN(procs), i = num_procs;
1720 while (i-- > 0) {
1721 VALUE proc = RARRAY_AREF(procs, i);
1722 struct proc_entry *entry = proc_entry_ptr(proc);
1723 lazyenum_precheck_func *precheck = entry->fn->precheck;
1724 if (precheck && !precheck(proc)) return FALSE;
1725 }
1726 }
1727
1728 return TRUE;
1729}
1730
1731/*
1732 * Document-class: Enumerator::Lazy
1733 *
1734 * Enumerator::Lazy is a special type of Enumerator, that allows constructing
1735 * chains of operations without evaluating them immediately, and evaluating
1736 * values on as-needed basis. In order to do so it redefines most of Enumerable
1737 * methods so that they just construct another lazy enumerator.
1738 *
1739 * Enumerator::Lazy can be constructed from any Enumerable with the
1740 * Enumerable#lazy method.
1741 *
1742 * lazy = (1..Float::INFINITY).lazy.select(&:odd?).drop(10).take_while { |i| i < 30 }
1743 * # => #<Enumerator::Lazy: #<Enumerator::Lazy: #<Enumerator::Lazy: #<Enumerator::Lazy: 1..Infinity>:select>:drop(10)>:take_while>
1744 *
1745 * The real enumeration is performed when any non-redefined Enumerable method
1746 * is called, like Enumerable#first or Enumerable#to_a (the latter is aliased
1747 * as #force for more semantic code):
1748 *
1749 * lazy.first(2)
1750 * #=> [21, 23]
1751 *
1752 * lazy.force
1753 * #=> [21, 23, 25, 27, 29]
1754 *
1755 * Note that most Enumerable methods that could be called with or without
1756 * a block, on Enumerator::Lazy will always require a block:
1757 *
1758 * [1, 2, 3].map #=> #<Enumerator: [1, 2, 3]:map>
1759 * [1, 2, 3].lazy.map # ArgumentError: tried to call lazy map without a block
1760 *
1761 * This class allows idiomatic calculations on long or infinite sequences, as well
1762 * as chaining of calculations without constructing intermediate arrays.
1763 *
1764 * Example for working with a slowly calculated sequence:
1765 *
1766 * require 'open-uri'
1767 *
1768 * # This will fetch all URLs before selecting
1769 * # necessary data
1770 * URLS.map { |u| JSON.parse(URI.open(u).read) }
1771 * .select { |data| data.key?('stats') }
1772 * .first(5)
1773 *
1774 * # This will fetch URLs one-by-one, only till
1775 * # there is enough data to satisfy the condition
1776 * URLS.lazy.map { |u| JSON.parse(URI.open(u).read) }
1777 * .select { |data| data.key?('stats') }
1778 * .first(5)
1779 *
1780 * Ending a chain with ".eager" generates a non-lazy enumerator, which
1781 * is suitable for returning or passing to another method that expects
1782 * a normal enumerator.
1783 *
1784 * def active_items
1785 * groups
1786 * .lazy
1787 * .flat_map(&:items)
1788 * .reject(&:disabled)
1789 * .eager
1790 * end
1791 *
1792 * # This works lazily; if a checked item is found, it stops
1793 * # iteration and does not look into remaining groups.
1794 * first_checked = active_items.find(&:checked)
1795 *
1796 * # This returns an array of items like a normal enumerator does.
1797 * all_checked = active_items.select(&:checked)
1798 *
1799 */
1800
1801/*
1802 * call-seq:
1803 * Lazy.new(obj, size=nil) { |yielder, *values| block }
1804 *
1805 * Creates a new Lazy enumerator. When the enumerator is actually enumerated
1806 * (e.g. by calling #force), +obj+ will be enumerated and each value passed
1807 * to the given block. The block can yield values back using +yielder+.
1808 * For example, to create a "filter+map" enumerator:
1809 *
1810 * def filter_map(sequence)
1811 * Lazy.new(sequence) do |yielder, *values|
1812 * result = yield *values
1813 * yielder << result if result
1814 * end
1815 * end
1816 *
1817 * filter_map(1..Float::INFINITY) {|i| i*i if i.even?}.first(5)
1818 * #=> [4, 16, 36, 64, 100]
1819 */
1820static VALUE
1821lazy_initialize(int argc, VALUE *argv, VALUE self)
1822{
1823 VALUE obj, size = Qnil;
1825
1826 rb_check_arity(argc, 1, 2);
1827 LAZY_NEED_BLOCK(new);
1828 obj = argv[0];
1829 if (argc > 1) {
1830 size = argv[1];
1831 }
1832 generator = generator_allocate(rb_cGenerator);
1833 rb_block_call(generator, id_initialize, 0, 0, lazy_init_block_i, obj);
1834 enumerator_init(self, generator, sym_each, 0, 0, 0, size, 0);
1835 rb_ivar_set(self, id_receiver, obj);
1836
1837 return self;
1838}
1839
1840#if 0 /* for RDoc */
1841/*
1842 * call-seq:
1843 * lazy.to_a -> array
1844 * lazy.force -> array
1845 *
1846 * Expands +lazy+ enumerator to an array.
1847 * See Enumerable#to_a.
1848 */
1849static VALUE
1850lazy_to_a(VALUE self)
1851{
1852}
1853#endif
1854
1855static void
1856lazy_set_args(VALUE lazy, VALUE args)
1857{
1858 ID id = rb_frame_this_func();
1859 rb_ivar_set(lazy, id_method, ID2SYM(id));
1860 if (NIL_P(args)) {
1861 /* Qfalse indicates that the arguments are empty */
1862 rb_ivar_set(lazy, id_arguments, Qfalse);
1863 }
1864 else {
1865 rb_ivar_set(lazy, id_arguments, args);
1866 }
1867}
1868
1869#if 0
1870static VALUE
1871lazy_set_method(VALUE lazy, VALUE args, rb_enumerator_size_func *size_fn)
1872{
1873 struct enumerator *e = enumerator_ptr(lazy);
1874 lazy_set_args(lazy, args);
1875 e->size_fn = size_fn;
1876 return lazy;
1877}
1878#endif
1879
1880static VALUE
1881lazy_add_method(VALUE obj, int argc, VALUE *argv, VALUE args, VALUE memo,
1882 const lazyenum_funcs *fn)
1883{
1884 struct enumerator *new_e;
1885 VALUE new_obj;
1886 VALUE new_generator;
1887 VALUE new_procs;
1888 struct enumerator *e = enumerator_ptr(obj);
1889 struct proc_entry *entry;
1891 &proc_entry_data_type, entry);
1892 if (rb_block_given_p()) {
1893 RB_OBJ_WRITE(entry_obj, &entry->proc, rb_block_proc());
1894 }
1895 entry->fn = fn;
1896 RB_OBJ_WRITE(entry_obj, &entry->memo, args);
1897
1898 lazy_set_args(entry_obj, memo);
1899
1900 new_procs = RTEST(e->procs) ? rb_ary_dup(e->procs) : rb_ary_new();
1901 new_generator = lazy_generator_init(obj, new_procs);
1902 rb_ary_push(new_procs, entry_obj);
1903
1904 new_obj = enumerator_init_copy(enumerator_allocate(rb_cLazy), obj);
1905 new_e = RTYPEDDATA_GET_DATA(new_obj);
1906 RB_OBJ_WRITE(new_obj, &new_e->obj, new_generator);
1907 RB_OBJ_WRITE(new_obj, &new_e->procs, new_procs);
1908
1909 if (argc > 0) {
1910 new_e->meth = rb_to_id(*argv++);
1911 --argc;
1912 }
1913 else {
1914 new_e->meth = id_each;
1915 }
1916
1917 RB_OBJ_WRITE(new_obj, &new_e->args, rb_ary_new4(argc, argv));
1918
1919 return new_obj;
1920}
1921
1922/*
1923 * call-seq:
1924 * e.lazy -> lazy_enumerator
1925 *
1926 * Returns an Enumerator::Lazy, which redefines most Enumerable
1927 * methods to postpone enumeration and enumerate values only on an
1928 * as-needed basis.
1929 *
1930 * === Example
1931 *
1932 * The following program finds pythagorean triples:
1933 *
1934 * def pythagorean_triples
1935 * (1..Float::INFINITY).lazy.flat_map {|z|
1936 * (1..z).flat_map {|x|
1937 * (x..z).select {|y|
1938 * x**2 + y**2 == z**2
1939 * }.map {|y|
1940 * [x, y, z]
1941 * }
1942 * }
1943 * }
1944 * end
1945 * # show first ten pythagorean triples
1946 * p pythagorean_triples.take(10).force # take is lazy, so force is needed
1947 * p pythagorean_triples.first(10) # first is eager
1948 * # show pythagorean triples less than 100
1949 * p pythagorean_triples.take_while { |*, z| z < 100 }.force
1950 */
/* Enumerable#lazy: wraps the receiver in an Enumerator::Lazy over #each. */
static VALUE
enumerable_lazy(VALUE obj)
{
    VALUE result = lazy_to_enum_i(obj, sym_each, 0, 0, lazyenum_size, rb_keyword_given_p());
    /* Qfalse indicates that the Enumerator::Lazy has no method name */
    rb_ivar_set(result, id_method, Qfalse);
    return result;
}
1959
/* Allocates and initializes an Enumerator::Lazy over obj.meth(*argv). */
static VALUE
lazy_to_enum_i(VALUE obj, VALUE meth, int argc, const VALUE *argv, rb_enumerator_size_func *size_fn, int kw_splat)
{
    return enumerator_init(enumerator_allocate(rb_cLazy),
                           obj, meth, argc, argv, size_fn, Qnil, kw_splat);
}
1966
1967/*
1968 * call-seq:
1969 * lzy.to_enum(method = :each, *args) -> lazy_enum
1970 * lzy.enum_for(method = :each, *args) -> lazy_enum
1971 * lzy.to_enum(method = :each, *args) {|*args| block } -> lazy_enum
1972 * lzy.enum_for(method = :each, *args) {|*args| block } -> lazy_enum
1973 *
1974 * Similar to Object#to_enum, except it returns a lazy enumerator.
1975 * This makes it easy to define Enumerable methods that will
1976 * naturally remain lazy if called from a lazy enumerator.
1977 *
1978 * For example, continuing from the example in Object#to_enum:
1979 *
1980 * # See Object#to_enum for the definition of repeat
1981 * r = 1..Float::INFINITY
1982 * r.repeat(2).first(5) # => [1, 1, 2, 2, 3]
1983 * r.repeat(2).class # => Enumerator
1984 * r.repeat(2).map{|n| n ** 2}.first(5) # => endless loop!
1985 * # works naturally on lazy enumerator:
1986 * r.lazy.repeat(2).class # => Enumerator::Lazy
1987 * r.lazy.repeat(2).map{|n| n ** 2}.first(5) # => [1, 1, 4, 4, 9]
1988 */
1989
static VALUE
lazy_to_enum(int argc, VALUE *argv, VALUE self)
{
    VALUE lazy, meth = sym_each, super_meth;

    if (argc > 0) {
        --argc;
        meth = *argv++;
    }
    /* Methods Lazy redefines (map, select, ...) must be enumerated via
     * their hidden super aliases to avoid re-chaining lazily. */
    if (RTEST((super_meth = rb_hash_aref(lazy_use_super_method, meth)))) {
        meth = super_meth;
    }
    lazy = lazy_to_enum_i(self, meth, argc, argv, 0, rb_keyword_given_p());
    if (rb_block_given_p()) {
        /* the block supplies the size, as with Object#to_enum */
        RB_OBJ_WRITE(lazy, &enumerator_ptr(lazy)->size, rb_block_proc());
    }
    return lazy;
}
2008
/* Size function for #eager: delegates to the lazy receiver's #size. */
static VALUE
lazy_eager_size(VALUE self, VALUE args, VALUE eobj)
{
    return enum_size(self);
}
2014
2015/*
2016 * call-seq:
2017 * lzy.eager -> enum
2018 *
2019 * Returns a non-lazy Enumerator converted from the lazy enumerator.
2020 */
2021
/* Wraps the lazy receiver in a plain Enumerator over its #each. */
static VALUE
lazy_eager(VALUE self)
{
    return enumerator_init(enumerator_allocate(rb_cEnumerator),
                           self, sym_each, 0, 0, lazy_eager_size, Qnil, 0);
}
2028
/* Calls the entry's block with the current chain value as one argument
 * (packed array form when the source yielded multiple values). */
static VALUE
lazyenum_yield(VALUE proc_entry, struct MEMO *result)
{
    struct proc_entry *entry = proc_entry_ptr(proc_entry);
    return rb_proc_call_with_block(entry->proc, 1, &result->memo_value, Qnil);
}
2035
/* Calls the entry's block with the current chain value; when the value is
 * PACKED (multi-value yield folded into an array), the array is splatted
 * back into separate block arguments. */
static VALUE
lazyenum_yield_values(VALUE proc_entry, struct MEMO *result)
{
    struct proc_entry *entry = proc_entry_ptr(proc_entry);
    int argc = 1;
    const VALUE *argv = &result->memo_value;
    if (LAZY_MEMO_PACKED_P(result)) {
        const VALUE args = *argv;
        argc = RARRAY_LENINT(args);
        argv = RARRAY_CONST_PTR(args);
    }
    return rb_proc_call_with_block(entry->proc, argc, argv, Qnil);
}
2049
/* Chain step for lazy map: replaces the current value with the block's
 * result and continues the chain. */
static struct MEMO *
lazy_map_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
{
    VALUE value = lazyenum_yield_values(proc_entry, result);
    LAZY_MEMO_SET_VALUE(result, value);
    LAZY_MEMO_RESET_PACKED(result);
    return result;
}

/* map is 1:1, so the upstream size passes through unchanged. */
static VALUE
lazy_map_size(VALUE entry, VALUE receiver)
{
    return receiver;
}

static const lazyenum_funcs lazy_map_funcs = {
    lazy_map_proc, lazy_map_size,
};
2068
2069/*
2070 * call-seq:
2071 * lazy.collect { |obj| block } -> lazy_enumerator
2072 * lazy.map { |obj| block } -> lazy_enumerator
2073 *
2074 * Like Enumerable#map, but chains operation to be lazy-evaluated.
2075 *
2076 * (1..Float::INFINITY).lazy.map {|i| i**2 }
2077 * #=> #<Enumerator::Lazy: #<Enumerator::Lazy: 1..Infinity>:map>
2078 * (1..Float::INFINITY).lazy.map {|i| i**2 }.first(3)
2079 * #=> [1, 4, 9]
2080 */
2081
static VALUE
lazy_map(VALUE obj)
{
    /* unlike eager #map, the lazy form always requires a block */
    LAZY_NEED_BLOCK(map);
    return lazy_add_method(obj, 0, 0, Qnil, Qnil, &lazy_map_funcs);
}
2088
/* Argument bundle threaded through lazy_flat_map_i while re-yielding the
 * elements of a nested lazy enumerator.  Restores the truncated
 * `struct flat_map_i_arg {` header line, without which this does not
 * compile. */
struct flat_map_i_arg {
    struct MEMO *result;    /* memo carrying the current chain state */
    long index;             /* index of the next proc entry to apply */
};
2093
/* Block for iterating a nested lazy enumerator inside flat_map: feeds each
 * of its values into the remainder of the proc chain. */
static VALUE
lazy_flat_map_i(RB_BLOCK_CALL_FUNC_ARGLIST(i, y))
{
    struct flat_map_i_arg *arg = (struct flat_map_i_arg *)y;

    return lazy_yielder_yield(arg->result, arg->index, argc, argv);
}
2101
/* Chain step for lazy flat_map.  The block's result is decomposed when it
 * is an Array (or responds to to_ary), or when it looks like a lazy
 * enumerator (responds to both #force and #each); each element is then fed
 * through the rest of the chain individually.  Returns NULL when the
 * expansion was handled here, or +result+ holding the last (or scalar)
 * value to let the normal chain continue. */
static struct MEMO *
lazy_flat_map_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
{
    VALUE value = lazyenum_yield_values(proc_entry, result);
    VALUE ary = 0;
    const long proc_index = memo_index + 1;
    int break_p = LAZY_MEMO_BREAK_P(result);

    if (RB_TYPE_P(value, T_ARRAY)) {
        ary = value;
    }
    else if (rb_respond_to(value, id_force) && rb_respond_to(value, id_each)) {
        /* nested lazy enumerator: iterate it through the remaining chain,
         * shielding the BREAK flag across the inner iteration */
        struct flat_map_i_arg arg = {.result = result, .index = proc_index};
        LAZY_MEMO_RESET_BREAK(result);
        rb_block_call(value, id_each, 0, 0, lazy_flat_map_i, (VALUE)&arg);
        if (break_p) LAZY_MEMO_SET_BREAK(result);
        return 0;
    }

    if (ary || !NIL_P(ary = rb_check_array_type(value))) {
        long i;
        LAZY_MEMO_RESET_BREAK(result);
        /* all but the last element re-enter the chain here; the last one
         * falls through below so the normal chain continuation handles it */
        for (i = 0; i + 1 < RARRAY_LEN(ary); i++) {
            const VALUE argv = RARRAY_AREF(ary, i);
            lazy_yielder_yield(result, proc_index, 1, &argv);
        }
        if (break_p) LAZY_MEMO_SET_BREAK(result);
        if (i >= RARRAY_LEN(ary)) return 0;   /* empty array: nothing left */
        value = RARRAY_AREF(ary, i);
    }
    LAZY_MEMO_SET_VALUE(result, value);
    LAZY_MEMO_RESET_PACKED(result);
    return result;
}

/* no size function: the expansion makes the result size unpredictable */
static const lazyenum_funcs lazy_flat_map_funcs = {
    lazy_flat_map_proc, 0,
};
2140
2141/*
2142 * call-seq:
2143 * lazy.collect_concat { |obj| block } -> a_lazy_enumerator
2144 * lazy.flat_map { |obj| block } -> a_lazy_enumerator
2145 *
2146 * Returns a new lazy enumerator with the concatenated results of running
2147 * +block+ once for every element in the lazy enumerator.
2148 *
2149 * ["foo", "bar"].lazy.flat_map {|i| i.each_char.lazy}.force
2150 * #=> ["f", "o", "o", "b", "a", "r"]
2151 *
2152 * A value +x+ returned by +block+ is decomposed if either of
2153 * the following conditions is true:
2154 *
2155 * * +x+ responds to both each and force, which means that
2156 * +x+ is a lazy enumerator.
2157 * * +x+ is an array or responds to to_ary.
2158 *
2159 * Otherwise, +x+ is contained as-is in the return value.
2160 *
2161 * [{a:1}, {b:2}].lazy.flat_map {|i| i}.force
2162 * #=> [{:a=>1}, {:b=>2}]
2163 */
static VALUE
lazy_flat_map(VALUE obj)
{
    /* unlike eager #flat_map, the lazy form always requires a block */
    LAZY_NEED_BLOCK(flat_map);
    return lazy_add_method(obj, 0, 0, Qnil, Qnil, &lazy_flat_map_funcs);
}
2170
/* Chain step for lazy select: drops the value (returns NULL) when the
 * block is falsy, otherwise passes it through unchanged. */
static struct MEMO *
lazy_select_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
{
    VALUE chain = lazyenum_yield(proc_entry, result);
    if (!RTEST(chain)) return 0;
    return result;
}

/* no size function: filtering makes the result size unpredictable */
static const lazyenum_funcs lazy_select_funcs = {
    lazy_select_proc, 0,
};
2182
2183/*
2184 * call-seq:
2185 * lazy.find_all { |obj| block } -> lazy_enumerator
2186 * lazy.select { |obj| block } -> lazy_enumerator
2187 * lazy.filter { |obj| block } -> lazy_enumerator
2188 *
2189 * Like Enumerable#select, but chains operation to be lazy-evaluated.
2190 */
static VALUE
lazy_select(VALUE obj)
{
    /* unlike eager #select, the lazy form always requires a block */
    LAZY_NEED_BLOCK(select);
    return lazy_add_method(obj, 0, 0, Qnil, Qnil, &lazy_select_funcs);
}
2197
/* Chain step for lazy filter_map: drops falsy block results, otherwise
 * replaces the value with the block's result (map + select combined). */
static struct MEMO *
lazy_filter_map_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
{
    VALUE value = lazyenum_yield_values(proc_entry, result);
    if (!RTEST(value)) return 0;
    LAZY_MEMO_SET_VALUE(result, value);
    LAZY_MEMO_RESET_PACKED(result);
    return result;
}

/* no size function: filtering makes the result size unpredictable */
static const lazyenum_funcs lazy_filter_map_funcs = {
    lazy_filter_map_proc, 0,
};
2211
2212/*
2213 * call-seq:
2214 * lazy.filter_map { |obj| block } -> lazy_enumerator
2215 *
2216 * Like Enumerable#filter_map, but chains operation to be lazy-evaluated.
2217 *
2218 * (1..).lazy.filter_map { |i| i * 2 if i.even? }.first(5)
2219 * #=> [4, 8, 12, 16, 20]
2220 */
2221
static VALUE
lazy_filter_map(VALUE obj)
{
    /* unlike eager #filter_map, the lazy form always requires a block */
    LAZY_NEED_BLOCK(filter_map);
    return lazy_add_method(obj, 0, 0, Qnil, Qnil, &lazy_filter_map_funcs);
}
2228
/* Chain step for lazy reject: drops the value when the block is truthy
 * (the inverse of lazy_select_proc). */
static struct MEMO *
lazy_reject_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
{
    VALUE chain = lazyenum_yield(proc_entry, result);
    if (RTEST(chain)) return 0;
    return result;
}

/* no size function: filtering makes the result size unpredictable */
static const lazyenum_funcs lazy_reject_funcs = {
    lazy_reject_proc, 0,
};
2240
2241/*
2242 * call-seq:
2243 * lazy.reject { |obj| block } -> lazy_enumerator
2244 *
2245 * Like Enumerable#reject, but chains operation to be lazy-evaluated.
2246 */
2247
static VALUE
lazy_reject(VALUE obj)
{
    /* unlike eager #reject, the lazy form always requires a block */
    LAZY_NEED_BLOCK(reject);
    return lazy_add_method(obj, 0, 0, Qnil, Qnil, &lazy_reject_funcs);
}
2254
/* Chain step for blockless lazy grep: keeps only values matching the
 * stored pattern (entry->memo) via #===. */
static struct MEMO *
lazy_grep_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
{
    struct proc_entry *entry = proc_entry_ptr(proc_entry);
    VALUE chain = rb_funcall(entry->memo, id_eqq, 1, result->memo_value);
    if (!RTEST(chain)) return 0;
    return result;
}

/* Chain step for lazy grep with a block: matching values are additionally
 * transformed by the block before continuing down the chain. */
static struct MEMO *
lazy_grep_iter_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
{
    struct proc_entry *entry = proc_entry_ptr(proc_entry);
    VALUE value, chain = rb_funcall(entry->memo, id_eqq, 1, result->memo_value);

    if (!RTEST(chain)) return 0;
    value = rb_proc_call_with_block(entry->proc, 1, &(result->memo_value), Qnil);
    LAZY_MEMO_SET_VALUE(result, value);
    LAZY_MEMO_RESET_PACKED(result);

    return result;
}

static const lazyenum_funcs lazy_grep_iter_funcs = {
    lazy_grep_iter_proc, 0,
};

static const lazyenum_funcs lazy_grep_funcs = {
    lazy_grep_proc, 0,
};
2285
2286/*
2287 * call-seq:
2288 * lazy.grep(pattern) -> lazy_enumerator
2289 * lazy.grep(pattern) { |obj| block } -> lazy_enumerator
2290 *
2291 * Like Enumerable#grep, but chains operation to be lazy-evaluated.
2292 */
2293
static VALUE
lazy_grep(VALUE obj, VALUE pattern)
{
    /* choose the variant by block presence; the pattern is stored both as
     * the entry memo (for #===) and as the inspect argument list */
    const lazyenum_funcs *const funcs = rb_block_given_p() ?
        &lazy_grep_iter_funcs : &lazy_grep_funcs;
    return lazy_add_method(obj, 0, 0, pattern, rb_ary_new3(1, pattern), funcs);
}
2301
/* Chain step for blockless lazy grep_v: keeps only values NOT matching the
 * stored pattern (entry->memo) via #===. */
static struct MEMO *
lazy_grep_v_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
{
    struct proc_entry *entry = proc_entry_ptr(proc_entry);
    VALUE chain = rb_funcall(entry->memo, id_eqq, 1, result->memo_value);
    if (RTEST(chain)) return 0;
    return result;
}

/* Chain step for lazy grep_v with a block: non-matching values are
 * additionally transformed by the block before continuing. */
static struct MEMO *
lazy_grep_v_iter_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
{
    struct proc_entry *entry = proc_entry_ptr(proc_entry);
    VALUE value, chain = rb_funcall(entry->memo, id_eqq, 1, result->memo_value);

    if (RTEST(chain)) return 0;
    value = rb_proc_call_with_block(entry->proc, 1, &(result->memo_value), Qnil);
    LAZY_MEMO_SET_VALUE(result, value);
    LAZY_MEMO_RESET_PACKED(result);

    return result;
}

static const lazyenum_funcs lazy_grep_v_iter_funcs = {
    lazy_grep_v_iter_proc, 0,
};

static const lazyenum_funcs lazy_grep_v_funcs = {
    lazy_grep_v_proc, 0,
};
2332
2333/*
2334 * call-seq:
2335 * lazy.grep_v(pattern) -> lazy_enumerator
2336 * lazy.grep_v(pattern) { |obj| block } -> lazy_enumerator
2337 *
2338 * Like Enumerable#grep_v, but chains operation to be lazy-evaluated.
2339 */
2340
2341static VALUE
2342lazy_grep_v(VALUE obj, VALUE pattern)
2343{
2344 const lazyenum_funcs *const funcs = rb_block_given_p() ?
2345 &lazy_grep_v_iter_funcs : &lazy_grep_v_funcs;
2346 return lazy_add_method(obj, 0, 0, pattern, rb_ary_new3(1, pattern), funcs);
2347}
2348
/* rb_rescue2 body: advance an enumerator by one element (may raise StopIteration). */
static VALUE
call_next(VALUE obj)
{
    return rb_funcall(obj, id_next, 0);
}
2354
/* rb_rescue2 handler: an exhausted source pads the zipped tuple with nil. */
static VALUE
next_stopped(VALUE obj, VALUE _)
{
    return Qnil;
}
2360
2361static struct MEMO *
2362lazy_zip_arrays_func(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2363{
2364 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2365 VALUE ary, arrays = entry->memo;
2366 VALUE memo = rb_ary_entry(memos, memo_index);
2367 long i, count = NIL_P(memo) ? 0 : NUM2LONG(memo);
2368
2369 ary = rb_ary_new2(RARRAY_LEN(arrays) + 1);
2370 rb_ary_push(ary, result->memo_value);
2371 for (i = 0; i < RARRAY_LEN(arrays); i++) {
2372 rb_ary_push(ary, rb_ary_entry(RARRAY_AREF(arrays, i), count));
2373 }
2374 LAZY_MEMO_SET_VALUE(result, ary);
2375 rb_ary_store(memos, memo_index, LONG2NUM(++count));
2376 return result;
2377}
2378
2379static struct MEMO *
2380lazy_zip_func(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2381{
2382 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2383 VALUE arg = rb_ary_entry(memos, memo_index);
2384 VALUE zip_args = entry->memo;
2385 VALUE ary, v;
2386 long i;
2387
2388 if (NIL_P(arg)) {
2389 arg = rb_ary_new2(RARRAY_LEN(zip_args));
2390 for (i = 0; i < RARRAY_LEN(zip_args); i++) {
2391 rb_ary_push(arg, rb_funcall(RARRAY_AREF(zip_args, i), id_to_enum, 0));
2392 }
2393 rb_ary_store(memos, memo_index, arg);
2394 }
2395
2396 ary = rb_ary_new2(RARRAY_LEN(arg) + 1);
2397 rb_ary_push(ary, result->memo_value);
2398 for (i = 0; i < RARRAY_LEN(arg); i++) {
2399 v = rb_rescue2(call_next, RARRAY_AREF(arg, i), next_stopped, 0,
2401 rb_ary_push(ary, v);
2402 }
2403 LAZY_MEMO_SET_VALUE(result, ary);
2404 return result;
2405}
2406
/* [0]: zip against arbitrary enumerables; [1]: fast path for plain arrays. */
static const lazyenum_funcs lazy_zip_funcs[] = {
    {lazy_zip_func, lazy_receiver_size,},
    {lazy_zip_arrays_func, lazy_receiver_size,},
};
2411
2412/*
2413 * call-seq:
2414 * lazy.zip(arg, ...) -> lazy_enumerator
2415 * lazy.zip(arg, ...) { |arr| block } -> nil
2416 *
2417 * Like Enumerable#zip, but chains operation to be lazy-evaluated.
2418 * However, if a block is given to zip, values are enumerated immediately.
2419 */
static VALUE
lazy_zip(int argc, VALUE *argv, VALUE obj)
{
    VALUE ary, v;
    long i;
    const lazyenum_funcs *funcs = &lazy_zip_funcs[1];

    /* With a block, zip enumerates immediately (same as eager #zip). */
    if (rb_block_given_p()) {
        return rb_call_super(argc, argv);
    }

    ary = rb_ary_new2(argc);
    for (i = 0; i < argc; i++) {
        v = rb_check_array_type(argv[i]);
        if (NIL_P(v)) {
            /* Not all arguments convert to arrays: validate the rest respond
             * to #each, then use the generic external-enumerator path. */
            for (; i < argc; i++) {
                if (!rb_respond_to(argv[i], id_each)) {
                    rb_raise(rb_eTypeError, "wrong argument type %"PRIsVALUE" (must respond to :each)",
                             rb_obj_class(argv[i]));
                }
            }
            ary = rb_ary_new4(argc, argv);
            funcs = &lazy_zip_funcs[0];
            break;
        }
        rb_ary_push(ary, v);
    }

    return lazy_add_method(obj, 0, 0, ary, ary, funcs);
}
2450
2451static struct MEMO *
2452lazy_take_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2453{
2454 long remain;
2455 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2456 VALUE memo = rb_ary_entry(memos, memo_index);
2457
2458 if (NIL_P(memo)) {
2459 memo = entry->memo;
2460 }
2461
2462 remain = NUM2LONG(memo);
2463 if (--remain == 0) LAZY_MEMO_SET_BREAK(result);
2464 rb_ary_store(memos, memo_index, LONG2NUM(remain));
2465 return result;
2466}
2467
2468static VALUE
2469lazy_take_size(VALUE entry, VALUE receiver)
2470{
2471 long len = NUM2LONG(RARRAY_AREF(rb_ivar_get(entry, id_arguments), 0));
2472 if (NIL_P(receiver) || (FIXNUM_P(receiver) && FIX2LONG(receiver) < len))
2473 return receiver;
2474 return LONG2NUM(len);
2475}
2476
/* Precheck: take(0) produces nothing, so the source need not be iterated at all. */
static int
lazy_take_precheck(VALUE proc_entry)
{
    struct proc_entry *entry = proc_entry_ptr(proc_entry);
    return entry->memo != INT2FIX(0);
}
2483
/* Lazy#take: per-element countdown, size helper, and a zero-take precheck. */
static const lazyenum_funcs lazy_take_funcs = {
    lazy_take_proc, lazy_take_size, lazy_take_precheck,
};
2487
2488/*
2489 * call-seq:
2490 * lazy.take(n) -> lazy_enumerator
2491 *
2492 * Like Enumerable#take, but chains operation to be lazy-evaluated.
2493 */
2494
static VALUE
lazy_take(VALUE obj, VALUE n)
{
    long len = NUM2LONG(n);

    if (len < 0) {
        rb_raise(rb_eArgError, "attempt to take negative size");
    }

    /* Re-box so the memoized count is a plain Integer. */
    n = LONG2NUM(len); /* no more conversion */

    return lazy_add_method(obj, 0, 0, n, rb_ary_new3(1, n), &lazy_take_funcs);
}
2508
2509static struct MEMO *
2510lazy_take_while_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2511{
2512 VALUE take = lazyenum_yield_values(proc_entry, result);
2513 if (!RTEST(take)) {
2514 LAZY_MEMO_SET_BREAK(result);
2515 return 0;
2516 }
2517 return result;
2518}
2519
/* Lazy#take_while: pass elements until the block first returns a falsy value. */
static const lazyenum_funcs lazy_take_while_funcs = {
    lazy_take_while_proc, 0,
};
2523
2524/*
2525 * call-seq:
2526 * lazy.take_while { |obj| block } -> lazy_enumerator
2527 *
2528 * Like Enumerable#take_while, but chains operation to be lazy-evaluated.
2529 */
2530
static VALUE
lazy_take_while(VALUE obj)
{
    /* A block is mandatory for take_while. */
    LAZY_NEED_BLOCK(take_while);
    return lazy_add_method(obj, 0, 0, Qnil, Qnil, &lazy_take_while_funcs);
}
2537
2538static VALUE
2539lazy_drop_size(VALUE proc_entry, VALUE receiver)
2540{
2541 long len = NUM2LONG(RARRAY_AREF(rb_ivar_get(proc_entry, id_arguments), 0));
2542 if (NIL_P(receiver))
2543 return receiver;
2544 if (FIXNUM_P(receiver)) {
2545 len = FIX2LONG(receiver) - len;
2546 return LONG2FIX(len < 0 ? 0 : len);
2547 }
2548 return rb_funcall(receiver, '-', 1, LONG2NUM(len));
2549}
2550
2551static struct MEMO *
2552lazy_drop_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2553{
2554 long remain;
2555 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2556 VALUE memo = rb_ary_entry(memos, memo_index);
2557
2558 if (NIL_P(memo)) {
2559 memo = entry->memo;
2560 }
2561 remain = NUM2LONG(memo);
2562 if (remain > 0) {
2563 --remain;
2564 rb_ary_store(memos, memo_index, LONG2NUM(remain));
2565 return 0;
2566 }
2567
2568 return result;
2569}
2570
/* Lazy#drop: skip the first n elements, then pass the rest through. */
static const lazyenum_funcs lazy_drop_funcs = {
    lazy_drop_proc, lazy_drop_size,
};
2574
2575/*
2576 * call-seq:
2577 * lazy.drop(n) -> lazy_enumerator
2578 *
2579 * Like Enumerable#drop, but chains operation to be lazy-evaluated.
2580 */
2581
2582static VALUE
2583lazy_drop(VALUE obj, VALUE n)
2584{
2585 long len = NUM2LONG(n);
2586 VALUE argv[2];
2587 argv[0] = sym_each;
2588 argv[1] = n;
2589
2590 if (len < 0) {
2591 rb_raise(rb_eArgError, "attempt to drop negative size");
2592 }
2593
2594 return lazy_add_method(obj, 2, argv, n, rb_ary_new3(1, n), &lazy_drop_funcs);
2595}
2596
2597static struct MEMO *
2598lazy_drop_while_proc(VALUE proc_entry, struct MEMO* result, VALUE memos, long memo_index)
2599{
2600 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2601 VALUE memo = rb_ary_entry(memos, memo_index);
2602
2603 if (NIL_P(memo)) {
2604 memo = entry->memo;
2605 }
2606
2607 if (!RTEST(memo)) {
2608 VALUE drop = lazyenum_yield_values(proc_entry, result);
2609 if (RTEST(drop)) return 0;
2610 rb_ary_store(memos, memo_index, Qtrue);
2611 }
2612 return result;
2613}
2614
/* Lazy#drop_while: skip leading elements while the block is truthy. */
static const lazyenum_funcs lazy_drop_while_funcs = {
    lazy_drop_while_proc, 0,
};
2618
2619/*
2620 * call-seq:
2621 * lazy.drop_while { |obj| block } -> lazy_enumerator
2622 *
2623 * Like Enumerable#drop_while, but chains operation to be lazy-evaluated.
2624 */
2625
static VALUE
lazy_drop_while(VALUE obj)
{
    /* Initial memo Qfalse means "still dropping"; the proc flips it to true. */
    LAZY_NEED_BLOCK(drop_while);
    return lazy_add_method(obj, 0, 0, Qfalse, Qnil, &lazy_drop_while_funcs);
}
2632
2633static int
2634lazy_uniq_check(VALUE chain, VALUE memos, long memo_index)
2635{
2636 VALUE hash = rb_ary_entry(memos, memo_index);
2637
2638 if (NIL_P(hash)) {
2639 hash = rb_obj_hide(rb_hash_new());
2640 rb_ary_store(memos, memo_index, hash);
2641 }
2642
2643 return rb_hash_add_new_element(hash, chain, Qfalse);
2644}
2645
2646static struct MEMO *
2647lazy_uniq_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2648{
2649 if (lazy_uniq_check(result->memo_value, memos, memo_index)) return 0;
2650 return result;
2651}
2652
2653static struct MEMO *
2654lazy_uniq_iter_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2655{
2656 VALUE chain = lazyenum_yield(proc_entry, result);
2657
2658 if (lazy_uniq_check(chain, memos, memo_index)) return 0;
2659 return result;
2660}
2661
/* Lazy#uniq with a block: dedupe by the block-computed key. */
static const lazyenum_funcs lazy_uniq_iter_funcs = {
    lazy_uniq_iter_proc, 0,
};
2665
/* Lazy#uniq without a block: dedupe by the element itself. */
static const lazyenum_funcs lazy_uniq_funcs = {
    lazy_uniq_proc, 0,
};
2669
2670/*
2671 * call-seq:
2672 * lazy.uniq -> lazy_enumerator
2673 * lazy.uniq { |item| block } -> lazy_enumerator
2674 *
2675 * Like Enumerable#uniq, but chains operation to be lazy-evaluated.
2676 */
2677
2678static VALUE
2679lazy_uniq(VALUE obj)
2680{
2681 const lazyenum_funcs *const funcs =
2682 rb_block_given_p() ? &lazy_uniq_iter_funcs : &lazy_uniq_funcs;
2683 return lazy_add_method(obj, 0, 0, Qnil, Qnil, funcs);
2684}
2685
2686static struct MEMO *
2687lazy_compact_proc(VALUE proc_entry, struct MEMO *result, VALUE memos, long memo_index)
2688{
2689 if (NIL_P(result->memo_value)) return 0;
2690 return result;
2691}
2692
/* Lazy#compact: drop nil elements. */
static const lazyenum_funcs lazy_compact_funcs = {
    lazy_compact_proc, 0,
};
2696
2697/*
2698 * call-seq:
2699 * lazy.compact -> lazy_enumerator
2700 *
2701 * Like Enumerable#compact, but chains operation to be lazy-evaluated.
2702 */
2703
static VALUE
lazy_compact(VALUE obj)
{
    /* No per-chain state needed: the proc simply filters out nil values. */
    return lazy_add_method(obj, 0, 0, Qnil, Qnil, &lazy_compact_funcs);
}
2709
2710static struct MEMO *
2711lazy_with_index_proc(VALUE proc_entry, struct MEMO* result, VALUE memos, long memo_index)
2712{
2713 struct proc_entry *entry = proc_entry_ptr(proc_entry);
2714 VALUE memo = rb_ary_entry(memos, memo_index);
2715 VALUE argv[2];
2716
2717 if (NIL_P(memo)) {
2718 memo = entry->memo;
2719 }
2720
2721 argv[0] = result->memo_value;
2722 argv[1] = memo;
2723 if (entry->proc) {
2724 rb_proc_call_with_block(entry->proc, 2, argv, Qnil);
2725 LAZY_MEMO_RESET_PACKED(result);
2726 }
2727 else {
2728 LAZY_MEMO_SET_VALUE(result, rb_ary_new_from_values(2, argv));
2729 LAZY_MEMO_SET_PACKED(result);
2730 }
2731 rb_ary_store(memos, memo_index, LONG2NUM(NUM2LONG(memo) + 1));
2732 return result;
2733}
2734
/* with_index does not change the element count; the size passes through. */
static VALUE
lazy_with_index_size(VALUE proc, VALUE receiver)
{
    return receiver;
}
2740
/* Lazy#with_index: per-element index bookkeeping plus pass-through size. */
static const lazyenum_funcs lazy_with_index_funcs = {
    lazy_with_index_proc, lazy_with_index_size,
};
2744
2745/*
2746 * call-seq:
2747 * lazy.with_index(offset = 0) {|(*args), idx| block }
2748 * lazy.with_index(offset = 0)
2749 *
2750 * If a block is given, returns a lazy enumerator that will
2751 * iterate over the given block for each element
2752 * with an index, which starts from +offset+, and returns a
2753 * lazy enumerator that yields the same values (without the index).
2754 *
2755 * If a block is not given, returns a new lazy enumerator that
2756 * includes the index, starting from +offset+.
2757 *
2758 * +offset+:: the starting index to use
2759 *
2760 * See Enumerator#with_index.
2761 */
2762static VALUE
2763lazy_with_index(int argc, VALUE *argv, VALUE obj)
2764{
2765 VALUE memo;
2766
2767 rb_scan_args(argc, argv, "01", &memo);
2768 if (NIL_P(memo))
2769 memo = LONG2NUM(0);
2770
2771 return lazy_add_method(obj, 0, 0, memo, rb_ary_new_from_values(1, &memo), &lazy_with_index_funcs);
2772}
2773
2774#if 0 /* for RDoc */
2775
2776/*
2777 * call-seq:
2778 * lazy.chunk { |elt| ... } -> lazy_enumerator
2779 *
2780 * Like Enumerable#chunk, but chains operation to be lazy-evaluated.
2781 */
static VALUE
lazy_chunk(VALUE self)
{
    /* Never compiled ("#if 0" above): exists only so RDoc picks up the call-seq. */
}
2786
2787/*
2788 * call-seq:
2789 * lazy.chunk_while {|elt_before, elt_after| bool } -> lazy_enumerator
2790 *
2791 * Like Enumerable#chunk_while, but chains operation to be lazy-evaluated.
2792 */
static VALUE
lazy_chunk_while(VALUE self)
{
    /* Never compiled ("#if 0" above): exists only so RDoc picks up the call-seq. */
}
2797
2798/*
2799 * call-seq:
2800 * lazy.slice_after(pattern) -> lazy_enumerator
2801 * lazy.slice_after { |elt| bool } -> lazy_enumerator
2802 *
2803 * Like Enumerable#slice_after, but chains operation to be lazy-evaluated.
2804 */
static VALUE
lazy_slice_after(VALUE self)
{
    /* Never compiled ("#if 0" above): exists only so RDoc picks up the call-seq. */
}
2809
2810/*
2811 * call-seq:
2812 * lazy.slice_before(pattern) -> lazy_enumerator
2813 * lazy.slice_before { |elt| bool } -> lazy_enumerator
2814 *
2815 * Like Enumerable#slice_before, but chains operation to be lazy-evaluated.
2816 */
static VALUE
lazy_slice_before(VALUE self)
{
    /* Never compiled ("#if 0" above): exists only so RDoc picks up the call-seq. */
}
2821
2822/*
2823 * call-seq:
2824 * lazy.slice_when {|elt_before, elt_after| bool } -> lazy_enumerator
2825 *
2826 * Like Enumerable#slice_when, but chains operation to be lazy-evaluated.
2827 */
static VALUE
lazy_slice_when(VALUE self)
{
    /* Never compiled ("#if 0" above): exists only so RDoc picks up the call-seq. */
}
2832# endif
2833
static VALUE
lazy_super(int argc, VALUE *argv, VALUE lazy)
{
    /* Run the eager Enumerable implementation, then re-wrap the result lazily. */
    return enumerable_lazy(rb_call_super(argc, argv));
}
2839
2840/*
2841 * call-seq:
2842 * enum.lazy -> lazy_enumerator
2843 *
2844 * Returns self.
2845 */
2846
static VALUE
lazy_lazy(VALUE obj)
{
    /* Already lazy: return self unchanged. */
    return obj;
}
2852
2853/*
2854 * Document-class: StopIteration
2855 *
2856 * Raised to stop the iteration, in particular by Enumerator#next. It is
2857 * rescued by Kernel#loop.
2858 *
2859 * loop do
2860 * puts "Hello"
2861 * raise StopIteration
2862 * puts "World"
2863 * end
2864 * puts "Done!"
2865 *
2866 * <em>produces:</em>
2867 *
2868 * Hello
2869 * Done!
2870 */
2871
2872/*
2873 * call-seq:
2874 * result -> value
2875 *
2876 * Returns the return value of the iterator.
2877 *
2878 * o = Object.new
2879 * def o.each
2880 * yield 1
2881 * yield 2
2882 * yield 3
2883 * 100
2884 * end
2885 *
2886 * e = o.to_enum
2887 *
2888 * puts e.next #=> 1
2889 * puts e.next #=> 2
2890 * puts e.next #=> 3
2891 *
2892 * begin
2893 * e.next
2894 * rescue StopIteration => ex
2895 * puts ex.result #=> 100
2896 * end
2897 *
2898 */
2899
static VALUE
stop_result(VALUE self)
{
    /* Fetch the memoized iterator return value (nil if never set). */
    return rb_attr_get(self, id_result);
}
2905
2906/*
2907 * Producer
2908 */
2909
/* GC hook: mark/update the three VALUE fields of struct producer. */
static void
producer_mark_and_move(void *p)
{
    struct producer *ptr = p;
    rb_gc_mark_and_move(&ptr->init);
    rb_gc_mark_and_move(&ptr->proc);
    rb_gc_mark_and_move(&ptr->size);
}
2918
2919#define producer_free RUBY_TYPED_DEFAULT_FREE
2920
/* GC accounting: only the struct itself; its fields are VALUEs tracked by GC. */
static size_t
producer_memsize(const void *p)
{
    return sizeof(struct producer);
}
2926
/* TypedData description for Enumerator::Producer's internal state. */
static const rb_data_type_t producer_data_type = {
    "producer",
    {
        producer_mark_and_move,
        producer_free,
        producer_memsize,
        producer_mark_and_move,
    },
    0, 0, RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED | RUBY_TYPED_EMBEDDABLE
};
2937
2938static struct producer *
2939producer_ptr(VALUE obj)
2940{
2941 struct producer *ptr;
2942
2943 TypedData_Get_Struct(obj, struct producer, &producer_data_type, ptr);
2944 if (!ptr || UNDEF_P(ptr->proc)) {
2945 rb_raise(rb_eArgError, "uninitialized producer");
2946 }
2947 return ptr;
2948}
2949
2950/* :nodoc: */
2951static VALUE
2952producer_allocate(VALUE klass)
2953{
2954 struct producer *ptr;
2955 VALUE obj;
2956
2957 obj = TypedData_Make_Struct(klass, struct producer, &producer_data_type, ptr);
2958 ptr->init = Qundef;
2959 ptr->proc = Qundef;
2960 ptr->size = Qnil;
2961
2962 return obj;
2963}
2964
2965static VALUE
2966producer_init(VALUE obj, VALUE init, VALUE proc, VALUE size)
2967{
2968 struct producer *ptr;
2969
2970 TypedData_Get_Struct(obj, struct producer, &producer_data_type, ptr);
2971
2972 if (!ptr) {
2973 rb_raise(rb_eArgError, "unallocated producer");
2974 }
2975
2976 RB_OBJ_WRITE(obj, &ptr->init, init);
2977 RB_OBJ_WRITE(obj, &ptr->proc, proc);
2978 RB_OBJ_WRITE(obj, &ptr->size, size);
2979
2980 return obj;
2981}
2982
/* rb_rescue2 handler: a StopIteration ends the loop; surface its result value. */
static VALUE
producer_each_stop(VALUE dummy, VALUE exc)
{
    return rb_attr_get(exc, id_result);
}
2988
2989NORETURN(static VALUE producer_each_i(VALUE obj));
2990
2991static VALUE
2992producer_each_i(VALUE obj)
2993{
2994 struct producer *ptr;
2995 VALUE init, proc, curr;
2996
2997 ptr = producer_ptr(obj);
2998 init = ptr->init;
2999 proc = ptr->proc;
3000
3001 if (UNDEF_P(init)) {
3002 curr = Qnil;
3003 }
3004 else {
3005 rb_yield(init);
3006 curr = init;
3007 }
3008
3009 for (;;) {
3010 curr = rb_funcall(proc, id_call, 1, curr);
3011 rb_yield(curr);
3012 }
3013
3015}
3016
3017/* :nodoc: */
static VALUE
producer_each(VALUE obj)
{
    rb_need_block();

    /* Run the infinite generator, stopping cleanly on StopIteration. */
    return rb_rescue2(producer_each_i, obj, producer_each_stop, (VALUE)0, rb_eStopIteration, (VALUE)0);
}
3025
3026static VALUE
3027producer_size(VALUE obj, VALUE args, VALUE eobj)
3028{
3029 struct producer *ptr = producer_ptr(obj);
3030 VALUE size = ptr->size;
3031
3032 if (NIL_P(size)) return Qnil;
3033 if (RB_INTEGER_TYPE_P(size) || RB_FLOAT_TYPE_P(size)) return size;
3034
3035 return rb_funcall(size, id_call, 0);
3036}
3037
3038/*
3039 * call-seq:
3040 * Enumerator.produce(initial = nil, size: nil) { |prev| block } -> enumerator
3041 *
3042 * Creates an infinite enumerator from any block, just called over and
3043 * over. The result of the previous iteration is passed to the next one.
3044 * If +initial+ is provided, it is passed to the first iteration, and
3045 * becomes the first element of the enumerator; if it is not provided,
3046 * the first iteration receives +nil+, and its result becomes the first
3047 * element of the iterator.
3048 *
3049 * Raising StopIteration from the block stops an iteration.
3050 *
3051 * Enumerator.produce(1, &:succ) # => enumerator of 1, 2, 3, 4, ....
3052 *
3053 * Enumerator.produce { rand(10) } # => infinite random number sequence
3054 *
3055 * ancestors = Enumerator.produce(node) { |prev| node = prev.parent or raise StopIteration }
3056 * enclosing_section = ancestors.find { |n| n.type == :section }
3057 *
3058 * Using ::produce together with Enumerable methods like Enumerable#detect,
3059 * Enumerable#slice_after, Enumerable#take_while can provide Enumerator-based alternatives
3060 * for +while+ and +until+ cycles:
3061 *
3062 * # Find next Tuesday
3063 * require "date"
3064 * Enumerator.produce(Date.today, &:succ).detect(&:tuesday?)
3065 *
3066 * # Simple lexer:
3067 * require "strscan"
3068 * scanner = StringScanner.new("7+38/6")
3069 * PATTERN = %r{\d+|[-/+*]}
3070 * Enumerator.produce { scanner.scan(PATTERN) }.slice_after { scanner.eos? }.first
3071 * # => ["7", "+", "38", "/", "6"]
3072 *
3073 * The optional +size+ keyword argument specifies the size of the enumerator,
3074 * which can be retrieved by Enumerator#size. It can be an integer,
3075 * +Float::INFINITY+, a callable object (such as a lambda), or +nil+ to
3076 * indicate unknown size. When not specified, the size defaults to
3077 * +Float::INFINITY+.
3078 *
3079 * # Infinite enumerator
3080 * enum = Enumerator.produce(1, size: Float::INFINITY, &:succ)
3081 * enum.size # => Float::INFINITY
3082 *
3083 * # Finite enumerator with known/computable size
3084 * abs_dir = File.expand_path("./baz") # => "/foo/bar/baz"
3085 * traverser = Enumerator.produce(abs_dir, size: -> { abs_dir.count("/") + 1 }) {
3086 * raise StopIteration if it == "/"
3087 * File.dirname(it)
3088 * }
3089 * traverser.size # => 4
3090 *
3091 * # Finite enumerator with unknown size
3092 * calendar = Enumerator.produce(Date.today, size: nil) {
3093 * it.monday? ? raise(StopIteration) : it + 1
3094 * }
3095 * calendar.size # => nil
3096 */
static VALUE
enumerator_s_produce(int argc, VALUE *argv, VALUE klass)
{
    VALUE init, producer, opts, size;
    ID keyword_ids[1];

    if (!rb_block_given_p()) rb_raise(rb_eArgError, "no block given");

    keyword_ids[0] = rb_intern("size");
    rb_scan_args_kw(RB_SCAN_ARGS_LAST_HASH_KEYWORDS, argc, argv, "01:", &init, &opts);
    rb_get_kwargs(opts, keyword_ids, 0, 1, &size);

    /* Default size is infinity; an explicit hint is normalized to a usable form. */
    size = UNDEF_P(size) ? DBL2NUM(HUGE_VAL) : convert_to_feasible_size_value(size);

    /* No positional argument (or only the keyword hash) means no seed value. */
    if (argc == 0 || (argc == 1 && !NIL_P(opts))) {
        init = Qundef;
    }

    producer = producer_init(producer_allocate(rb_cEnumProducer), init, rb_block_proc(), size);

    return rb_enumeratorize_with_size_kw(producer, sym_each, 0, 0, producer_size, RB_NO_KEYWORDS);
}
3119
3120/*
3121 * Document-class: Enumerator::Chain
3122 *
3123 * Enumerator::Chain is a subclass of Enumerator, which represents a
3124 * chain of enumerables that works as a single enumerator.
3125 *
3126 * This type of objects can be created by Enumerable#chain and
3127 * Enumerator#+.
3128 */
3129
/* GC hook: mark/update the enum list held by struct enum_chain. */
static void
enum_chain_mark_and_move(void *p)
{
    struct enum_chain *ptr = p;
    rb_gc_mark_and_move(&ptr->enums);
}
3136
3137#define enum_chain_free RUBY_TYPED_DEFAULT_FREE
3138
/* GC accounting: only the struct itself; the enum array is tracked by GC. */
static size_t
enum_chain_memsize(const void *p)
{
    return sizeof(struct enum_chain);
}
3144
/* TypedData description for Enumerator::Chain's internal state. */
static const rb_data_type_t enum_chain_data_type = {
    "chain",
    {
        enum_chain_mark_and_move,
        enum_chain_free,
        enum_chain_memsize,
        enum_chain_mark_and_move,
    },
    0, 0, RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED
};
3155
3156static struct enum_chain *
3157enum_chain_ptr(VALUE obj)
3158{
3159 struct enum_chain *ptr;
3160
3161 TypedData_Get_Struct(obj, struct enum_chain, &enum_chain_data_type, ptr);
3162 if (!ptr || UNDEF_P(ptr->enums)) {
3163 rb_raise(rb_eArgError, "uninitialized chain");
3164 }
3165 return ptr;
3166}
3167
3168/* :nodoc: */
3169static VALUE
3170enum_chain_allocate(VALUE klass)
3171{
3172 struct enum_chain *ptr;
3173 VALUE obj;
3174
3175 obj = TypedData_Make_Struct(klass, struct enum_chain, &enum_chain_data_type, ptr);
3176 ptr->enums = Qundef;
3177 ptr->pos = -1;
3178
3179 return obj;
3180}
3181
3182/*
3183 * call-seq:
3184 * Enumerator::Chain.new(*enums) -> enum
3185 *
3186 * Generates a new enumerator object that iterates over the elements
3187 * of given enumerable objects in sequence.
3188 *
3189 * e = Enumerator::Chain.new(1..3, [4, 5])
3190 * e.to_a #=> [1, 2, 3, 4, 5]
3191 * e.size #=> 5
3192 */
3193static VALUE
3194enum_chain_initialize(VALUE obj, VALUE enums)
3195{
3196 struct enum_chain *ptr;
3197
3198 rb_check_frozen(obj);
3199 TypedData_Get_Struct(obj, struct enum_chain, &enum_chain_data_type, ptr);
3200
3201 if (!ptr) rb_raise(rb_eArgError, "unallocated chain");
3202
3203 RB_OBJ_WRITE(obj, &ptr->enums, rb_ary_freeze(enums));
3204 ptr->pos = -1;
3205
3206 return obj;
3207}
3208
3209static VALUE
3210new_enum_chain(VALUE enums)
3211{
3212 long i;
3213 VALUE obj = enum_chain_initialize(enum_chain_allocate(rb_cEnumChain), enums);
3214
3215 for (i = 0; i < RARRAY_LEN(enums); i++) {
3216 if (RTEST(rb_obj_is_kind_of(RARRAY_AREF(enums, i), rb_cLazy))) {
3217 return enumerable_lazy(obj);
3218 }
3219 }
3220
3221 return obj;
3222}
3223
3224/* :nodoc: */
static VALUE
enum_chain_init_copy(VALUE obj, VALUE orig)
{
    struct enum_chain *ptr0, *ptr1;

    if (!OBJ_INIT_COPY(obj, orig)) return obj;
    ptr0 = enum_chain_ptr(orig);

    TypedData_Get_Struct(obj, struct enum_chain, &enum_chain_data_type, ptr1);

    if (!ptr1) rb_raise(rb_eArgError, "unallocated chain");

    /* Share the frozen enum list and copy the iteration cursor. */
    RB_OBJ_WRITE(obj, &ptr1->enums, ptr0->enums);
    ptr1->pos = ptr0->pos;

    return obj;
}
3242
3243static VALUE
3244enum_chain_total_size(VALUE enums)
3245{
3246 VALUE total = INT2FIX(0);
3247 long i;
3248
3249 for (i = 0; i < RARRAY_LEN(enums); i++) {
3250 VALUE size = enum_size(RARRAY_AREF(enums, i));
3251
3252 if (NIL_P(size) || (RB_FLOAT_TYPE_P(size) && isinf(NUM2DBL(size)))) {
3253 return size;
3254 }
3255 if (!RB_INTEGER_TYPE_P(size)) {
3256 return Qnil;
3257 }
3258
3259 total = rb_funcall(total, '+', 1, size);
3260 }
3261
3262 return total;
3263}
3264
3265/*
3266 * call-seq:
3267 * obj.size -> int, Float::INFINITY or nil
3268 *
3269 * Returns the total size of the enumerator chain calculated by
3270 * summing up the size of each enumerable in the chain. If any of the
3271 * enumerables reports its size as nil or Float::INFINITY, that value
3272 * is returned as the total size.
3273 */
static VALUE
enum_chain_size(VALUE obj)
{
    /* Delegates to the summation helper over the chain's enum list. */
    return enum_chain_total_size(enum_chain_ptr(obj)->enums);
}
3279
/* Size function used by RETURN_SIZED_ENUMERATOR when no args were passed. */
static VALUE
enum_chain_enum_size(VALUE obj, VALUE args, VALUE eobj)
{
    return enum_chain_size(obj);
}
3285
/* Size function used when extra #each arguments make the size unknowable. */
static VALUE
enum_chain_enum_no_size(VALUE obj, VALUE args, VALUE eobj)
{
    return Qnil;
}
3291
3292/*
3293 * call-seq:
3294 * obj.each(*args) { |...| ... } -> obj
3295 * obj.each(*args) -> enumerator
3296 *
3297 * Iterates over the elements of the first enumerable by calling the
3298 * "each" method on it with the given arguments, then proceeds to the
3299 * following enumerables in sequence until all of the enumerables are
3300 * exhausted.
3301 *
3302 * If no block is given, returns an enumerator.
3303 */
static VALUE
enum_chain_each(int argc, VALUE *argv, VALUE obj)
{
    VALUE enums, block;
    struct enum_chain *objptr;
    long i;

    /* With extra arguments the total size is unknowable; report nil then. */
    RETURN_SIZED_ENUMERATOR(obj, argc, argv, argc > 0 ? enum_chain_enum_no_size : enum_chain_enum_size);

    objptr = enum_chain_ptr(obj);
    enums = objptr->enums;
    block = rb_block_proc();

    for (i = 0; i < RARRAY_LEN(enums); i++) {
        /* Track progress so #rewind knows how far the chain has advanced. */
        objptr->pos = i;
        rb_funcall_with_block(RARRAY_AREF(enums, i), id_each, argc, argv, block);
    }

    return obj;
}
3324
3325/*
3326 * call-seq:
3327 * obj.rewind -> obj
3328 *
3329 * Rewinds the enumerator chain by calling the "rewind" method on each
3330 * enumerable in reverse order. Each call is performed only if the
3331 * enumerable responds to the method.
3332 */
static VALUE
enum_chain_rewind(VALUE obj)
{
    struct enum_chain *objptr = enum_chain_ptr(obj);
    VALUE enums = objptr->enums;
    long i;

    /* Walk backwards from the current position, rewinding each source that
     * supports #rewind (rb_check_funcall skips those that don't), resetting
     * pos step by step until it is -1 again. */
    for (i = objptr->pos; 0 <= i && i < RARRAY_LEN(enums); objptr->pos = --i) {
        rb_check_funcall(RARRAY_AREF(enums, i), id_rewind, 0, 0);
    }

    return obj;
}
3346
static VALUE
inspect_enum_chain(VALUE obj, VALUE dummy, int recur)
{
    VALUE klass = rb_obj_class(obj);
    struct enum_chain *ptr;

    TypedData_Get_Struct(obj, struct enum_chain, &enum_chain_data_type, ptr);

    /* Allocated but never initialized. */
    if (!ptr || UNDEF_P(ptr->enums)) {
        return rb_sprintf("#<%"PRIsVALUE": uninitialized>", rb_class_path(klass));
    }

    /* Self-referential chain: elide the contents instead of recursing forever. */
    if (recur) {
        return rb_sprintf("#<%"PRIsVALUE": ...>", rb_class_path(klass));
    }

    return rb_sprintf("#<%"PRIsVALUE": %+"PRIsVALUE">", rb_class_path(klass), ptr->enums);
}
3365
3366/*
3367 * call-seq:
3368 * obj.inspect -> string
3369 *
3370 * Returns a printable version of the enumerator chain.
3371 */
static VALUE
enum_chain_inspect(VALUE obj)
{
    /* rb_exec_recursive guards against chains that contain themselves. */
    return rb_exec_recursive(inspect_enum_chain, obj, 0);
}
3377
3378/*
3379 * call-seq:
3380 * e.chain(*enums) -> enumerator
3381 *
3382 * Returns an enumerator object generated from this enumerator and
3383 * given enumerables.
3384 *
3385 * e = (1..3).chain([4, 5])
3386 * e.to_a #=> [1, 2, 3, 4, 5]
3387 */
3388static VALUE
3389enum_chain(int argc, VALUE *argv, VALUE obj)
3390{
3391 VALUE enums = rb_ary_new_from_values(1, &obj);
3392 rb_ary_cat(enums, argv, argc);
3393 return new_enum_chain(enums);
3394}
3395
3396/*
3397 * call-seq:
3398 * e + enum -> enumerator
3399 *
3400 * Returns an enumerator object generated from this enumerator and a
3401 * given enumerable.
3402 *
3403 * e = (1..3).each + [4, 5]
3404 * e.to_a #=> [1, 2, 3, 4, 5]
3405 */
static VALUE
enumerator_plus(VALUE obj, VALUE eobj)
{
    /* Two-element special case of #chain. */
    return new_enum_chain(rb_ary_new_from_args(2, obj, eobj));
}
3411
3412/*
3413 * Document-class: Enumerator::Product
3414 *
3415 * Enumerator::Product generates a Cartesian product of any number of
3416 * enumerable objects. Iterating over the product of enumerable
3417 * objects is roughly equivalent to nested each_entry loops where the
3418 * loop for the rightmost object is put innermost.
3419 *
3420 * innings = Enumerator::Product.new(1..9, ['top', 'bottom'])
3421 *
3422 * innings.each do |i, h|
3423 * p [i, h]
3424 * end
3425 * # [1, "top"]
3426 * # [1, "bottom"]
3427 * # [2, "top"]
3428 * # [2, "bottom"]
3429 * # [3, "top"]
3430 * # [3, "bottom"]
3431 * # ...
3432 * # [9, "top"]
3433 * # [9, "bottom"]
3434 *
3435 * The method used against each enumerable object is `each_entry`
3436 * instead of `each` so that the product of N enumerable objects
3437 * yields an array of exactly N elements in each iteration.
3438 *
3439 * When no enumerator is given, it calls a given block once yielding
3440 * an empty argument list.
3441 *
3442 * This type of objects can be created by Enumerator.product.
3443 */
3444
/* GC hook: mark/update the enum list held by struct enum_product. */
static void
enum_product_mark_and_move(void *p)
{
    struct enum_product *ptr = p;
    rb_gc_mark_and_move(&ptr->enums);
}
3451
3452#define enum_product_free RUBY_TYPED_DEFAULT_FREE
3453
/* GC accounting: only the struct itself; the enum array is tracked by GC. */
static size_t
enum_product_memsize(const void *p)
{
    return sizeof(struct enum_product);
}
3459
/* TypedData description for Enumerator::Product's internal state. */
static const rb_data_type_t enum_product_data_type = {
    "product",
    {
        enum_product_mark_and_move,
        enum_product_free,
        enum_product_memsize,
        enum_product_mark_and_move,
    },
    0, 0, RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED
};
3470
3471static struct enum_product *
3472enum_product_ptr(VALUE obj)
3473{
3474 struct enum_product *ptr;
3475
3476 TypedData_Get_Struct(obj, struct enum_product, &enum_product_data_type, ptr);
3477 if (!ptr || UNDEF_P(ptr->enums)) {
3478 rb_raise(rb_eArgError, "uninitialized product");
3479 }
3480 return ptr;
3481}
3482
3483/* :nodoc: */
3484static VALUE
3485enum_product_allocate(VALUE klass)
3486{
3487 struct enum_product *ptr;
3488 VALUE obj;
3489
3490 obj = TypedData_Make_Struct(klass, struct enum_product, &enum_product_data_type, ptr);
3491 ptr->enums = Qundef;
3492
3493 return obj;
3494}
3495
3496/*
3497 * call-seq:
3498 * Enumerator::Product.new(*enums) -> enum
3499 *
3500 * Generates a new enumerator object that generates a Cartesian
3501 * product of given enumerable objects.
3502 *
3503 * e = Enumerator::Product.new(1..3, [4, 5])
3504 * e.to_a #=> [[1, 4], [1, 5], [2, 4], [2, 5], [3, 4], [3, 5]]
3505 * e.size #=> 6
3506 */
static VALUE
enum_product_initialize(int argc, VALUE *argv, VALUE obj)
{
    struct enum_product *ptr;
    VALUE enums = Qnil, options = Qnil;

    rb_scan_args(argc, argv, "*:", &enums, &options);

    /* No keyword arguments are accepted; reject any that were passed. */
    if (!NIL_P(options) && !RHASH_EMPTY_P(options)) {
        rb_exc_raise(rb_keyword_error_new("unknown", rb_hash_keys(options)));
    }

    rb_check_frozen(obj);
    TypedData_Get_Struct(obj, struct enum_product, &enum_product_data_type, ptr);

    if (!ptr) rb_raise(rb_eArgError, "unallocated product");

    /* Freeze the list so the product's sources cannot change afterwards. */
    RB_OBJ_WRITE(obj, &ptr->enums, rb_ary_freeze(enums));

    return obj;
}
3528
3529/* :nodoc: */
static VALUE
enum_product_init_copy(VALUE obj, VALUE orig)
{
    struct enum_product *ptr0, *ptr1;

    if (!OBJ_INIT_COPY(obj, orig)) return obj;
    ptr0 = enum_product_ptr(orig);

    TypedData_Get_Struct(obj, struct enum_product, &enum_product_data_type, ptr1);

    if (!ptr1) rb_raise(rb_eArgError, "unallocated product");

    /* Share the frozen enum list with the copy. */
    RB_OBJ_WRITE(obj, &ptr1->enums, ptr0->enums);

    return obj;
}
3546
static VALUE
enum_product_total_size(VALUE enums)
{
    VALUE total = INT2FIX(1);
    VALUE sizes = rb_ary_hidden_new(RARRAY_LEN(enums));
    long i;

    /* First pass: any empty source makes the whole product empty. */
    for (i = 0; i < RARRAY_LEN(enums); i++) {
        VALUE size = enum_size(RARRAY_AREF(enums, i));
        if (size == INT2FIX(0)) {
            rb_ary_resize(sizes, 0);
            return size;
        }
        rb_ary_push(sizes, size);
    }
    /* Second pass: multiply the collected sizes; nil/infinite values
     * short-circuit, non-integers make the total unknown. */
    for (i = 0; i < RARRAY_LEN(sizes); i++) {
        VALUE size = RARRAY_AREF(sizes, i);

        if (NIL_P(size) || (RB_TYPE_P(size, T_FLOAT) && isinf(NUM2DBL(size)))) {
            return size;
        }
        if (!RB_INTEGER_TYPE_P(size)) {
            return Qnil;
        }

        total = rb_funcall(total, '*', 1, size);
    }

    return total;
}
3577
3578/*
3579 * call-seq:
3580 * obj.size -> int, Float::INFINITY or nil
3581 *
3582 * Returns the total size of the enumerator product calculated by
3583 * multiplying the sizes of enumerables in the product. If any of the
3584 * enumerables reports its size as nil or Float::INFINITY, that value
3585 * is returned as the size.
3586 */
3587static VALUE
3588enum_product_size(VALUE obj)
3589{
3590 return enum_product_total_size(enum_product_ptr(obj)->enums);
3591}
3592
/* rb_enumerator_size_func adapter for RETURN_SIZED_ENUMERATOR;
 * +args+ and +eobj+ are required by the signature but unused. */
static VALUE
enum_product_enum_size(VALUE obj, VALUE args, VALUE eobj)
{
    return enum_product_size(obj);
}
3598
3600 VALUE obj;
3601 VALUE block;
3602 int index;
3603 int argc;
3604 VALUE *argv;
3605};
3606
3607static VALUE product_each(VALUE, struct product_state *);
3608
/*
 * Block callback for each enumerable in the product: stores the yielded
 * value into the current tuple slot, recurses one level deeper, then
 * pops the slot on the way back so the slot can be reused for the next
 * value of this enumerable.
 */
static VALUE
product_each_i(RB_BLOCK_CALL_FUNC_ARGLIST(value, state))
{
    struct product_state *pstate = (struct product_state *)state;
    pstate->argv[pstate->index++] = value;

    VALUE val = product_each(pstate->obj, pstate);
    pstate->index--;
    return val;
}
3619
/*
 * Recursive driver of the Cartesian product: while slots remain, iterate
 * the enumerable at the current depth via each_entry (filling one slot
 * per level through product_each_i); once all slots are filled, call the
 * user block with a fresh array copy of the completed tuple.
 */
static VALUE
product_each(VALUE obj, struct product_state *pstate)
{
    struct enum_product *ptr = enum_product_ptr(obj);
    VALUE enums = ptr->enums;

    if (pstate->index < pstate->argc) {
        VALUE eobj = RARRAY_AREF(enums, pstate->index);

        rb_block_call(eobj, id_each_entry, 0, NULL, product_each_i, (VALUE)pstate);
    }
    else {
        /* Copy argv: the buffer is reused across tuples, so the block
         * must receive its own array. */
        rb_funcall(pstate->block, id_call, 1, rb_ary_new_from_values(pstate->argc, pstate->argv));
    }

    return obj;
}
3637
/*
 * Sets up the shared product_state and runs the full iteration, calling
 * +block+ once per generated tuple.  An empty product yields exactly one
 * empty tuple.
 */
static VALUE
enum_product_run(VALUE obj, VALUE block)
{
    struct enum_product *ptr = enum_product_ptr(obj);
    int argc = RARRAY_LENINT(ptr->enums);
    if (argc == 0) { /* no need to allocate state.argv */
        rb_funcall(block, id_call, 1, rb_ary_new());
        return obj;
    }

    VALUE argsbuf = 0;
    struct product_state state = {
        .obj = obj,
        .block = block,
        .index = 0,
        .argc = argc,
        /* ALLOCV keeps the tuple buffer GC-visible while iterating. */
        .argv = ALLOCV_N(VALUE, argsbuf, argc),
    };

    VALUE ret = product_each(obj, &state);
    ALLOCV_END(argsbuf);
    return ret;
}
3661
/*
 * call-seq:
 *   obj.each { |...| ... } -> obj
 *   obj.each -> enumerator
 *
 * Iterates over the elements of the first enumerable by calling the
 * "each_entry" method on it with the given arguments, then proceeds
 * to the following enumerables in sequence until all of the
 * enumerables are exhausted.
 *
 * If no block is given, returns an enumerator. Otherwise, returns self.
 */
static VALUE
enum_product_each(VALUE obj)
{
    RETURN_SIZED_ENUMERATOR(obj, 0, 0, enum_product_enum_size);

    return enum_product_run(obj, rb_block_proc());
}
3681
3682/*
3683 * call-seq:
3684 * obj.rewind -> obj
3685 *
3686 * Rewinds the product enumerator by calling the "rewind" method on
3687 * each enumerable in reverse order. Each call is performed only if
3688 * the enumerable responds to the method.
3689 */
3690static VALUE
3691enum_product_rewind(VALUE obj)
3692{
3693 struct enum_product *ptr = enum_product_ptr(obj);
3694 VALUE enums = ptr->enums;
3695 long i;
3696
3697 for (i = 0; i < RARRAY_LEN(enums); i++) {
3698 rb_check_funcall(RARRAY_AREF(enums, i), id_rewind, 0, 0);
3699 }
3700
3701 return obj;
3702}
3703
/*
 * rb_exec_recursive callback building the #inspect string.  +recur+ is
 * non-zero when the product (directly or indirectly) contains itself.
 */
static VALUE
inspect_enum_product(VALUE obj, VALUE dummy, int recur)
{
    VALUE klass = rb_obj_class(obj);
    struct enum_product *ptr;

    TypedData_Get_Struct(obj, struct enum_product, &enum_product_data_type, ptr);

    if (!ptr || UNDEF_P(ptr->enums)) {
        return rb_sprintf("#<%"PRIsVALUE": uninitialized>", rb_class_path(klass));
    }

    if (recur) {
        return rb_sprintf("#<%"PRIsVALUE": ...>", rb_class_path(klass));
    }

    /* %+PRIsVALUE formats ptr->enums via #inspect rather than #to_s. */
    return rb_sprintf("#<%"PRIsVALUE": %+"PRIsVALUE">", rb_class_path(klass), ptr->enums);
}
3722
/*
 * call-seq:
 *   obj.inspect -> string
 *
 * Returns a printable version of the product enumerator.
 */
static VALUE
enum_product_inspect(VALUE obj)
{
    /* rb_exec_recursive detects self-referential products and passes
     * recur=1 to the callback instead of looping forever. */
    return rb_exec_recursive(inspect_enum_product, obj, 0);
}
3734
/*
 * call-seq:
 *   Enumerator.product(*enums) -> enumerator
 *   Enumerator.product(*enums) { |elts| ... } -> enumerator
 *
 * Generates a new enumerator object that generates a Cartesian
 * product of given enumerable objects. This is equivalent to
 * Enumerator::Product.new.
 *
 *   e = Enumerator.product(1..3, [4, 5])
 *   e.to_a #=> [[1, 4], [1, 5], [2, 4], [2, 5], [3, 4], [3, 5]]
 *   e.size #=> 6
 *
 * When a block is given, calls the block with each N-element array
 * generated and returns +nil+.
 */
static VALUE
enumerator_s_product(int argc, VALUE *argv, VALUE klass)
{
    VALUE enums = Qnil, options = Qnil, block = Qnil;

    /* "*:&" also captures a trailing block so it is not mistaken for an
     * argument; the keyword check here reports errors before allocation. */
    rb_scan_args(argc, argv, "*:&", &enums, &options, &block);

    if (!NIL_P(options) && !RHASH_EMPTY_P(options)) {
        rb_exc_raise(rb_keyword_error_new("unknown", rb_hash_keys(options)));
    }

    /* Re-uses the instance initializer on a freshly allocated Product. */
    VALUE obj = enum_product_initialize(argc, argv, enum_product_allocate(rb_cEnumProduct));

    if (!NIL_P(block)) {
        enum_product_run(obj, block);
        return Qnil;
    }

    return obj;
}
3771
3773 struct enumerator enumerator;
3774 VALUE begin;
3775 VALUE end;
3776 VALUE step;
3777 bool exclude_end;
3778};
3779
/* Declarative GC edge list for struct arith_seq (used with
 * RUBY_TYPED_DECL_MARKING): the embedded enumerator's VALUE fields first,
 * then the arith_seq-specific fields. */
RUBY_REFERENCES(arith_seq_refs) = {
    RUBY_REF_EDGE(struct enumerator, obj),
    RUBY_REF_EDGE(struct enumerator, args),
    RUBY_REF_EDGE(struct enumerator, fib),
    RUBY_REF_EDGE(struct enumerator, dst),
    RUBY_REF_EDGE(struct enumerator, lookahead),
    RUBY_REF_EDGE(struct enumerator, feedvalue),
    RUBY_REF_EDGE(struct enumerator, stop_exc),
    RUBY_REF_EDGE(struct enumerator, size),
    RUBY_REF_EDGE(struct enumerator, procs),

    RUBY_REF_EDGE(struct arith_seq, begin),
    RUBY_REF_EDGE(struct arith_seq, end),
    RUBY_REF_EDGE(struct arith_seq, step),
    RUBY_REF_END
};
3796
3797static const rb_data_type_t arith_seq_data_type = {
3798 "arithmetic_sequence",
3799 {
3800 RUBY_REFS_LIST_PTR(arith_seq_refs),
3802 NULL, // Nothing allocated externally, so don't need a memsize function
3803 NULL,
3804 },
3805 .parent = &enumerator_data_type,
3806 .flags = RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED | RUBY_TYPED_DECL_MARKING | RUBY_TYPED_EMBEDDABLE
3807};
3808
3809static VALUE
3810arith_seq_allocate(VALUE klass)
3811{
3812 struct arith_seq *ptr;
3813 VALUE enum_obj;
3814
3815 enum_obj = TypedData_Make_Struct(klass, struct arith_seq, &arith_seq_data_type, ptr);
3816 ptr->enumerator.obj = Qundef;
3817
3818 return enum_obj;
3819}
3820
/*
 * Document-class: Enumerator::ArithmeticSequence
 *
 * Enumerator::ArithmeticSequence is a subclass of Enumerator,
 * that is a representation of sequences of numbers with common difference.
 * Instances of this class can be generated by the Range#step and Numeric#step
 * methods.
 *
 * The class can be used for slicing Array (see Array#slice) or custom
 * collections.
 */

/*
 * Public constructor: builds an ArithmeticSequence wrapping +obj+/+meth+
 * like a regular enumerator, then fills in the sequence components.
 */
VALUE
rb_arith_seq_new(VALUE obj, VALUE meth, int argc, VALUE const *argv,
                 rb_enumerator_size_func *size_fn,
                 VALUE beg, VALUE end, VALUE step, int excl)
{
    VALUE aseq = enumerator_init(arith_seq_allocate(rb_cArithSeq),
                                 obj, meth, argc, argv, size_fn, Qnil, rb_keyword_given_p());
    struct arith_seq *ptr;
    /* Fetch via the parent enumerator_data_type; arith_seq_data_type
     * declares it as .parent, so this resolves to the same struct. */
    TypedData_Get_Struct(aseq, struct arith_seq, &enumerator_data_type, ptr);

    RB_OBJ_WRITE(aseq, &ptr->begin, beg);
    RB_OBJ_WRITE(aseq, &ptr->end, end);
    RB_OBJ_WRITE(aseq, &ptr->step, step);
    ptr->exclude_end = excl;

    return aseq;
}
3850
3851/*
3852 * call-seq: aseq.begin -> num or nil
3853 *
3854 * Returns the number that defines the first element of this arithmetic
3855 * sequence.
3856 */
3857static inline VALUE
3858arith_seq_begin(VALUE self)
3859{
3860 struct arith_seq *ptr;
3861 TypedData_Get_Struct(self, struct arith_seq, &enumerator_data_type, ptr);
3862 return ptr->begin;
3863}
3864
3865/*
3866 * call-seq: aseq.end -> num or nil
3867 *
3868 * Returns the number that defines the end of this arithmetic sequence.
3869 */
3870static inline VALUE
3871arith_seq_end(VALUE self)
3872{
3873 struct arith_seq *ptr;
3874 TypedData_Get_Struct(self, struct arith_seq, &enumerator_data_type, ptr);
3875 return ptr->end;
3876}
3877
3878/*
3879 * call-seq: aseq.step -> num
3880 *
3881 * Returns the number that defines the common difference between
3882 * two adjacent elements in this arithmetic sequence.
3883 */
3884static inline VALUE
3885arith_seq_step(VALUE self)
3886{
3887 struct arith_seq *ptr;
3888 TypedData_Get_Struct(self, struct arith_seq, &enumerator_data_type, ptr);
3889 return ptr->step;
3890}
3891
3892/*
3893 * call-seq: aseq.exclude_end? -> true or false
3894 *
3895 * Returns <code>true</code> if this arithmetic sequence excludes its end value.
3896 */
3897static inline VALUE
3898arith_seq_exclude_end(VALUE self)
3899{
3900 struct arith_seq *ptr;
3901 TypedData_Get_Struct(self, struct arith_seq, &enumerator_data_type, ptr);
3902 return RBOOL(ptr->exclude_end);
3903}
3904
3905static inline int
3906arith_seq_exclude_end_p(VALUE self)
3907{
3908 struct arith_seq *ptr;
3909 TypedData_Get_Struct(self, struct arith_seq, &enumerator_data_type, ptr);
3910 return ptr->exclude_end;
3911}
3912
3913int
3914rb_arithmetic_sequence_extract(VALUE obj, rb_arithmetic_sequence_components_t *component)
3915{
3916 if (rb_obj_is_kind_of(obj, rb_cArithSeq)) {
3917 component->begin = arith_seq_begin(obj);
3918 component->end = arith_seq_end(obj);
3919 component->step = arith_seq_step(obj);
3920 component->exclude_end = arith_seq_exclude_end_p(obj);
3921 return 1;
3922 }
3923 else if (rb_range_values(obj, &component->begin, &component->end, &component->exclude_end)) {
3924 component->step = INT2FIX(1);
3925 return 1;
3926 }
3927
3928 return 0;
3929}
3930
3931VALUE
3932rb_arithmetic_sequence_beg_len_step(VALUE obj, long *begp, long *lenp, long *stepp, long len, int err)
3933{
3934 RBIMPL_NONNULL_ARG(begp);
3935 RBIMPL_NONNULL_ARG(lenp);
3936 RBIMPL_NONNULL_ARG(stepp);
3937
3939 if (!rb_arithmetic_sequence_extract(obj, &aseq)) {
3940 return Qfalse;
3941 }
3942
3943 long step = NIL_P(aseq.step) ? 1 : NUM2LONG(aseq.step);
3944 *stepp = step;
3945
3946 if (step < 0) {
3947 if (aseq.exclude_end && !NIL_P(aseq.end)) {
3948 /* Handle exclusion before range reversal */
3949 aseq.end = LONG2NUM(NUM2LONG(aseq.end) + 1);
3950
3951 /* Don't exclude the previous beginning */
3952 aseq.exclude_end = 0;
3953 }
3954 VALUE tmp = aseq.begin;
3955 aseq.begin = aseq.end;
3956 aseq.end = tmp;
3957 }
3958
3959 if (err == 0 && (step < -1 || step > 1)) {
3960 if (rb_range_component_beg_len(aseq.begin, aseq.end, aseq.exclude_end, begp, lenp, len, 1) == Qtrue) {
3961 if (*begp > len)
3962 goto out_of_range;
3963 if (*lenp > len)
3964 goto out_of_range;
3965 return Qtrue;
3966 }
3967 }
3968 else {
3969 return rb_range_component_beg_len(aseq.begin, aseq.end, aseq.exclude_end, begp, lenp, len, err);
3970 }
3971
3972 out_of_range:
3973 rb_raise(rb_eRangeError, "%+"PRIsVALUE" out of range", obj);
3974 return Qnil;
3975}
3976
/*
 * Implements ArithmeticSequence#first(n) / #take(n): returns up to +n+
 * leading elements as an array.  Fast paths handle fixnum and float
 * sequences; anything else falls through to Enumerator's generic #take.
 */
static VALUE
arith_seq_take(VALUE self, VALUE num)
{
    VALUE b, e, s, ary;
    long n;
    int x;

    n = NUM2LONG(num);
    if (n < 0) {
        rb_raise(rb_eArgError, "attempt to take negative size");
    }
    if (n == 0) {
        return rb_ary_new_capa(0);
    }

    b = arith_seq_begin(self);
    e = arith_seq_end(self);
    s = arith_seq_step(self);
    x = arith_seq_exclude_end_p(self);

    /* Endless fixnum sequence: emit fixnums while they fit, then switch
     * to bignum arithmetic for the remainder. */
    if (FIXNUM_P(b) && NIL_P(e) && FIXNUM_P(s)) {
        long i = FIX2LONG(b), unit = FIX2LONG(s);
        ary = rb_ary_new_capa(n);
        while (n > 0 && FIXABLE(i)) {
            rb_ary_push(ary, LONG2FIX(i));
            i += unit;  // FIXABLE + FIXABLE never overflow;
            --n;
        }
        if (n > 0) {
            b = LONG2NUM(i);
            while (n > 0) {
                rb_ary_push(ary, b);
                b = rb_big_plus(b, s);
                --n;
            }
        }
        return ary;
    }
    /* Bounded fixnum sequence: pure C-long loop with overflow guards. */
    else if (FIXNUM_P(b) && FIXNUM_P(e) && FIXNUM_P(s)) {
        long i = FIX2LONG(b);
        long end = FIX2LONG(e);
        long unit = FIX2LONG(s);
        long len;

        if (unit >= 0) {
            /* Inclusive end: widen the bound by one so `i < end` works. */
            if (!x) end += 1;

            len = end - i;
            if (len < 0) len = 0;
            ary = rb_ary_new_capa((n < len) ? n : len);
            while (n > 0 && i < end) {
                rb_ary_push(ary, LONG2FIX(i));
                if (i > LONG_MAX - unit) break;
                i += unit;
                --n;
            }
        }
        else {
            if (!x) end -= 1;

            len = i - end;
            if (len < 0) len = 0;
            ary = rb_ary_new_capa((n < len) ? n : len);
            while (n > 0 && i > end) {
                rb_ary_push(ary, LONG2FIX(i));
                if (i < LONG_MIN - unit) break;
                i += unit;
                --n;
            }
        }
        return ary;
    }
    else if (RB_FLOAT_TYPE_P(b) || RB_FLOAT_TYPE_P(e) || RB_FLOAT_TYPE_P(s)) {
        /* generate values like ruby_float_step */

        double unit = NUM2DBL(s);
        double beg = NUM2DBL(b);
        double end = NIL_P(e) ? (unit < 0 ? -1 : 1)*HUGE_VAL : NUM2DBL(e);
        double len = ruby_float_step_size(beg, end, unit, x);
        long i;

        if (n > len)
            n = (long)len;

        /* An infinite step yields at most the first element. */
        if (isinf(unit)) {
            if (len > 0) {
                ary = rb_ary_new_capa(1);
                rb_ary_push(ary, DBL2NUM(beg));
            }
            else {
                ary = rb_ary_new_capa(0);
            }
        }
        else if (unit == 0) {
            /* Zero step repeats the first value len times. */
            VALUE val = DBL2NUM(beg);
            ary = rb_ary_new_capa(n);
            for (i = 0; i < len; ++i) {
                rb_ary_push(ary, val);
            }
        }
        else {
            ary = rb_ary_new_capa(n);
            for (i = 0; i < n; ++i) {
                /* i*unit+beg (not repeated addition) limits float error;
                 * clamp to the end bound to avoid overshooting it. */
                double d = i*unit+beg;
                if (unit >= 0 ? end < d : d < end) d = end;
                rb_ary_push(ary, DBL2NUM(d));
            }
        }

        return ary;
    }

    /* Generic numerics (Rational, Bignum bounds, ...): defer to super. */
    {
        VALUE argv[1];
        argv[0] = num;
        return rb_call_super(1, argv);
    }
}
4095
/*
 * call-seq:
 *   aseq.first -> num or nil
 *   aseq.first(n) -> an_array
 *
 * Returns the first number in this arithmetic sequence,
 * or an array of the first +n+ elements.
 */
static VALUE
arith_seq_first(int argc, VALUE *argv, VALUE self)
{
    VALUE b, e, s;

    rb_check_arity(argc, 0, 1);

    b = arith_seq_begin(self);
    e = arith_seq_end(self);
    s = arith_seq_step(self);
    if (argc == 0) {
        if (NIL_P(b)) {
            return Qnil;
        }
        if (!NIL_P(e)) {
            VALUE zero = INT2FIX(0);
            int r = rb_cmpint(rb_num_coerce_cmp(s, zero, idCmp), s, zero);
            /* The sequence is empty when the step moves away from the
             * end bound (ascending past it, or descending below it). */
            if (r > 0 && RTEST(rb_funcall(b, '>', 1, e))) {
                return Qnil;
            }
            if (r < 0 && RTEST(rb_funcall(b, '<', 1, e))) {
                return Qnil;
            }
        }
        return b;
    }

    return arith_seq_take(self, argv[0]);
}
4133
4134static inline VALUE
4135num_plus(VALUE a, VALUE b)
4136{
4137 if (RB_INTEGER_TYPE_P(a)) {
4138 return rb_int_plus(a, b);
4139 }
4140 else if (RB_FLOAT_TYPE_P(a)) {
4141 return rb_float_plus(a, b);
4142 }
4143 else if (RB_TYPE_P(a, T_RATIONAL)) {
4144 return rb_rational_plus(a, b);
4145 }
4146 else {
4147 return rb_funcallv(a, '+', 1, &b);
4148 }
4149}
4150
4151static inline VALUE
4152num_minus(VALUE a, VALUE b)
4153{
4154 if (RB_INTEGER_TYPE_P(a)) {
4155 return rb_int_minus(a, b);
4156 }
4157 else if (RB_FLOAT_TYPE_P(a)) {
4158 return rb_float_minus(a, b);
4159 }
4160 else if (RB_TYPE_P(a, T_RATIONAL)) {
4161 return rb_rational_minus(a, b);
4162 }
4163 else {
4164 return rb_funcallv(a, '-', 1, &b);
4165 }
4166}
4167
4168static inline VALUE
4169num_mul(VALUE a, VALUE b)
4170{
4171 if (RB_INTEGER_TYPE_P(a)) {
4172 return rb_int_mul(a, b);
4173 }
4174 else if (RB_FLOAT_TYPE_P(a)) {
4175 return rb_float_mul(a, b);
4176 }
4177 else if (RB_TYPE_P(a, T_RATIONAL)) {
4178 return rb_rational_mul(a, b);
4179 }
4180 else {
4181 return rb_funcallv(a, '*', 1, &b);
4182 }
4183}
4184
4185static inline VALUE
4186num_idiv(VALUE a, VALUE b)
4187{
4188 VALUE q;
4189 if (RB_INTEGER_TYPE_P(a)) {
4190 q = rb_int_idiv(a, b);
4191 }
4192 else if (RB_FLOAT_TYPE_P(a)) {
4193 q = rb_float_div(a, b);
4194 }
4195 else if (RB_TYPE_P(a, T_RATIONAL)) {
4196 q = rb_rational_div(a, b);
4197 }
4198 else {
4199 q = rb_funcallv(a, idDiv, 1, &b);
4200 }
4201
4202 if (RB_INTEGER_TYPE_P(q)) {
4203 return q;
4204 }
4205 else if (RB_FLOAT_TYPE_P(q)) {
4206 return rb_float_floor(q, 0);
4207 }
4208 else if (RB_TYPE_P(q, T_RATIONAL)) {
4209 return rb_rational_floor(q, 0);
4210 }
4211 else {
4212 return rb_funcall(q, rb_intern("floor"), 0);
4213 }
4214}
4215
/*
 * call-seq:
 *   aseq.last -> num or nil
 *   aseq.last(n) -> an_array
 *
 * Returns the last number in this arithmetic sequence,
 * or an array of the last +n+ elements.
 */
static VALUE
arith_seq_last(int argc, VALUE *argv, VALUE self)
{
    VALUE b, e, s, len_1, len, last, nv, ary;
    int last_is_adjusted;
    long n;

    e = arith_seq_end(self);
    if (NIL_P(e)) {
        rb_raise(rb_eRangeError,
                 "cannot get the last element of endless arithmetic sequence");
    }

    b = arith_seq_begin(self);
    s = arith_seq_step(self);

    /* len_1 = floor((e - b) / s): zero-based index of the last value not
     * past the end bound.  Negative means the sequence is empty. */
    len_1 = num_idiv(num_minus(e, b), s);
    if (rb_num_negative_int_p(len_1)) {
        if (argc == 0) {
            return Qnil;
        }
        return rb_ary_new_capa(0);
    }

    /* When the end is excluded and the last value lands exactly on it,
     * back off by one step. */
    last = num_plus(b, num_mul(s, len_1));
    if ((last_is_adjusted = arith_seq_exclude_end_p(self) && rb_equal(last, e))) {
        last = num_minus(last, s);
    }

    if (argc == 0) {
        return last;
    }

    if (last_is_adjusted) {
        len = len_1;
    }
    else {
        len = rb_int_plus(len_1, INT2FIX(1));
    }

    /* Clamp the requested count to the actual sequence length. */
    rb_scan_args(argc, argv, "1", &nv);
    if (!RB_INTEGER_TYPE_P(nv)) {
        nv = rb_to_int(nv);
    }
    if (RTEST(rb_int_gt(nv, len))) {
        nv = len;
    }
    n = NUM2LONG(nv);
    if (n < 0) {
        rb_raise(rb_eArgError, "negative array size");
    }

    /* Walk forward from (last - n*s) so the array ends exactly at last. */
    ary = rb_ary_new_capa(n);
    b = rb_int_minus(last, rb_int_mul(s, nv));
    while (n) {
        b = rb_int_plus(b, s);
        rb_ary_push(ary, b);
        --n;
    }

    return ary;
}
4286
/*
 * call-seq:
 *   aseq.inspect -> string
 *
 * Convert this arithmetic sequence to a printable form.
 */
static VALUE
arith_seq_inspect(VALUE self)
{
    struct enumerator *e;
    VALUE eobj, str, eargs;
    int range_p;

    TypedData_Get_Struct(self, struct enumerator, &enumerator_data_type, e);

    eobj = rb_attr_get(self, id_receiver);
    if (NIL_P(eobj)) {
        eobj = e->obj;
    }

    /* Range receivers are parenthesized: "((1..10).step(2))" etc. */
    range_p = RTEST(rb_obj_is_kind_of(eobj, rb_cRange));
    str = rb_sprintf("(%s%"PRIsVALUE"%s.", range_p ? "(" : "", eobj, range_p ? ")" : "");

    rb_str_buf_append(str, rb_id2str(e->meth));

    eargs = rb_attr_get(eobj, id_arguments);
    if (NIL_P(eargs)) {
        eargs = e->args;
    }
    if (eargs != Qfalse) {
        long argc = RARRAY_LEN(eargs);
        const VALUE *argv = RARRAY_CONST_PTR(eargs); /* WB: no new reference */

        if (argc > 0) {
            VALUE kwds = Qnil;

            rb_str_buf_cat2(str, "(");

            /* A trailing hash whose keys are all symbols is rendered as
             * keyword arguments rather than as a positional hash. */
            if (RB_TYPE_P(argv[argc-1], T_HASH)) {
                int all_key = TRUE;
                rb_hash_foreach(argv[argc-1], key_symbol_p, (VALUE)&all_key);
                if (all_key) kwds = argv[--argc];
            }

            while (argc--) {
                VALUE arg = *argv++;

                rb_str_append(str, rb_inspect(arg));
                rb_str_buf_cat2(str, ", ");
            }
            if (!NIL_P(kwds)) {
                rb_hash_foreach(kwds, kwd_append, str);
            }
            rb_str_set_len(str, RSTRING_LEN(str)-2); /* drop the last ", " */
            rb_str_buf_cat2(str, ")");
        }
    }

    rb_str_buf_cat2(str, ")");

    return str;
}
4349
4350/*
4351 * call-seq:
4352 * aseq == obj -> true or false
4353 *
4354 * Returns <code>true</code> only if +obj+ is an Enumerator::ArithmeticSequence,
4355 * has equivalent begin, end, step, and exclude_end? settings.
4356 */
4357static VALUE
4358arith_seq_eq(VALUE self, VALUE other)
4359{
4360 if (!RTEST(rb_obj_is_kind_of(other, rb_cArithSeq))) {
4361 return Qfalse;
4362 }
4363
4364 if (!rb_equal(arith_seq_begin(self), arith_seq_begin(other))) {
4365 return Qfalse;
4366 }
4367
4368 if (!rb_equal(arith_seq_end(self), arith_seq_end(other))) {
4369 return Qfalse;
4370 }
4371
4372 if (!rb_equal(arith_seq_step(self), arith_seq_step(other))) {
4373 return Qfalse;
4374 }
4375
4376 if (arith_seq_exclude_end_p(self) != arith_seq_exclude_end_p(other)) {
4377 return Qfalse;
4378 }
4379
4380 return Qtrue;
4381}
4382
/*
 * call-seq:
 *   aseq.hash -> integer
 *
 * Compute a hash-value for this arithmetic sequence.
 * Two arithmetic sequences with same begin, end, step, and exclude_end?
 * values will generate the same hash-value.
 *
 * See also Object#hash.
 */
static VALUE
arith_seq_hash(VALUE self)
{
    st_index_t hash;
    VALUE v;

    /* Mix exclude_end?, begin, end, and step (each via #hash) into one
     * accumulator so equal sequences hash equally. */
    hash = rb_hash_start(arith_seq_exclude_end_p(self));
    v = rb_hash(arith_seq_begin(self));
    hash = rb_hash_uint(hash, NUM2LONG(v));
    v = rb_hash(arith_seq_end(self));
    hash = rb_hash_uint(hash, NUM2LONG(v));
    v = rb_hash(arith_seq_step(self));
    hash = rb_hash_uint(hash, NUM2LONG(v));
    hash = rb_hash_end(hash);

    return ST2FIX(hash);
}
4410
4411#define NUM_GE(x, y) RTEST(rb_num_coerce_relop((x), (y), idGE))
4412
4414 VALUE current;
4415 VALUE end;
4416 VALUE step;
4417 int excl;
4418};
4419
/*
 * call-seq:
 *   aseq.each {|i| block } -> aseq
 *   aseq.each -> aseq
 */
static VALUE
arith_seq_each(VALUE self)
{
    VALUE c, e, s, len_1, last;
    int x;

    if (!rb_block_given_p()) return self;

    c = arith_seq_begin(self);
    e = arith_seq_end(self);
    s = arith_seq_step(self);
    x = arith_seq_exclude_end_p(self);

    /* Float sequences are yielded entirely by ruby_float_step (which
     * returns true when it handled them); Complex steps are excluded
     * because they cannot be compared/stepped that way. */
    if (!RB_TYPE_P(s, T_COMPLEX) && ruby_float_step(c, e, s, x, TRUE)) {
        return self;
    }

    /* Endless sequence: yield forever (deliberate infinite loop). */
    if (NIL_P(e)) {
        while (1) {
            rb_yield(c);
            c = rb_int_plus(c, s);
        }

        return self;
    }

    /* Zero step: the same value repeats forever. */
    if (rb_equal(s, INT2FIX(0))) {
        while (1) {
            rb_yield(c);
        }

        return self;
    }

    /* Compute the true last value (backing off one step if the end is
     * excluded and hit exactly), then walk toward it. */
    len_1 = num_idiv(num_minus(e, c), s);
    last = num_plus(c, num_mul(s, len_1));
    if (x && rb_equal(last, e)) {
        last = num_minus(last, s);
    }

    if (rb_num_negative_int_p(s)) {
        while (NUM_GE(c, last)) {
            rb_yield(c);
            c = num_plus(c, s);
        }
    }
    else {
        while (NUM_GE(last, c)) {
            rb_yield(c);
            c = num_plus(c, s);
        }
    }

    return self;
}
4480
/*
 * call-seq:
 *   aseq.size -> num or nil
 *
 * Returns the number of elements in this arithmetic sequence if it is a finite
 * sequence. Otherwise, returns <code>nil</code>.
 */
static VALUE
arith_seq_size(VALUE self)
{
    VALUE b, e, s, len_1, len, last;
    int x;

    b = arith_seq_begin(self);
    e = arith_seq_end(self);
    s = arith_seq_step(self);
    x = arith_seq_exclude_end_p(self);

    /* Float path: delegate the (subtle) rounding rules to
     * ruby_float_step_size; an endless sequence counts toward +/-inf
     * depending on the step's sign. */
    if (RB_FLOAT_TYPE_P(b) || RB_FLOAT_TYPE_P(e) || RB_FLOAT_TYPE_P(s)) {
        double ee, n;

        if (NIL_P(e)) {
            if (rb_num_negative_int_p(s)) {
                ee = -HUGE_VAL;
            }
            else {
                ee = HUGE_VAL;
            }
        }
        else {
            ee = NUM2DBL(e);
        }

        n = ruby_float_step_size(NUM2DBL(b), ee, NUM2DBL(s), x);
        if (isinf(n)) return DBL2NUM(n);
        if (POSFIXABLE(n)) return LONG2FIX((long)n);
        return rb_dbl2big(n);
    }

    /* Endless or zero-step integer sequences are infinite. */
    if (NIL_P(e)) {
        return DBL2NUM(HUGE_VAL);
    }

    if (!rb_obj_is_kind_of(s, rb_cNumeric)) {
        s = rb_to_int(s);
    }

    if (rb_equal(s, INT2FIX(0))) {
        return DBL2NUM(HUGE_VAL);
    }

    /* len_1 = floor((e - b) / s); negative means empty. */
    len_1 = rb_int_idiv(rb_int_minus(e, b), s);
    if (rb_num_negative_int_p(len_1)) {
        return INT2FIX(0);
    }

    /* One fewer element when the end is excluded and hit exactly. */
    last = rb_int_plus(b, rb_int_mul(s, len_1));
    if (x && rb_equal(last, e)) {
        len = len_1;
    }
    else {
        len = rb_int_plus(len_1, INT2FIX(1));
    }

    return len;
}
4547
4548#define sym(name) ID2SYM(rb_intern_const(name))
4549void
4550InitVM_Enumerator(void)
4551{
4552 ID id_private = rb_intern_const("private");
4553
4554 rb_define_method(rb_mKernel, "to_enum", obj_to_enum, -1);
4555 rb_define_method(rb_mKernel, "enum_for", obj_to_enum, -1);
4556
4557 rb_cEnumerator = rb_define_class("Enumerator", rb_cObject);
4559
4560 rb_define_alloc_func(rb_cEnumerator, enumerator_allocate);
4561 rb_define_method(rb_cEnumerator, "initialize", enumerator_initialize, -1);
4562 rb_define_method(rb_cEnumerator, "initialize_copy", enumerator_init_copy, 1);
4563 rb_define_method(rb_cEnumerator, "each", enumerator_each, -1);
4564 rb_define_method(rb_cEnumerator, "each_with_index", enumerator_each_with_index, 0);
4565 rb_define_method(rb_cEnumerator, "each_with_object", enumerator_with_object, 1);
4566 rb_define_method(rb_cEnumerator, "with_index", enumerator_with_index, -1);
4567 rb_define_method(rb_cEnumerator, "with_object", enumerator_with_object, 1);
4568 rb_define_method(rb_cEnumerator, "next_values", enumerator_next_values, 0);
4569 rb_define_method(rb_cEnumerator, "peek_values", enumerator_peek_values_m, 0);
4570 rb_define_method(rb_cEnumerator, "next", enumerator_next, 0);
4571 rb_define_method(rb_cEnumerator, "peek", enumerator_peek, 0);
4572 rb_define_method(rb_cEnumerator, "feed", enumerator_feed, 1);
4573 rb_define_method(rb_cEnumerator, "rewind", enumerator_rewind, 0);
4574 rb_define_method(rb_cEnumerator, "inspect", enumerator_inspect, 0);
4575 rb_define_method(rb_cEnumerator, "size", enumerator_size, 0);
4576 rb_define_method(rb_cEnumerator, "+", enumerator_plus, 1);
4578
4579 /* Lazy */
4581 rb_define_method(rb_mEnumerable, "lazy", enumerable_lazy, 0);
4582
4583 rb_define_alias(rb_cLazy, "_enumerable_map", "map");
4584 rb_define_alias(rb_cLazy, "_enumerable_collect", "collect");
4585 rb_define_alias(rb_cLazy, "_enumerable_flat_map", "flat_map");
4586 rb_define_alias(rb_cLazy, "_enumerable_collect_concat", "collect_concat");
4587 rb_define_alias(rb_cLazy, "_enumerable_select", "select");
4588 rb_define_alias(rb_cLazy, "_enumerable_find_all", "find_all");
4589 rb_define_alias(rb_cLazy, "_enumerable_filter", "filter");
4590 rb_define_alias(rb_cLazy, "_enumerable_filter_map", "filter_map");
4591 rb_define_alias(rb_cLazy, "_enumerable_reject", "reject");
4592 rb_define_alias(rb_cLazy, "_enumerable_grep", "grep");
4593 rb_define_alias(rb_cLazy, "_enumerable_grep_v", "grep_v");
4594 rb_define_alias(rb_cLazy, "_enumerable_zip", "zip");
4595 rb_define_alias(rb_cLazy, "_enumerable_take", "take");
4596 rb_define_alias(rb_cLazy, "_enumerable_take_while", "take_while");
4597 rb_define_alias(rb_cLazy, "_enumerable_drop", "drop");
4598 rb_define_alias(rb_cLazy, "_enumerable_drop_while", "drop_while");
4599 rb_define_alias(rb_cLazy, "_enumerable_uniq", "uniq");
4600 rb_define_private_method(rb_cLazy, "_enumerable_with_index", enumerator_with_index, -1);
4601
4602 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_map"));
4603 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_collect"));
4604 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_flat_map"));
4605 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_collect_concat"));
4606 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_select"));
4607 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_find_all"));
4608 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_filter"));
4609 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_filter_map"));
4610 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_reject"));
4611 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_grep"));
4612 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_grep_v"));
4613 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_zip"));
4614 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_take"));
4615 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_take_while"));
4616 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_drop"));
4617 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_drop_while"));
4618 rb_funcall(rb_cLazy, id_private, 1, sym("_enumerable_uniq"));
4619
4620 rb_define_method(rb_cLazy, "initialize", lazy_initialize, -1);
4621 rb_define_method(rb_cLazy, "to_enum", lazy_to_enum, -1);
4622 rb_define_method(rb_cLazy, "enum_for", lazy_to_enum, -1);
4623 rb_define_method(rb_cLazy, "eager", lazy_eager, 0);
4624 rb_define_method(rb_cLazy, "map", lazy_map, 0);
4625 rb_define_method(rb_cLazy, "collect", lazy_map, 0);
4626 rb_define_method(rb_cLazy, "flat_map", lazy_flat_map, 0);
4627 rb_define_method(rb_cLazy, "collect_concat", lazy_flat_map, 0);
4628 rb_define_method(rb_cLazy, "select", lazy_select, 0);
4629 rb_define_method(rb_cLazy, "find_all", lazy_select, 0);
4630 rb_define_method(rb_cLazy, "filter", lazy_select, 0);
4631 rb_define_method(rb_cLazy, "filter_map", lazy_filter_map, 0);
4632 rb_define_method(rb_cLazy, "reject", lazy_reject, 0);
4633 rb_define_method(rb_cLazy, "grep", lazy_grep, 1);
4634 rb_define_method(rb_cLazy, "grep_v", lazy_grep_v, 1);
4635 rb_define_method(rb_cLazy, "zip", lazy_zip, -1);
4636 rb_define_method(rb_cLazy, "take", lazy_take, 1);
4637 rb_define_method(rb_cLazy, "take_while", lazy_take_while, 0);
4638 rb_define_method(rb_cLazy, "drop", lazy_drop, 1);
4639 rb_define_method(rb_cLazy, "drop_while", lazy_drop_while, 0);
4640 rb_define_method(rb_cLazy, "lazy", lazy_lazy, 0);
4641 rb_define_method(rb_cLazy, "chunk", lazy_super, -1);
4642 rb_define_method(rb_cLazy, "slice_before", lazy_super, -1);
4643 rb_define_method(rb_cLazy, "slice_after", lazy_super, -1);
4644 rb_define_method(rb_cLazy, "slice_when", lazy_super, -1);
4645 rb_define_method(rb_cLazy, "chunk_while", lazy_super, -1);
4646 rb_define_method(rb_cLazy, "uniq", lazy_uniq, 0);
4647 rb_define_method(rb_cLazy, "compact", lazy_compact, 0);
4648 rb_define_method(rb_cLazy, "with_index", lazy_with_index, -1);
4649
4650 lazy_use_super_method = rb_hash_new_with_size(18);
4651 rb_hash_aset(lazy_use_super_method, sym("map"), sym("_enumerable_map"));
4652 rb_hash_aset(lazy_use_super_method, sym("collect"), sym("_enumerable_collect"));
4653 rb_hash_aset(lazy_use_super_method, sym("flat_map"), sym("_enumerable_flat_map"));
4654 rb_hash_aset(lazy_use_super_method, sym("collect_concat"), sym("_enumerable_collect_concat"));
4655 rb_hash_aset(lazy_use_super_method, sym("select"), sym("_enumerable_select"));
4656 rb_hash_aset(lazy_use_super_method, sym("find_all"), sym("_enumerable_find_all"));
4657 rb_hash_aset(lazy_use_super_method, sym("filter"), sym("_enumerable_filter"));
4658 rb_hash_aset(lazy_use_super_method, sym("filter_map"), sym("_enumerable_filter_map"));
4659 rb_hash_aset(lazy_use_super_method, sym("reject"), sym("_enumerable_reject"));
4660 rb_hash_aset(lazy_use_super_method, sym("grep"), sym("_enumerable_grep"));
4661 rb_hash_aset(lazy_use_super_method, sym("grep_v"), sym("_enumerable_grep_v"));
4662 rb_hash_aset(lazy_use_super_method, sym("zip"), sym("_enumerable_zip"));
4663 rb_hash_aset(lazy_use_super_method, sym("take"), sym("_enumerable_take"));
4664 rb_hash_aset(lazy_use_super_method, sym("take_while"), sym("_enumerable_take_while"));
4665 rb_hash_aset(lazy_use_super_method, sym("drop"), sym("_enumerable_drop"));
4666 rb_hash_aset(lazy_use_super_method, sym("drop_while"), sym("_enumerable_drop_while"));
4667 rb_hash_aset(lazy_use_super_method, sym("uniq"), sym("_enumerable_uniq"));
4668 rb_hash_aset(lazy_use_super_method, sym("with_index"), sym("_enumerable_with_index"));
4669 rb_obj_freeze(lazy_use_super_method);
4670 rb_vm_register_global_object(lazy_use_super_method);
4671
4672#if 0 /* for RDoc */
4673 rb_define_method(rb_cLazy, "to_a", lazy_to_a, 0);
4674 rb_define_method(rb_cLazy, "chunk", lazy_chunk, 0);
4675 rb_define_method(rb_cLazy, "chunk_while", lazy_chunk_while, 0);
4676 rb_define_method(rb_cLazy, "slice_after", lazy_slice_after, 0);
4677 rb_define_method(rb_cLazy, "slice_before", lazy_slice_before, 0);
4678 rb_define_method(rb_cLazy, "slice_when", lazy_slice_when, 0);
4679#endif
4680 rb_define_alias(rb_cLazy, "force", "to_a");
4681
4683 rb_define_method(rb_eStopIteration, "result", stop_result, 0);
4684
4685 /* Generator */
4686 rb_cGenerator = rb_define_class_under(rb_cEnumerator, "Generator", rb_cObject);
4687 rb_include_module(rb_cGenerator, rb_mEnumerable);
4688 rb_define_alloc_func(rb_cGenerator, generator_allocate);
4689 rb_define_method(rb_cGenerator, "initialize", generator_initialize, -1);
4690 rb_define_method(rb_cGenerator, "initialize_copy", generator_init_copy, 1);
4691 rb_define_method(rb_cGenerator, "each", generator_each, -1);
4692
4693 /* Yielder */
4694 rb_cYielder = rb_define_class_under(rb_cEnumerator, "Yielder", rb_cObject);
4695 rb_define_alloc_func(rb_cYielder, yielder_allocate);
4696 rb_define_method(rb_cYielder, "initialize", yielder_initialize, 0);
4697 rb_define_method(rb_cYielder, "yield", yielder_yield, -2);
4698 rb_define_method(rb_cYielder, "<<", yielder_yield_push, 1);
4699 rb_define_method(rb_cYielder, "to_proc", yielder_to_proc, 0);
4700
4701 /* Producer */
4702 rb_cEnumProducer = rb_define_class_under(rb_cEnumerator, "Producer", rb_cObject);
4703 rb_define_alloc_func(rb_cEnumProducer, producer_allocate);
4704 rb_define_method(rb_cEnumProducer, "each", producer_each, 0);
4705 rb_define_singleton_method(rb_cEnumerator, "produce", enumerator_s_produce, -1);
4706
4707 /* Chain */
4708 rb_cEnumChain = rb_define_class_under(rb_cEnumerator, "Chain", rb_cEnumerator);
4709 rb_define_alloc_func(rb_cEnumChain, enum_chain_allocate);
4710 rb_define_method(rb_cEnumChain, "initialize", enum_chain_initialize, -2);
4711 rb_define_method(rb_cEnumChain, "initialize_copy", enum_chain_init_copy, 1);
4712 rb_define_method(rb_cEnumChain, "each", enum_chain_each, -1);
4713 rb_define_method(rb_cEnumChain, "size", enum_chain_size, 0);
4714 rb_define_method(rb_cEnumChain, "rewind", enum_chain_rewind, 0);
4715 rb_define_method(rb_cEnumChain, "inspect", enum_chain_inspect, 0);
4716 rb_undef_method(rb_cEnumChain, "feed");
4717 rb_undef_method(rb_cEnumChain, "next");
4718 rb_undef_method(rb_cEnumChain, "next_values");
4719 rb_undef_method(rb_cEnumChain, "peek");
4720 rb_undef_method(rb_cEnumChain, "peek_values");
4721
4722 /* Product */
4723 rb_cEnumProduct = rb_define_class_under(rb_cEnumerator, "Product", rb_cEnumerator);
4724 rb_define_alloc_func(rb_cEnumProduct, enum_product_allocate);
4725 rb_define_method(rb_cEnumProduct, "initialize", enum_product_initialize, -1);
4726 rb_define_method(rb_cEnumProduct, "initialize_copy", enum_product_init_copy, 1);
4727 rb_define_method(rb_cEnumProduct, "each", enum_product_each, 0);
4728 rb_define_method(rb_cEnumProduct, "size", enum_product_size, 0);
4729 rb_define_method(rb_cEnumProduct, "rewind", enum_product_rewind, 0);
4730 rb_define_method(rb_cEnumProduct, "inspect", enum_product_inspect, 0);
4731 rb_undef_method(rb_cEnumProduct, "feed");
4732 rb_undef_method(rb_cEnumProduct, "next");
4733 rb_undef_method(rb_cEnumProduct, "next_values");
4734 rb_undef_method(rb_cEnumProduct, "peek");
4735 rb_undef_method(rb_cEnumProduct, "peek_values");
4736 rb_define_singleton_method(rb_cEnumerator, "product", enumerator_s_product, -1);
4737
4738 /* ArithmeticSequence */
4739 rb_cArithSeq = rb_define_class_under(rb_cEnumerator, "ArithmeticSequence", rb_cEnumerator);
4740 rb_undef_alloc_func(rb_cArithSeq);
4741 rb_undef_method(CLASS_OF(rb_cArithSeq), "new");
4742 rb_define_method(rb_cArithSeq, "begin", arith_seq_begin, 0);
4743 rb_define_method(rb_cArithSeq, "end", arith_seq_end, 0);
4744 rb_define_method(rb_cArithSeq, "exclude_end?", arith_seq_exclude_end, 0);
4745 rb_define_method(rb_cArithSeq, "step", arith_seq_step, 0);
4746 rb_define_method(rb_cArithSeq, "first", arith_seq_first, -1);
4747 rb_define_method(rb_cArithSeq, "last", arith_seq_last, -1);
4748 rb_define_method(rb_cArithSeq, "inspect", arith_seq_inspect, 0);
4749 rb_define_method(rb_cArithSeq, "==", arith_seq_eq, 1);
4750 rb_define_method(rb_cArithSeq, "===", arith_seq_eq, 1);
4751 rb_define_method(rb_cArithSeq, "eql?", arith_seq_eq, 1);
4752 rb_define_method(rb_cArithSeq, "hash", arith_seq_hash, 0);
4753 rb_define_method(rb_cArithSeq, "each", arith_seq_each, 0);
4754 rb_define_method(rb_cArithSeq, "size", arith_seq_size, 0);
4755
4756 rb_provide("enumerator.so"); /* for backward compatibility */
4757}
4758#undef sym
4759
4760void
4761Init_Enumerator(void)
4762{
4763 id_rewind = rb_intern_const("rewind");
4764 id_next = rb_intern_const("next");
4765 id_result = rb_intern_const("result");
4766 id_receiver = rb_intern_const("receiver");
4767 id_arguments = rb_intern_const("arguments");
4768 id_memo = rb_intern_const("memo");
4769 id_method = rb_intern_const("method");
4770 id_force = rb_intern_const("force");
4771 id_to_enum = rb_intern_const("to_enum");
4772 id_each_entry = rb_intern_const("each_entry");
4773 sym_each = ID2SYM(id_each);
4774 sym_yield = ID2SYM(rb_intern_const("yield"));
4775
4776 InitVM(Enumerator);
4777}
#define rb_define_method(klass, mid, func, arity)
Defines klass#mid.
#define rb_define_singleton_method(klass, mid, func, arity)
Defines klass.mid.
#define rb_define_private_method(klass, mid, func, arity)
Defines klass#mid and makes it private.
void rb_include_module(VALUE klass, VALUE module)
Includes a module to a class.
Definition class.c:1803
VALUE rb_define_class(const char *name, VALUE super)
Defines a top-level class.
Definition class.c:1596
VALUE rb_define_class_under(VALUE outer, const char *name, VALUE super)
Defines a class under the namespace of outer.
Definition class.c:1627
void rb_define_alias(VALUE klass, const char *name1, const char *name2)
Defines an alias of a method.
Definition class.c:2965
void rb_need_block(void)
Declares that the current method needs a block.
Definition eval.c:1038
void rb_undef_method(VALUE klass, const char *name)
Undefines a method of a class (registers an undef entry so the method cannot be called or inherited).
Definition class.c:2775
int rb_scan_args_kw(int kw_flag, int argc, const VALUE *argv, const char *fmt,...)
Identical to rb_scan_args(), except it also accepts kw_splat.
Definition class.c:3268
int rb_scan_args(int argc, const VALUE *argv, const char *fmt,...)
Retrieves argument from argc and argv to given VALUE references according to the format string.
Definition class.c:3255
int rb_keyword_given_p(void)
Determines if the current method is given a keyword argument.
Definition eval.c:1030
int rb_block_given_p(void)
Determines if the current method is given a block.
Definition eval.c:1017
int rb_get_kwargs(VALUE keyword_hash, const ID *table, int required, int optional, VALUE *values)
Keyword argument deconstructor.
Definition class.c:3044
#define T_COMPLEX
Old name of RUBY_T_COMPLEX.
Definition value_type.h:59
#define RB_INTEGER_TYPE_P
Old name of rb_integer_type_p.
Definition value_type.h:87
#define rb_str_buf_cat2
Old name of rb_usascii_str_new_cstr.
Definition string.h:1683
#define OBJ_INIT_COPY(obj, orig)
Old name of RB_OBJ_INIT_COPY.
Definition object.h:41
#define RFLOAT_VALUE
Old name of rb_float_value.
Definition double.h:28
#define Qundef
Old name of RUBY_Qundef.
#define INT2FIX
Old name of RB_INT2FIX.
Definition long.h:48
#define T_FLOAT
Old name of RUBY_T_FLOAT.
Definition value_type.h:64
#define ID2SYM
Old name of RB_ID2SYM.
Definition symbol.h:44
#define UNREACHABLE_RETURN
Old name of RBIMPL_UNREACHABLE_RETURN.
Definition assume.h:29
#define CLASS_OF
Old name of rb_class_of.
Definition globals.h:205
#define rb_ary_new4
Old name of rb_ary_new_from_values.
Definition array.h:659
#define FIXABLE
Old name of RB_FIXABLE.
Definition fixnum.h:25
#define rb_exc_new2
Old name of rb_exc_new_cstr.
Definition error.h:37
#define LONG2FIX
Old name of RB_INT2FIX.
Definition long.h:49
#define T_RATIONAL
Old name of RUBY_T_RATIONAL.
Definition value_type.h:76
#define T_HASH
Old name of RUBY_T_HASH.
Definition value_type.h:65
#define NUM2DBL
Old name of rb_num2dbl.
Definition double.h:27
#define rb_ary_new3
Old name of rb_ary_new_from_args.
Definition array.h:658
#define LONG2NUM
Old name of RB_LONG2NUM.
Definition long.h:50
#define Qtrue
Old name of RUBY_Qtrue.
#define ST2FIX
Old name of RB_ST2FIX.
Definition st_data_t.h:33
#define Qnil
Old name of RUBY_Qnil.
#define Qfalse
Old name of RUBY_Qfalse.
#define FIX2LONG
Old name of RB_FIX2LONG.
Definition long.h:46
#define T_ARRAY
Old name of RUBY_T_ARRAY.
Definition value_type.h:56
#define NIL_P
Old name of RB_NIL_P.
#define ALLOCV_N
Old name of RB_ALLOCV_N.
Definition memory.h:405
#define POSFIXABLE
Old name of RB_POSFIXABLE.
Definition fixnum.h:29
#define T_SYMBOL
Old name of RUBY_T_SYMBOL.
Definition value_type.h:80
#define DBL2NUM
Old name of rb_float_new.
Definition double.h:29
#define NUM2LONG
Old name of RB_NUM2LONG.
Definition long.h:51
#define FIXNUM_P
Old name of RB_FIXNUM_P.
#define rb_ary_new2
Old name of rb_ary_new_capa.
Definition array.h:657
#define ALLOCV_END
Old name of RB_ALLOCV_END.
Definition memory.h:406
#define SYMBOL_P
Old name of RB_SYMBOL_P.
Definition value_type.h:88
void rb_exc_raise(VALUE mesg)
Raises an exception in the current thread.
Definition eval.c:660
void rb_iter_break(void)
Breaks from a block.
Definition vm.c:2280
VALUE rb_eRangeError
RangeError exception.
Definition error.c:1422
VALUE rb_eTypeError
TypeError exception.
Definition error.c:1418
VALUE rb_eRuntimeError
RuntimeError exception.
Definition error.c:1416
VALUE rb_eStopIteration
StopIteration exception.
Definition enumerator.c:195
void rb_warn(const char *fmt,...)
Identical to rb_warning(), except it reports unless $VERBOSE is nil.
Definition error.c:466
VALUE rb_exc_new_str(VALUE etype, VALUE str)
Identical to rb_exc_new_cstr(), except it takes a Ruby's string instead of C's.
Definition error.c:1469
VALUE rb_eIndexError
IndexError exception.
Definition error.c:1420
VALUE rb_mKernel
Kernel module.
Definition object.c:60
VALUE rb_cObject
Object class.
Definition object.c:61
VALUE rb_mEnumerable
Enumerable module.
Definition enum.c:27
VALUE rb_cEnumerator
Enumerator class.
Definition enumerator.c:178
VALUE rb_obj_hide(VALUE obj)
Make the object invisible from Ruby code.
Definition object.c:100
VALUE rb_cNumeric
Numeric class.
Definition numeric.c:197
VALUE rb_obj_class(VALUE obj)
Queries the class of an object.
Definition object.c:264
VALUE rb_obj_dup(VALUE obj)
Duplicates the given object.
Definition object.c:582
VALUE rb_inspect(VALUE obj)
Generates a human-readable textual representation of the given object.
Definition object.c:686
VALUE rb_cRange
Range class.
Definition range.c:31
VALUE rb_equal(VALUE lhs, VALUE rhs)
This function is an optimised version of calling #==.
Definition object.c:176
VALUE rb_obj_is_kind_of(VALUE obj, VALUE klass)
Queries if the given object is an instance (of possibly descendants) of the given class.
Definition object.c:923
VALUE rb_obj_freeze(VALUE obj)
Just calls rb_obj_freeze_inline() inside.
Definition object.c:1342
VALUE rb_to_int(VALUE val)
Identical to rb_check_to_int(), except it raises in case of conversion mismatch.
Definition object.c:3365
#define RB_OBJ_WRITE(old, slot, young)
Declaration of a "back" pointer.
Definition gc.h:603
VALUE rb_funcall(VALUE recv, ID mid, int n,...)
Calls a method.
Definition vm_eval.c:1117
VALUE rb_funcall_with_block(VALUE recv, ID mid, int argc, const VALUE *argv, VALUE procval)
Identical to rb_funcallv_public(), except you can pass a block.
Definition vm_eval.c:1194
#define rb_funcall2
Definition eval.h:207
VALUE rb_call_super(int argc, const VALUE *argv)
This resembles ruby's super.
Definition vm_eval.c:362
VALUE rb_ary_new_from_values(long n, const VALUE *elts)
Identical to rb_ary_new_from_args(), except how objects are passed.
VALUE rb_ary_dup(VALUE ary)
Duplicates an array.
VALUE rb_ary_cat(VALUE ary, const VALUE *train, long len)
Destructively appends multiple elements at the end of the array.
VALUE rb_check_array_type(VALUE obj)
Try converting an object to its array representation using its to_ary method, if any.
VALUE rb_ary_new(void)
Allocates a new, empty array.
VALUE rb_ary_new_capa(long capa)
Identical to rb_ary_new(), except it additionally specifies how many rooms of objects it should alloc...
VALUE rb_ary_resize(VALUE ary, long len)
Expands or shrinks the passed array to the passed length.
VALUE rb_ary_hidden_new(long capa)
Allocates a hidden (no class) empty array.
VALUE rb_ary_push(VALUE ary, VALUE elem)
Special case of rb_ary_cat() that it adds only one element.
VALUE rb_ary_freeze(VALUE obj)
Freeze an array, preventing further modifications.
VALUE rb_ary_entry(VALUE ary, long off)
Queries an element of an array.
void rb_ary_store(VALUE ary, long key, VALUE val)
Destructively stores the passed value to the passed array's passed index.
#define RETURN_SIZED_ENUMERATOR(obj, argc, argv, size_fn)
This roughly resembles return enum_for(__callee__) unless block_given?.
Definition enumerator.h:208
VALUE rb_enumerator_size_func(VALUE recv, VALUE argv, VALUE eobj)
This is the type of functions that rb_enumeratorize_with_size() expects.
Definition enumerator.h:45
static int rb_check_arity(int argc, int min, int max)
Ensures that the passed integer is in the passed range.
Definition error.h:284
void rb_provide(const char *feature)
Declares that the given feature is already provided by someone else.
Definition load.c:695
VALUE rb_num_coerce_cmp(VALUE lhs, VALUE rhs, ID op)
Identical to rb_num_coerce_bin(), except for return values.
Definition numeric.c:485
VALUE rb_obj_method(VALUE recv, VALUE mid)
Creates a method object.
Definition proc.c:2274
VALUE rb_block_proc(void)
Constructs a Proc object from implicitly passed components.
Definition proc.c:988
VALUE rb_proc_call_with_block(VALUE recv, int argc, const VALUE *argv, VALUE proc)
Identical to rb_proc_call(), except you can additionally pass another proc object,...
Definition proc.c:1169
VALUE rb_proc_call_kw(VALUE recv, VALUE args, int kw_splat)
Identical to rb_proc_call(), except you can specify how to handle the last element of the given array...
Definition proc.c:1130
VALUE rb_obj_is_proc(VALUE recv)
Queries if the given object is a proc.
Definition proc.c:122
int rb_range_values(VALUE range, VALUE *begp, VALUE *endp, int *exclp)
Deconstructs a range into its components.
Definition range.c:1862
#define rb_hash_uint(h, i)
Just another name of st_hash_uint.
Definition string.h:943
#define rb_hash_end(h)
Just another name of st_hash_end.
Definition string.h:946
VALUE rb_str_append(VALUE dst, VALUE src)
Identical to rb_str_buf_append(), except it converts the right hand side before concatenating.
Definition string.c:3816
VALUE rb_str_dup(VALUE str)
Duplicates a string.
Definition string.c:1979
VALUE rb_str_buf_append(VALUE dst, VALUE src)
Identical to rb_str_cat_cstr(), except it takes Ruby's string instead of C's.
Definition string.c:3782
void rb_str_set_len(VALUE str, long len)
Overwrites the length of the string.
Definition string.c:3403
st_index_t rb_hash_start(st_index_t i)
Starts a series of hashing.
Definition random.c:1777
VALUE rb_exec_recursive(VALUE(*f)(VALUE g, VALUE h, int r), VALUE g, VALUE h)
"Recursion" API entry point.
VALUE rb_ivar_set(VALUE obj, ID name, VALUE val)
Identical to rb_iv_set(), except it accepts the name as an ID instead of a C string.
Definition variable.c:2024
VALUE rb_ivar_get(VALUE obj, ID name)
Identical to rb_iv_get(), except it accepts the name as an ID instead of a C string.
Definition variable.c:1492
VALUE rb_class_path(VALUE mod)
Identical to rb_mod_name(), except it returns #<Class: ...> style inspection for anonymous modules.
Definition variable.c:380
int rb_respond_to(VALUE obj, ID mid)
Queries if the object responds to the method.
Definition vm_method.c:3457
void rb_undef_alloc_func(VALUE klass)
Deletes the allocator function of a class.
Definition vm_method.c:1705
VALUE rb_check_funcall(VALUE recv, ID mid, int argc, const VALUE *argv)
Identical to rb_funcallv(), except it returns RUBY_Qundef instead of raising rb_eNoMethodError.
Definition vm_eval.c:686
VALUE rb_check_funcall_kw(VALUE recv, ID mid, int argc, const VALUE *argv, int kw_splat)
Identical to rb_check_funcall(), except you can specify how to handle the last element of the given a...
Definition vm_eval.c:680
void rb_define_alloc_func(VALUE klass, rb_alloc_func_t func)
Sets the allocator function of a class.
static ID rb_intern_const(const char *str)
This is a "tiny optimisation" over rb_intern().
Definition symbol.h:285
VALUE rb_sym2str(VALUE symbol)
Obtain a frozen string representation of a symbol (not including the leading colon).
Definition symbol.c:1031
ID rb_to_id(VALUE str)
Identical to rb_intern_str(), except it tries to convert the parameter object to an instance of rb_cS...
Definition string.c:12691
int len
Length of the buffer.
Definition io.h:8
#define RB_BLOCK_CALL_FUNC_ARGLIST(yielded_arg, callback_arg)
Shim for block function parameters.
Definition iterator.h:58
VALUE rb_yield_values(int n,...)
Identical to rb_yield(), except it takes variadic number of parameters and pass them to the block.
Definition vm_eval.c:1395
VALUE rb_yield_values2(int n, const VALUE *argv)
Identical to rb_yield_values(), except it takes the parameters as a C array instead of variadic argum...
Definition vm_eval.c:1417
VALUE rb_yield(VALUE val)
Yields the block.
Definition vm_eval.c:1372
VALUE rb_yield_values_kw(int n, const VALUE *argv, int kw_splat)
Identical to rb_yield_values2(), except you can specify how to handle the last element of the given a...
Definition vm_eval.c:1423
VALUE rb_block_call_func(RB_BLOCK_CALL_FUNC_ARGLIST(yielded_arg, callback_arg))
This is the type of a function that the interpreter expect for C-backended blocks.
Definition iterator.h:83
VALUE rb_block_call_kw(VALUE obj, ID mid, int argc, const VALUE *argv, rb_block_call_func_t proc, VALUE data2, int kw_splat)
Identical to rb_funcallv_kw(), except it additionally passes a function as a block.
Definition vm_eval.c:1563
#define rb_long2int
Just another name of rb_long2int_inline.
Definition long.h:62
#define MEMCPY(p1, p2, type, n)
Handy macro to call memcpy.
Definition memory.h:372
#define RB_GC_GUARD(v)
Prevents premature destruction of local objects.
Definition memory.h:167
VALUE rb_block_call(VALUE q, ID w, int e, const VALUE *r, type *t, VALUE y)
Call a method with a block.
VALUE rb_proc_new(type *q, VALUE w)
Creates a rb_cProc instance.
VALUE rb_fiber_new(type *q, VALUE w)
Creates a rb_cFiber instance.
void rb_hash_foreach(VALUE q, int_type *w, VALUE e)
Iteration over the given hash.
VALUE rb_rescue2(type *q, VALUE w, type *e, VALUE r,...)
An equivalent of rescue clause.
#define RARRAY_LEN
Just another name of rb_array_len.
Definition rarray.h:51
static int RARRAY_LENINT(VALUE ary)
Identical to rb_array_len(), except it differs for the return type.
Definition rarray.h:281
#define RARRAY_AREF(a, i)
Definition rarray.h:403
#define RARRAY_CONST_PTR
Just another name of rb_array_const_ptr.
Definition rarray.h:52
#define RHASH_EMPTY_P(h)
Checks if the hash is empty.
Definition rhash.h:79
#define RUBY_TYPED_DEFAULT_FREE
This is a value you can set to rb_data_type_struct::dfree.
Definition rtypeddata.h:80
#define RUBY_TYPED_FREE_IMMEDIATELY
Macros to see if each corresponding flag is defined.
Definition rtypeddata.h:119
#define TypedData_Get_Struct(obj, type, data_type, sval)
Obtains a C struct from inside of a wrapper Ruby object.
Definition rtypeddata.h:736
#define TypedData_Make_Struct(klass, type, data_type, sval)
Identical to TypedData_Wrap_Struct, except it allocates a new data region internally instead of takin...
Definition rtypeddata.h:561
#define InitVM(ext)
This macro is for internal use.
Definition ruby.h:231
#define RB_SCAN_ARGS_LAST_HASH_KEYWORDS
Treat a final argument as keywords if it is a hash, and not as keywords otherwise.
Definition scan_args.h:59
#define RB_PASS_CALLED_KEYWORDS
Pass keywords if current method is called with keywords, useful for argument delegation.
Definition scan_args.h:78
#define RB_NO_KEYWORDS
Do not pass keywords.
Definition scan_args.h:69
#define RTEST
This is an old name of RB_TEST.
#define _(args)
This was a transition path from K&R to ANSI.
Definition stdarg.h:35
MEMO.
Definition imemo.h:103
Definition enumerator.c:251
Decomposed Enumerator::ArithmeticSequence.
Definition enumerator.h:53
int exclude_end
Whether the endpoint is open or closed.
Definition enumerator.h:57
VALUE end
"Right" or "highest" endpoint of the sequence.
Definition enumerator.h:55
VALUE step
Step between a sequence.
Definition enumerator.h:56
VALUE begin
"Left" or "lowest" endpoint of the sequence.
Definition enumerator.h:54
This is the struct that holds the necessary info for a wrapped C struct (a TypedData object).
Definition rtypeddata.h:211
VALUE flags
Type-specific behavioural characteristics.
Definition rtypeddata.h:325
uintptr_t ID
Type that represents a Ruby identifier such as a variable name.
Definition value.h:52
uintptr_t VALUE
Type that represents a Ruby object.
Definition value.h:40
static bool RB_FLOAT_TYPE_P(VALUE obj)
Queries if the object is an instance of rb_cFloat.
Definition value_type.h:264
static void Check_Type(VALUE v, enum ruby_value_type t)
Identical to RB_TYPE_P(), except it raises exceptions on predication failure.
Definition value_type.h:433
static bool RB_TYPE_P(VALUE obj, enum ruby_value_type t)
Queries if the given object is of given type.
Definition value_type.h:376