#include "internal/compar.h"
#include "internal/enum.h"
#include "internal/hash.h"
#include "internal/imemo.h"
#include "internal/numeric.h"
#include "internal/object.h"
#include "internal/proc.h"
#include "internal/rational.h"
#include "internal/re.h"
#include "ruby_assert.h"

static ID id__separator;
static ID id_chunk_categorize;
static ID id_chunk_enumerable;
static ID id_sliceafter_enum;
static ID id_sliceafter_pat;
static ID id_sliceafter_pred;
static ID id_slicebefore_enumerable;
static ID id_slicebefore_sep_pat;
static ID id_slicebefore_sep_pred;
static ID id_slicewhen_enum;
static ID id_slicewhen_inverted;
static ID id_slicewhen_pred;

#define id_each idEach
#define id_lshift idLTLT
#define id_call idCall
#define id_size idSize
if (argc == 0)
    return Qnil;
if (argc == 1)
    return argv[0];

#define ENUM_WANT_SVALUE() do { \
    i = rb_enum_values_pack(argc, argv); \

enum_yield(int argc, VALUE ary)
    return rb_yield_force_blockarg(ary);

enum_yield_array(VALUE ary)
    return rb_yield_force_blockarg(ary);
struct MEMO *memo = MEMO_CAST(args);

struct MEMO *memo = MEMO_CAST(args);
VALUE converted_element, match;
match = NIL_P(converted_element) ? Qfalse : rb_reg_match_p(memo->v1, i, 0);
if (match == memo->u3.value) {

struct MEMO *memo = MEMO_CAST(args);
struct MEMO *memo = MEMO_NEW(pat, ary, test);

return enum_grep0(obj, pat, Qtrue);
return enum_grep0(obj, pat, Qfalse);
#define COUNT_BIGNUM IMEMO_FL_USER0
#define MEMO_V3_SET(m, v) RB_OBJ_WRITE((m), &(m)->u3.value, (v))

imemo_count_up(struct MEMO *memo)
    if (memo->flags & COUNT_BIGNUM) {
        MEMO_V3_SET(memo, rb_int_succ(memo->u3.value));
    else if (++memo->u3.cnt == 0) {
        unsigned long buf[2] = {0, 1};
        memo->flags |= COUNT_BIGNUM;
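/*
 * Note on the counter above (a sketch of how it appears to work, not an
 * authoritative description): imemo_count_up() keeps the running count in
 * memo->u3.cnt, a plain unsigned long, for as long as it fits. When the
 * increment wraps to 0, it switches the memo into COUNT_BIGNUM mode:
 * buf = {0, 1} presumably seeds a Bignum equal to 2^(bits per long) via
 * rb_big_unpack(), and from then on MEMO_V3_SET/rb_int_succ carry the count
 * as a Ruby Integer, with RB_OBJ_WRITE providing the write barrier.
 */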
imemo_count_value(struct MEMO *memo)
    if (memo->flags & COUNT_BIGNUM) {
        return memo->u3.value;

struct MEMO *memo = MEMO_CAST(memop);
imemo_count_up(memo);

struct MEMO *memo = MEMO_CAST(memop);
imemo_count_up(memo);

struct MEMO *memo = MEMO_CAST(memop);
imemo_count_up(memo);

rb_warn("given block not used");
memo = MEMO_NEW(item, 0, 0);
return imemo_count_value(memo);
NORETURN(static void found(VALUE i, VALUE memop));

struct MEMO *memo = MEMO_CAST(memop);
MEMO_V1_SET(memo, i);

if (RTEST(enum_yield(argc, i))) {

memo = MEMO_NEW(Qundef, 0, 0);
if (rb_block_pair_yield_optimizable())
    rb_block_call2(obj, id_each, 0, 0, find_i_fast, (VALUE)memo, RB_BLOCK_NO_USE_PACKED_ARGS);
    rb_block_call2(obj, id_each, 0, 0, find_i, (VALUE)memo, RB_BLOCK_NO_USE_PACKED_ARGS);
if (!NIL_P(if_none)) {
struct MEMO *memo = MEMO_CAST(memop);
MEMO_V1_SET(memo, imemo_count_value(memo));
imemo_count_up(memo);

struct MEMO *memo = MEMO_CAST(memop);
MEMO_V1_SET(memo, imemo_count_value(memo));
imemo_count_up(memo);

enum_find_index(int argc, VALUE *argv, VALUE obj)
    func = find_index_iter_i;
    rb_warn("given block not used");
    memo = MEMO_NEW(Qnil, condition_value, 0);
if (RTEST(enum_yield(argc, i))) {

return rb_check_funcall_default(self, id_size, 0, 0, Qnil);

limit_by_enum_size(VALUE obj, long n)
    return ((unsigned long)n > limit) ? (long)limit : n;

enum_size_over_p(VALUE obj, long n)
    return ((unsigned long)n > FIX2ULONG(size));
enum_find_all(VALUE obj)

enum_filter_map(VALUE obj)

if (!RTEST(enum_yield(argc, i))) {

enum_reject(VALUE obj)

enum_collect(VALUE obj)
    int min_argc, max_argc;
    min_argc = rb_block_min_max_arity(&max_argc);
    rb_lambda_call(obj, id_each, 0, 0, collect_i, min_argc, max_argc, ary);

enum_flat_map(VALUE obj)

return enum_hashify_into(obj, argc, argv, iter, rb_hash_new());
return rb_hash_set_pair(hash, i);
return enum_hashify(obj, argc, argv, iter);
struct MEMO *memo = MEMO_CAST(p);
if (UNDEF_P(memo->v1)) {
    MEMO_V1_SET(memo, i);

struct MEMO *memo = MEMO_CAST(p);
if (UNDEF_P(memo->v1)) {
    MEMO_V1_SET(memo, i);
else if (SYMBOL_P(name = memo->u3.value)) {
    MEMO_V1_SET(memo, rb_f_send(numberof(args), args, memo->v1));

return UNDEF_P(init) ? Qnil : init;
else if (RB_BIGNUM_TYPE_P(e))
enum_inject(int argc, VALUE *argv, VALUE obj)
    op = id ? ID2SYM(id) : init;
    if (iter == inject_op_i &&
        return ary_inject_op(obj, init, op);
    memo = MEMO_NEW(init, Qnil, op);
    if (UNDEF_P(memo->v1))
        return Qnil;
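/*
 * Sketch of the fast path above (my reading of the fragment, not an
 * authoritative description): when inject/reduce is called with a bare
 * operator Symbol, op becomes ID2SYM(id) and the iterator is inject_op_i;
 * for that combination the code appears to dispatch to ary_inject_op(),
 * which can fold built-in Arrays without yielding per element. Otherwise
 * the accumulator travels in memo->v1, starting as Qundef when no initial
 * value was given, hence the UNDEF_P check before returning nil.
 */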
struct MEMO *memo = MEMO_CAST(arys);
if (RTEST(enum_yield(argc, i))) {

enum_partition(VALUE obj)

group = enum_yield(argc, i);

enum_group_by(VALUE obj)
    return enum_hashify(obj, 0, 0, group_by_i);

tally_up(st_data_t *group, st_data_t *value, st_data_t arg, int existing)
    *value = (st_data_t)tally;

rb_hash_stlike_update(hash, group, tally_up, (st_data_t)hash);
rb_enum_tally_up(hash, i);

hash = rb_to_hash_type(argv[0]);
rb_check_frozen(hash);
return enum_hashify_into(obj, 0, 0, tally_i, hash);
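/*
 * Tally sketch (hedged reading of the fragment): tally_up() is an st_table
 * update callback driven by rb_hash_stlike_update(), so each element needs
 * only one hash lookup per increment ("upsert"). The existing flag tells it
 * whether the key was already counted, and the new count is written back
 * through *value. When a destination hash is passed to tally, it is
 * converted with rb_to_hash_type() and rejected if frozen before counting
 * begins.
 */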
struct MEMO *memo = MEMO_CAST(params);
MEMO_V1_SET(memo, i);

return enum_take(obj, argv[0]);
memo = MEMO_NEW(Qnil, 0, 0);

enum_sort(VALUE obj)

#define SORT_BY_BUFSIZE 16
#define SORT_BY_UNIFORMED(num, flo, fix) (((num&1)<<2)|((flo&1)<<1)|fix)
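/*
 * SORT_BY_UNIFORMED packs three one-bit flags into a single value: bit 2 is
 * set when both numeric comparisons are unredefined, bit 1 when Float#<=> is
 * optimizable, bit 0 when Integer#<=> is optimizable. It is later fed with
 * CMP_OPTIMIZABLE(FLOAT)/CMP_OPTIMIZABLE(INTEGER), and a non-zero
 * data->primitive_uniformed selects the specialized uniform sort path below
 * instead of generic ruby_qsort with sort_by_cmp. (Reading of the macro and
 * its use; the exact bit meanings are inferred.)
 */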
uint8_t primitive_uniformed;

VALUE ary = data->ary;
v = enum_yield(argc, i);
if (RBASIC(ary)->klass) {
if (RARRAY_LEN(data->buf) != SORT_BY_BUFSIZE*2) {
if (data->primitive_uniformed) {
if (data->n == SORT_BY_BUFSIZE) {

sort_by_cmp(const void *ap, const void *bp, void *data)
    if (RBASIC(ary)->klass) {
    return OPTIMIZED_CMP(a, b);

#define uless rb_uniform_is_less
#define UNIFORM_SWAP(a,b)\
    do{struct rb_uniform_sort_data tmp = a; a = b; b = tmp;} while(0)

return rb_float_cmp(b, a) > 0;
return rb_float_cmp(a, b) < 0;
return rb_float_cmp(b, a) < 0;
return rb_float_cmp(a, b) > 0;

#define med3_val(a,b,c) (uless(a,b)?(uless(b,c)?b:uless(c,a)?a:c):(uless(c,b)?b:uless(a,c)?a:c))
if ((ptr_end - ptr_begin) < 2)
    return;
*index = ptr_begin+1;
for (; index < ptr_end; index++) {
    if (uless(tmp.v, ptr_begin->v)) {
        while (ptr_begin < j) {
    while (uless(tmp.v, (--k)->v)) {

size_t offset, size_t len)
    while ((c = (offset<<1)+1) <= len) {
        if (c < len && uless(ptr_begin[c].v, ptr_begin[c+1].v)) {
        if (!uless(tmp.v, ptr_begin[c].v))
            break;
        ptr_begin[offset] = ptr_begin[c];
    ptr_begin[offset] = tmp;

size_t n = ptr_end - ptr_begin;
for (size_t offset = n>>1; offset > 0;) {
    rb_uniform_heap_down_2(ptr_begin, --offset, n-1);
for (size_t offset = n-1; offset > 0;) {
    UNIFORM_SWAP(*ptr_begin, ptr_begin[offset]);
    rb_uniform_heap_down_2(ptr_begin, 0, --offset);

if (ptr_end - ptr_begin <= 16) {
    rb_uniform_insertionsort_2(ptr_begin, ptr_end);
rb_uniform_heapsort_2(ptr_begin, ptr_end);

VALUE x = med3_val(ptr_begin->v,
                   ptr_begin[(ptr_end - ptr_begin)>>1].v,
while (uless(i->v, x)) i++;
while (uless(x, j->v)) j--;
UNIFORM_SWAP(*i, *j);
if (ptr_end - j > 1) rb_uniform_quicksort_intro_2(j, ptr_end, d-1);
if (i - ptr_begin > 1) rb_uniform_quicksort_intro_2(ptr_begin, i, d-1);

size_t n = ptr_end - ptr_begin;
size_t d = CHAR_BIT * sizeof(n) - nlz_intptr(n) - 1;
bool sorted_flag = true;

if (rb_uniform_is_larger((ptr-1)->v, (ptr)->v)) {
    sorted_flag = false;

rb_uniform_quicksort_intro_2(ptr_begin, ptr_end, d<<1);
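/*
 * The uniform sort above reads like a standard introsort (sketch inferred
 * from the fragments, not a spec): ranges of 16 elements or fewer go to
 * rb_uniform_insertionsort_2; otherwise median-of-three quicksort
 * partitioning recurses on both halves while a depth budget is decremented,
 * and when that budget (seeded with 2*floor(log2(n)), i.e. d<<1 where
 * d = CHAR_BIT*sizeof(n) - nlz_intptr(n) - 1) runs out the range falls back
 * to rb_uniform_heapsort_2, keeping the worst case at O(n log n). A
 * preliminary pass also detects an already-sorted buffer via sorted_flag.
 */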
enum_sort_by(VALUE obj)
    RBASIC_CLEAR_CLASS(ary);
    memo = MEMO_NEW(0, 0, 0);
    data->primitive_uniformed = SORT_BY_UNIFORMED((CMP_OPTIMIZABLE(FLOAT) && CMP_OPTIMIZABLE(INTEGER)),
                                                  CMP_OPTIMIZABLE(FLOAT),
                                                  CMP_OPTIMIZABLE(INTEGER));
    if (data->primitive_uniformed) {
        sort_by_cmp, (void *)ary));
    if (RBASIC(ary)->klass) {
#define ENUMFUNC(name) argc ? name##_eqq : rb_block_given_p() ? name##_iter_i : name##_i

#define ENUM_BLOCK_CALL(name) \
    rb_block_call2(obj, id_each, 0, 0, ENUMFUNC(name), (VALUE)memo, rb_block_given_p() && rb_block_pair_yield_optimizable() ? RB_BLOCK_NO_USE_PACKED_ARGS : 0);

#define MEMO_ENUM_NEW(v1) (rb_check_arity(argc, 0, 1), MEMO_NEW((v1), (argc ? *argv : 0), 0))

#define DEFINE_ENUMFUNCS(name) \
static VALUE enum_##name##_func(VALUE result, struct MEMO *memo); \
name##_i(RB_BLOCK_CALL_FUNC_ARGLIST(i, memo)) \
    return enum_##name##_func(rb_enum_values_pack(argc, argv), MEMO_CAST(memo)); \
name##_iter_i(RB_BLOCK_CALL_FUNC_ARGLIST(i, memo)) \
    return enum_##name##_func(rb_yield_values2(argc, argv), MEMO_CAST(memo)); \
name##_eqq(RB_BLOCK_CALL_FUNC_ARGLIST(i, memo)) \
    ENUM_WANT_SVALUE(); \
    return enum_##name##_func(rb_funcallv(MEMO_CAST(memo)->v2, id_eqq, 1, &i), MEMO_CAST(memo)); \
enum_##name##_func(VALUE result, struct MEMO *memo)

#define WARN_UNUSED_BLOCK(argc) do { \
    if ((argc) > 0 && rb_block_given_p()) { \
        rb_warn("given block not used"); \
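/*
 * How the predicate machinery above fits together (a reading of the macros,
 * with elided lines of the original omitted): DEFINE_ENUMFUNCS(name) emits
 * three block callbacks that all funnel into one reducer,
 * enum_<name>_func(result, memo):
 *   - name_i      feeds the raw element (packed with rb_enum_values_pack),
 *   - name_iter_i feeds the value of the user block (rb_yield_values2),
 *   - name_eqq    feeds pattern === element (rb_funcallv of id_eqq on
 *                 memo->v2, the pattern stored by MEMO_ENUM_NEW).
 * ENUMFUNC(name) picks the right callback: pattern argument given uses
 * name_eqq, a block uses name_iter_i, otherwise name_i, and ENUM_BLOCK_CALL
 * drives it over #each. all?/any?/one?/none? differ only in their
 * enum_*_func bodies, which update memo->v1 (and typically break out of the
 * iteration early).
 */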
DEFINE_ENUMFUNCS(all)
    if (!RTEST(result)) {
        MEMO_V1_SET(memo, Qfalse);

struct MEMO *memo = MEMO_ENUM_NEW(Qtrue);
WARN_UNUSED_BLOCK(argc);
ENUM_BLOCK_CALL(all);

DEFINE_ENUMFUNCS(any)
    if (RTEST(result)) {
        MEMO_V1_SET(memo, Qtrue);

WARN_UNUSED_BLOCK(argc);
ENUM_BLOCK_CALL(any);

DEFINE_ENUMFUNCS(one)
    if (RTEST(result)) {
        if (UNDEF_P(memo->v1)) {
            MEMO_V1_SET(memo, Qtrue);
        else if (memo->v1 == Qtrue) {
            MEMO_V1_SET(memo, Qfalse);
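/*
 * one? keeps a three-state flag in memo->v1 (sketch of the logic visible
 * above): Qundef means no truthy element has been seen, the first truthy
 * element flips it to Qtrue, and a second truthy element flips it to Qfalse,
 * after which the answer cannot change. The caller later maps Qundef back to
 * false and otherwise returns the flag.
 */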
int (*cmpfunc)(const void *, const void *, void *);

if (RBASIC(data->buf)->klass) {
    data->rev ? "max" : "min",
    data->by ? "_by" : "");
nmin_cmp(const void *ap, const void *bp, void *_data)
#define rb_cmpint(cmp, a, b) rb_cmpint(cmpint_reenter_check(data, (cmp)), a, b)
    return OPTIMIZED_CMP(a, b);

nmin_block_cmp(const void *ap, const void *bp, void *_data)
    cmpint_reenter_check(data, cmp);

if (data->curlen <= data->n)
eltsize = data->by ? 2 : 1;
numelts = data->curlen;

#define GETPTR(i) (beg+(i)*eltsize)

#define SWAP(i, j) do { \
    memcpy(tmp, GETPTR(i), sizeof(VALUE)*eltsize); \
    memcpy(GETPTR(i), GETPTR(j), sizeof(VALUE)*eltsize); \
    memcpy(GETPTR(j), tmp, sizeof(VALUE)*eltsize); \

long pivot_index = left + (right-left)/2;
long num_pivots = 1;
SWAP(pivot_index, right);
pivot_index = right;
while (i <= right-num_pivots) {
    int c = data->cmpfunc(GETPTR(i), GETPTR(pivot_index), data);
    SWAP(i, right-num_pivots);
    SWAP(i, store_index);
for (i = right; right-num_pivots < i; i--) {
if (store_index <= n && n <= store_index+num_pivots)
if (n < store_index) {
    right = store_index-1;
left = store_index+num_pivots;
data->limit = RARRAY_AREF(data->buf, store_index*eltsize);
data->curlen = data->n;
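/*
 * min(n)/max(n) filtering, as it appears in the fragment above: the buffer
 * holds candidate elements (pairs of key and value when "by" is set, hence
 * eltsize = 2). Once it fills up, a quickselect-style partition walks pivots
 * until the boundary of the n best elements is found (the loop narrowing
 * left/right around store_index), truncates the buffer back to n entries,
 * and records the boundary element in data->limit so that later elements
 * can be discarded with a single comparison before ever entering the buffer.
 */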
cmpv = enum_yield(argc, i);
if (!UNDEF_P(data->limit)) {
    int c = data->cmpfunc(&cmpv, &data->limit, data);
if (data->curlen == data->bufmax) {

rb_nmin_run(VALUE obj, VALUE num, int by, int rev, int ary)
    if (LONG_MAX/4/(by ? 2 : 1) < data.n)
    data.bufmax = data.n * 4;
    data.cmpfunc = by ? nmin_cmp :
    data.cmpfunc, (void *)&data);
    data.cmpfunc, (void *)&data);
WARN_UNUSED_BLOCK(argc);
ENUM_BLOCK_CALL(one);
if (UNDEF_P(result))
    return Qfalse;

DEFINE_ENUMFUNCS(none)
    if (RTEST(result)) {
        MEMO_V1_SET(memo, Qfalse);

struct MEMO *memo = MEMO_ENUM_NEW(Qtrue);
WARN_UNUSED_BLOCK(argc);
ENUM_BLOCK_CALL(none);

struct min_t *memo = MEMO_FOR(struct min_t, args);
if (UNDEF_P(memo->min)) {
if (OPTIMIZED_CMP(i, memo->min) < 0) {

struct min_t *memo = MEMO_FOR(struct min_t, args);
if (UNDEF_P(memo->min)) {

struct min_t *m = NEW_MEMO_FOR(struct min_t, memo);
return rb_nmin_run(obj, num, 0, 0, 0);
if (UNDEF_P(result))
    return Qnil;

struct max_t *memo = MEMO_FOR(struct max_t, args);
if (UNDEF_P(memo->max)) {
if (OPTIMIZED_CMP(i, memo->max) > 0) {

struct max_t *memo = MEMO_FOR(struct max_t, args);
if (UNDEF_P(memo->max)) {

struct max_t *m = NEW_MEMO_FOR(struct max_t, memo);
return rb_nmin_run(obj, num, 0, 1, 0);
if (UNDEF_P(result))
    return Qnil;
if (UNDEF_P(memo->min)) {
n = OPTIMIZED_CMP(i, memo->min);
n = OPTIMIZED_CMP(j, memo->max);
if (UNDEF_P(memo->last)) {
n = OPTIMIZED_CMP(j, i);
minmax_i_update(i, j, memo);
if (UNDEF_P(memo->min)) {
if (UNDEF_P(memo->last)) {
minmax_ii_update(i, j, memo);

enum_minmax(VALUE obj)
    if (!UNDEF_P(m->last))
        minmax_ii_update(m->last, m->last, m);
    if (!UNDEF_P(m->last))
        minmax_i_update(m->last, m->last, m);
    if (!UNDEF_P(m->min)) {

struct MEMO *memo = MEMO_CAST(args);
v = enum_yield(argc, i);
if (UNDEF_P(memo->v1)) {
    MEMO_V1_SET(memo, v);
    MEMO_V2_SET(memo, i);
else if (OPTIMIZED_CMP(v, memo->v1) < 0) {
    MEMO_V1_SET(memo, v);
    MEMO_V2_SET(memo, i);

enum_min_by(int argc, VALUE *argv, VALUE obj)
    if (argc && !NIL_P(num = argv[0]))
        return rb_nmin_run(obj, num, 1, 0, 0);

struct MEMO *memo = MEMO_CAST(args);
v = enum_yield(argc, i);
if (UNDEF_P(memo->v1)) {
    MEMO_V1_SET(memo, v);
    MEMO_V2_SET(memo, i);
else if (OPTIMIZED_CMP(v, memo->v1) > 0) {
    MEMO_V1_SET(memo, v);
    MEMO_V2_SET(memo, i);

enum_max_by(int argc, VALUE *argv, VALUE obj)
    if (argc && !NIL_P(num = argv[0]))
        return rb_nmin_run(obj, num, 1, 1, 0);
if (UNDEF_P(memo->min_bv)) {
if (OPTIMIZED_CMP(v1, memo->min_bv) < 0) {
if (OPTIMIZED_CMP(v2, memo->max_bv) > 0) {
vi = enum_yield(argc, i);
if (UNDEF_P(memo->last_bv)) {
n = OPTIMIZED_CMP(vj, vi);
minmax_by_i_update(vi, vj, i, j, memo);

enum_minmax_by(VALUE obj)
    if (!UNDEF_P(m->last_bv))
        minmax_by_i_update(m->last_bv, m->last_bv, m->last, m->last, m);

struct MEMO *memo = MEMO_CAST(args);
MEMO_V2_SET(memo, Qtrue);

struct MEMO *memo = MEMO_NEW(val, Qfalse, 0);

struct vm_ifunc *ifunc = rb_current_ifunc();
ifunc->data = (const void *)rb_int_succ(index);
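/*
 * each_with_index keeps its running index in the block's own ifunc: the two
 * lines above fetch the current internal block (rb_current_ifunc()) and
 * store the successor of the index back into ifunc->data. Because
 * rb_int_succ() is used rather than a C increment, the counter overflows
 * transparently from Fixnum into Bignum. (Sketch of intent inferred from the
 * fragment.)
 */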
enum_each_with_index(int argc, VALUE *argv, VALUE obj)

enum_reverse_each(int argc, VALUE *argv, VALUE obj)
    ary = enum_to_a(argc, argv, obj);
    enum_yield(argc, i);

enum_each_entry(int argc, VALUE *argv, VALUE obj)

add_int(VALUE x, long n)

div_int(VALUE x, long n)

#define dont_recycle_block_arg(arity) ((arity) == 1 || (arity) < 0)

struct MEMO *memo = MEMO_CAST(m);
VALUE ary = memo->v1;
long size = memo->u3.cnt;

size = enum_size(obj, 0, 0);
n = add_int(size, slice_size-1);
return div_int(n, slice_size);

size = limit_by_enum_size(obj, size);
arity = rb_block_arity();
memo = MEMO_NEW(ary, dont_recycle_block_arg(arity), size);

struct MEMO *memo = MEMO_CAST(args);
VALUE ary = memo->v1;
long size = memo->u3.cnt;

size = enum_size(obj, 0, 0);
n = add_int(size, 1 - cons_size);
return (OPTIMIZED_CMP(n, zero) == -1) ? zero : n;
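/*
 * The two size callbacks above compute enumerator sizes (reading of the
 * fragments): each_slice(k) reports ceil(n/k), written as (n + k - 1) / k
 * with add_int/div_int so the arithmetic survives Bignum sizes, while
 * each_cons(k) reports max(n - k + 1, 0), clamped to zero via the
 * OPTIMIZED_CMP comparison against zero.
 */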
arity = rb_block_arity();
if (enum_size_over_p(obj, size))
    return obj;
memo = MEMO_NEW(rb_ary_new2(size), dont_recycle_block_arg(arity), size);

rb_block_call(obj, id_each, 0, 0, each_with_object_i, memo);

struct MEMO *memo = (struct MEMO *)memoval;
VALUE result = memo->v1;
VALUE args = memo->v2;
long n = memo->u3.cnt++;
if (NIL_P(result)) {
    enum_yield_array(tmp);

struct MEMO *memo = (struct MEMO *)memoval;
VALUE result = memo->v1;
VALUE args = memo->v2;
if (UNDEF_P(v[0])) {
if (NIL_P(result)) {
    enum_yield_array(tmp);

for (i=0; i<argc; i++) {
static const VALUE sym_each = STATIC_ID2SYM(id_each);
for (i=0; i<argc; i++) {
    argv[i] = rb_funcallv(argv[i], conv, 1, &sym_each);
memo = MEMO_NEW(result, args, 0);

struct MEMO *memo = MEMO_CAST(args);
memo = MEMO_NEW(result, 0, len);

enum_take_while(VALUE obj)

struct MEMO *memo = MEMO_CAST(args);
if (memo->u3.cnt == 0) {
memo = MEMO_NEW(result, 0, len);

struct MEMO *memo = MEMO_CAST(args);
if (!memo->u3.state && !RTEST(enum_yield(argc, i))) {
    memo->u3.state = TRUE;
if (memo->u3.state) {

enum_drop_while(VALUE obj)
    memo = MEMO_NEW(result, 0, FALSE);
enum_yield(argc, i);

size = enum_size(self, args, 0);
if (NIL_P(size) || FIXNUM_ZERO_P(size))
    return size;
if (mul <= 0)
    return INT2FIX(0);

if (!argc || NIL_P(nv = argv[0])) {
if (n <= 0)
    return Qnil;
RBASIC_CLEAR_CLASS(ary);
while (n < 0 || 0 < --n) {
    for (i=0; i<len; i++) {

v = rb_funcallv(argp->categorize, id_call, 1, &i);
if (!NIL_P(argp->prev_value)) {
    argp->prev_value = argp->prev_elts = Qnil;
else if (NIL_P(v) || v == separator) {
    if (!NIL_P(argp->prev_value)) {
        argp->prev_value = argp->prev_elts = Qnil;
if (NIL_P(argp->prev_value)) {
    argp->prev_value = v;
if (rb_equal(argp->prev_value, v)) {
argp->prev_value = v;

memo->prev_value = Qnil;
memo->prev_elts = Qnil;
if (!NIL_P(memo->prev_elts)) {
enum_chunk(VALUE enumerable)

if (!NIL_P(argp->sep_pat))
    header_p = rb_funcallv(argp->sep_pat, id_eqq, 1, &i);
    header_p = rb_funcallv(argp->sep_pred, id_call, 1, &i);
if (RTEST(header_p)) {
    if (!NIL_P(argp->prev_elts))
        rb_funcallv(argp->yielder, id_lshift, 1, &argp->prev_elts);
if (NIL_P(argp->prev_elts))

memo->prev_elts = Qnil;
rb_block_call(enumerable, id_each, 0, 0, slicebefore_ii, arg);
if (!NIL_P(memo->prev_elts))
    rb_funcallv(memo->yielder, id_lshift, 1, &memo->prev_elts);
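/*
 * slice_before runs as a small buffering state machine (as far as the
 * fragment shows): each incoming element is tested against the separator,
 * either sep_pat === i or sep_pred.call(i). When the test is truthy and a
 * buffer already exists, the buffered slice is pushed to the enumerator's
 * yielder with <<, and a fresh buffer is started; otherwise the element is
 * appended to prev_elts. After #each finishes, any non-empty remainder is
 * flushed the same way. slice_after and slice_when below follow the same
 * buffer-and-flush pattern with different trigger conditions.
 */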
enum_slice_before(int argc, VALUE *argv, VALUE enumerable)
    rb_error_arity(argc, 0, 0);

#define UPDATE_MEMO ((void)(memo = MEMO_FOR(struct sliceafter_arg, _memo)))

if (NIL_P(memo->prev_elts)) {
if (NIL_P(memo->pred)) {
rb_funcallv(memo->yielder, id_lshift, 1, &memo->prev_elts);
memo->prev_elts = Qnil;

memo->prev_elts = Qnil;
rb_block_call(enumerable, id_each, 0, 0, sliceafter_ii, arg);
if (!NIL_P(memo->prev_elts))
    rb_funcallv(memo->yielder, id_lshift, 1, &memo->prev_elts);

enum_slice_after(int argc, VALUE *argv, VALUE enumerable)

#define UPDATE_MEMO ((void)(memo = MEMO_FOR(struct slicewhen_arg, _memo)))

if (UNDEF_P(memo->prev_elt)) {
args[0] = memo->prev_elt;
rb_funcallv(memo->yielder, id_lshift, 1, &memo->prev_elts);
memo->prev_elts = Qnil;
rb_block_call(enumerable, id_each, 0, 0, slicewhen_ii, arg);
if (!NIL_P(memo->prev_elts))
    rb_funcallv(memo->yielder, id_lshift, 1, &memo->prev_elts);

enum_slice_when(VALUE enumerable)

enum_chunk_while(VALUE enumerable)

memo->v = rb_fix_plus(LONG2FIX(memo->n), memo->v);
switch (TYPE(memo->r)) {
  case T_RATIONAL: memo->v = rb_rational_plus(memo->r, memo->v); break;
if (UNDEF_P(memo->r)) {
memo->r = rb_rational_plus(memo->r, i);

memo->float_value = 0;
sum_iter_some_value(i, memo);

else if (! isfinite(x)) {
    if (isinf(x) && isinf(f) && signbit(x) != signbit(f)) {
else if (isinf(f)) {
if (fabs(f) >= fabs(x)) {

if (memo->block_given) {
if (memo->float_value) {
    sum_iter_Kahan_Babuska(i, memo);
else switch (TYPE(memo->v)) {
  default: sum_iter_some_value(i, memo); return;
  case T_FLOAT: sum_iter_Kahan_Babuska(i, memo); return;
  case T_FIXNUM: sum_iter_fixnum(i, memo); return;
  case T_BIGNUM: sum_iter_bignum(i, memo); return;
  case T_RATIONAL: sum_iter_rational(i, memo); return;
sum_iter_normalize_memo(memo);
memo->float_value = 1;
sum_iter_Kahan_Babuska(i, memo);
sum_iter_normalize_memo(memo);
sum_iter_some_value(i, memo);
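/*
 * Float summation uses Kahan-Babuska (Neumaier) compensated summation, as
 * the fabs(f) >= fabs(x) branch above suggests: the running sum lives in
 * memo->f and the accumulated rounding error in memo->c, with the smaller
 * of the two addends contributing its lost low-order bits to c on every
 * step; the final result is reported as f + c. The isinf/signbit checks
 * special-case mixing +Infinity and -Infinity so the usual NaN result is
 * produced instead of a bogus compensation. (Sketch inferred from the
 * fragments and the DBL2NUM(memo.f + memo.c) return further down.)
 */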
if (rb_int_ge(end, beg)) {
    a = rb_int_plus(rb_int_minus(end, beg), LONG2FIX(1));
    a = rb_int_mul(a, rb_int_plus(end, beg));
    return rb_int_plus(init, a);
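/*
 * Integer ranges are summed in closed form rather than by iteration:
 * (end - beg + 1) * (end + beg) is the arithmetic-series formula
 * n * (first + last), and the elided line between the multiplication and
 * the return presumably halves that product before adding init, giving
 * init + n*(first+last)/2. Everything goes through rb_int_* so Bignum
 * endpoints work unchanged.
 */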
if (!memo.block_given && !memo.float_value &&
    (FIXNUM_P(beg) || RB_BIGNUM_TYPE_P(beg)) &&
    (FIXNUM_P(end) || RB_BIGNUM_TYPE_P(end))) {
    return int_range_sum(beg, end, excl, memo.v);

hash_sum(obj, &memo);
if (memo.float_value) {
    return DBL2NUM(memo.f + memo.c);
memo.v = rb_fix_plus(LONG2FIX(memo.n), memo.v);
if (!UNDEF_P(memo.r)) {
    memo.v = rb_rational_plus(memo.r, memo.v);

rb_hash_add_new_element(hash, i, i);

enum_uniq(VALUE obj)
    ret = rb_hash_values(hash);

enum_compact(VALUE obj)

Init_Enumerable(void)
    id_slicebefore_enumerable = rb_intern_const("slicebefore_enumerable");