2#include "internal/gc.h"
3#include "internal/concurrent_set.h"
4#include "ruby_atomic.h"
8enum concurrent_set_special_values {
10 CONCURRENT_SET_DELETED,
12 CONCURRENT_SET_SPECIAL_VALUE_COUNT
22 unsigned int capacity;
23 unsigned int deleted_entries;
29concurrent_set_free(
void *ptr)
36concurrent_set_size(
const void *ptr)
49concurrent_set_mark(
void *ptr)
57 .dmark = concurrent_set_mark,
58 .dfree = concurrent_set_free,
59 .dsize = concurrent_set_size,
62 .flags = RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_EMBEDDABLE
72 set->capacity = capacity;
77rb_concurrent_set_size(
VALUE set_obj)
93 RUBY_ASSERT((set->capacity & (set->capacity - 1)) == 0);
95 probe->mask = set->capacity - 1;
96 probe->idx = hash & probe->mask;
104 probe->idx = (probe->idx + probe->d) & probe->mask;
109concurrent_set_try_resize_without_locking(
VALUE old_set_obj,
VALUE *set_obj_ptr)
112 if (RUBY_ATOMIC_VALUE_LOAD(*set_obj_ptr) != old_set_obj) {
116 struct concurrent_set *old_set = RTYPEDDATA_GET_DATA(old_set_obj);
120 int expected_size =
RUBY_ATOMIC_LOAD(old_set->size) - old_set->deleted_entries;
123 int old_capacity = old_set->capacity;
124 int new_capacity = old_capacity * 2;
125 if (new_capacity > expected_size * 8) {
126 new_capacity = old_capacity / 2;
128 else if (new_capacity > expected_size * 4) {
129 new_capacity = old_capacity;
133 VALUE new_set_obj = rb_concurrent_set_new(old_set->funcs, new_capacity);
134 struct concurrent_set *new_set = RTYPEDDATA_GET_DATA(new_set_obj);
136 for (
int i = 0; i < old_capacity; i++) {
141 if (key < CONCURRENT_SET_SPECIAL_VALUE_COUNT)
continue;
144 VALUE hash = RUBY_ATOMIC_VALUE_LOAD(entry->hash);
148 hash = old_set->funcs->hash(key);
154 int idx = concurrent_set_probe_start(&probe, new_set, hash);
159 if (entry->key == CONCURRENT_SET_EMPTY) {
162 RUBY_ASSERT(new_set->size <= new_set->capacity / 2);
170 RUBY_ASSERT(entry->key >= CONCURRENT_SET_SPECIAL_VALUE_COUNT);
173 idx = concurrent_set_probe_next(&probe);
183concurrent_set_try_resize(
VALUE old_set_obj,
VALUE *set_obj_ptr)
186 concurrent_set_try_resize_without_locking(old_set_obj, set_obj_ptr);
191rb_concurrent_set_find(
VALUE *set_obj_ptr,
VALUE key)
193 RUBY_ASSERT(key >= CONCURRENT_SET_SPECIAL_VALUE_COUNT);
199 set_obj = RUBY_ATOMIC_VALUE_LOAD(*set_obj_ptr);
206 hash = set->funcs->hash(key);
211 int idx = concurrent_set_probe_start(&probe, set, hash);
215 VALUE curr_key = RUBY_ATOMIC_VALUE_LOAD(entry->key);
218 case CONCURRENT_SET_EMPTY:
220 case CONCURRENT_SET_DELETED:
222 case CONCURRENT_SET_MOVED:
228 VALUE curr_hash = RUBY_ATOMIC_VALUE_LOAD(entry->hash);
229 if (curr_hash != 0 && curr_hash != hash)
break;
231 if (UNLIKELY(!
RB_SPECIAL_CONST_P(curr_key) && rb_objspace_garbage_object_p(curr_key))) {
238 if (set->funcs->cmp(key, curr_key)) {
248 idx = concurrent_set_probe_next(&probe);
253rb_concurrent_set_find_or_insert(
VALUE *set_obj_ptr,
VALUE key,
void *data)
255 RUBY_ASSERT(key >= CONCURRENT_SET_SPECIAL_VALUE_COUNT);
257 bool inserting =
false;
262 set_obj = RUBY_ATOMIC_VALUE_LOAD(*set_obj_ptr);
269 hash = set->funcs->hash(key);
274 int idx = concurrent_set_probe_start(&probe, set, hash);
278 VALUE curr_key = RUBY_ATOMIC_VALUE_LOAD(entry->key);
281 case CONCURRENT_SET_EMPTY: {
284 key = set->funcs->create(key, data);
291 if (UNLIKELY(prev_size > set->capacity / 2)) {
292 concurrent_set_try_resize(set_obj, set_obj_ptr);
298 if (curr_key == CONCURRENT_SET_EMPTY) {
312 case CONCURRENT_SET_DELETED:
314 case CONCURRENT_SET_MOVED:
320 VALUE curr_hash = RUBY_ATOMIC_VALUE_LOAD(entry->hash);
321 if (curr_hash != 0 && curr_hash != hash)
break;
323 if (UNLIKELY(!
RB_SPECIAL_CONST_P(curr_key) && rb_objspace_garbage_object_p(curr_key))) {
330 if (set->funcs->cmp(key, curr_key)) {
338 if (set->funcs->free) set->funcs->free(key);
348 idx = concurrent_set_probe_next(&probe);
353rb_concurrent_set_delete_by_identity(
VALUE set_obj,
VALUE key)
360 VALUE hash = set->funcs->hash(key);
363 int idx = concurrent_set_probe_start(&probe, set, hash);
367 VALUE curr_key = RUBY_ATOMIC_VALUE_LOAD(entry->key);
370 case CONCURRENT_SET_EMPTY:
373 case CONCURRENT_SET_DELETED:
375 case CONCURRENT_SET_MOVED:
376 rb_bug(
"rb_concurrent_set_delete_by_identity: moved entry");
379 if (key == curr_key) {
380 entry->key = CONCURRENT_SET_DELETED;
381 set->deleted_entries++;
387 idx = concurrent_set_probe_next(&probe);
392rb_concurrent_set_foreach_with_replace(
VALUE set_obj,
int (*callback)(
VALUE *key,
void *data),
void *data)
399 for (
unsigned int i = 0; i < set->capacity; i++) {
401 VALUE key = set->entries[i].key;
404 case CONCURRENT_SET_EMPTY:
405 case CONCURRENT_SET_DELETED:
407 case CONCURRENT_SET_MOVED:
408 rb_bug(
"rb_concurrent_set_foreach_with_replace: moved entry");
411 int ret = callback(&entry->key, data);
416 set->entries[i].key = CONCURRENT_SET_DELETED;
417 set->deleted_entries++;
#define RUBY_ASSERT(...)
Asserts that the given expression is truthy if and only if RUBY_DEBUG is truthy.
#define RUBY_ATOMIC_VALUE_CAS(var, oldval, newval)
Identical to RUBY_ATOMIC_CAS, except it expects its arguments are VALUE.
#define RUBY_ATOMIC_VALUE_SET(var, val)
Identical to RUBY_ATOMIC_SET, except it expects its arguments are VALUE.
std::atomic< unsigned > rb_atomic_t
Type that is eligible for atomic operations.
#define RUBY_ATOMIC_FETCH_ADD(var, val)
Atomically replaces the value pointed by var with the result of addition of val to the old value of var.
#define RUBY_ATOMIC_VALUE_EXCHANGE(var, val)
Identical to RUBY_ATOMIC_EXCHANGE, except it expects its arguments are VALUE.
#define RUBY_ATOMIC_DEC(var)
Atomically decrements the value pointed by var.
#define RUBY_ATOMIC_LOAD(var)
Atomic load.
#define xfree
Old name of ruby_xfree.
#define ZALLOC_N
Old name of RB_ZALLOC_N.
#define RB_GC_GUARD(v)
Prevents premature destruction of local objects.
#define TypedData_Make_Struct(klass, type, data_type, sval)
Identical to TypedData_Wrap_Struct, except it allocates a new data region internally instead of taking an existing one.
static bool RB_SPECIAL_CONST_P(VALUE obj)
Checks if the given object is of enum ruby_special_consts.
This is the struct that holds necessary info for a struct.
const char * wrap_struct_name
Name of structs of this kind.
uintptr_t VALUE
Type that represents a Ruby object.