#include "gc/mmtk/mmtk.h"
#include "ccan/list/list.h"
#include <sys/sysctl.h>

    size_t total_allocated_objects;

    struct ccan_list_head ractor_caches;
    unsigned long live_ractor_cache_count;

    pthread_mutex_t mutex;

    pthread_cond_t cond_world_stopped;
    pthread_cond_t cond_world_started;
    size_t start_the_world_count;

    struct rb_gc_vm_context vm_context;

    unsigned int fork_hook_vm_lock_lev;

    struct ccan_list_node list_node;

    MMTk_Mutator *mutator;

    MMTK_FINAL_JOB_FINALIZE,

        VALUE finalizer_array;

#ifdef RB_THREAD_LOCAL_SPECIFIER

# error We currently need language-supported TLS

    rb_mmtk_gc_thread_tls = gc_thread_tls;
rb_mmtk_is_mutator(void)

rb_mmtk_stop_the_world(void)

    if ((err = pthread_mutex_lock(&objspace->mutex)) != 0) {
        rb_bug("ERROR: cannot lock objspace->mutex: %s", strerror(err));

    if ((err = pthread_mutex_unlock(&objspace->mutex)) != 0) {
        rb_bug("ERROR: cannot release objspace->mutex: %s", strerror(err));

rb_mmtk_resume_mutators(void)

    if ((err = pthread_mutex_lock(&objspace->mutex)) != 0) {
        rb_bug("ERROR: cannot lock objspace->mutex: %s", strerror(err));

    pthread_cond_broadcast(&objspace->cond_world_started);

    if ((err = pthread_mutex_unlock(&objspace->mutex)) != 0) {
        rb_bug("ERROR: cannot release objspace->mutex: %s", strerror(err));

    size_t starting_gc_count = objspace->gc_count;

    int lock_lev = RB_GC_VM_LOCK();

    if ((err = pthread_mutex_lock(&objspace->mutex)) != 0) {
        rb_bug("ERROR: cannot lock objspace->mutex: %s", strerror(err));

    if (objspace->gc_count == starting_gc_count) {

        rb_gc_initialize_vm_context(&objspace->vm_context);

        mutator->gc_mutator_p = true;

        clock_gettime(CLOCK_MONOTONIC, &gc_start_time);

        rb_gc_save_machine_context();

        pthread_cond_broadcast(&objspace->cond_world_stopped);

        clock_gettime(CLOCK_MONOTONIC, &gc_end_time);

            (gc_end_time.tv_sec - gc_start_time.tv_sec) * (1000 * 1000 * 1000) +
            (gc_end_time.tv_nsec - gc_start_time.tv_nsec);

    if ((err = pthread_mutex_unlock(&objspace->mutex)) != 0) {
        rb_bug("ERROR: cannot release objspace->mutex: %s", strerror(err));

    RB_GC_VM_UNLOCK(lock_lev);
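/*
 * Annotation: the fragments above show the mutator side of the stop-the-world
 * handshake. A mutator blocking for GC appears to take objspace->mutex, flag
 * its mutator (gc_mutator_p), save its machine context so the GC can scan its
 * stack, broadcast cond_world_stopped, and wait while objspace->gc_count still
 * equals starting_gc_count; rb_mmtk_resume_mutators broadcasts
 * cond_world_started to release the waiters. GC time is accumulated in
 * nanoseconds:
 *
 *     (sec_delta * 1000 * 1000 * 1000) + nsec_delta
 *
 * which is why rb_gc_impl_stat below divides total_gc_time by (1000 * 1000)
 * to report milliseconds.
 */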
rb_mmtk_before_updating_jit_code(void)

    rb_gc_before_updating_jit_code();

rb_mmtk_after_updating_jit_code(void)

    rb_gc_after_updating_jit_code();

rb_mmtk_number_of_mutators(void)

    return objspace->live_ractor_cache_count;

rb_mmtk_get_mutators(void (*visit_mutator)(MMTk_Mutator *mutator, void *data), void *data)

    ccan_list_for_each(&objspace->ractor_caches, ractor_cache, list_node) {
        visit_mutator(ractor_cache->mutator, data);

rb_mmtk_scan_gc_roots(void)

    rb_gc_worker_thread_set_vm_context(&objspace->vm_context);

    rb_gc_worker_thread_unset_vm_context(&objspace->vm_context);
pin_value(st_data_t key, st_data_t value, st_data_t data)

    rb_gc_impl_mark_and_pin((void *)data, (VALUE)value);

rb_mmtk_scan_objspace(void)

    if (objspace->finalizer_table != NULL) {

    while (job != NULL) {

          case MMTK_FINAL_JOB_DFREE:

          case MMTK_FINAL_JOB_FINALIZE:
            rb_gc_impl_mark(objspace, job->as.finalize.finalizer_array);

            rb_bug("rb_mmtk_scan_objspace: unknown final job type %d", job->kind);
rb_mmtk_move_obj_during_marking(MMTk_ObjectReference from, MMTk_ObjectReference to)

    rb_gc_move_obj_during_marking((VALUE)from, (VALUE)to);

rb_mmtk_update_object_references(MMTk_ObjectReference mmtk_object)

    rb_gc_update_object_references(rb_gc_get_objspace(), object);

rb_mmtk_call_gc_mark_children(MMTk_ObjectReference object)

    rb_gc_mark_children(rb_gc_get_objspace(), (VALUE)object);

rb_mmtk_handle_weak_references(MMTk_ObjectReference mmtk_object, bool moving)

    rb_gc_handle_weak_references(object);

        rb_gc_update_object_references(rb_gc_get_objspace(), object);

rb_mmtk_call_obj_free(MMTk_ObjectReference object)

    rb_gc_worker_thread_set_vm_context(&objspace->vm_context);

    rb_gc_worker_thread_unset_vm_context(&objspace->vm_context);
rb_mmtk_vm_live_bytes(void)

    RUBY_ASSERT(mmtk_is_reachable((MMTk_ObjectReference)table));

    job->next = objspace->finalizer_jobs;
    job->kind = MMTK_FINAL_JOB_FINALIZE;
    job->as.finalize.finalizer_array = table;

rb_mmtk_update_finalizer_table_i(st_data_t key, st_data_t value, st_data_t data)

    RUBY_ASSERT(mmtk_is_reachable((MMTk_ObjectReference)value));

    if (!mmtk_is_reachable((MMTk_ObjectReference)key)) {

rb_mmtk_update_finalizer_table(void)

    st_foreach(objspace->finalizer_table, rb_mmtk_update_finalizer_table_i, (st_data_t)objspace);

rb_mmtk_global_tables_count(void)

    return RB_GC_VM_WEAK_TABLE_COUNT;
static inline VALUE rb_mmtk_call_object_closure(VALUE obj, bool pin);

rb_mmtk_update_global_tables_i(VALUE val, void *data)

    if (!mmtk_is_reachable((MMTk_ObjectReference)val)) {

    if (rb_mmtk_call_object_closure(val, false) != val) {

rb_mmtk_update_global_tables_replace_i(VALUE *ptr, void *data)

    *ptr = rb_mmtk_call_object_closure(*ptr, false);

rb_mmtk_update_global_tables(int table)

    rb_gc_vm_weak_table_foreach(
        rb_mmtk_update_global_tables_i,
        rb_mmtk_update_global_tables_replace_i,
        (enum rb_gc_vm_weak_tables)table

rb_mmtk_special_const_p(MMTk_ObjectReference object)

rb_mmtk_mutator_thread_panic_handler(void)

    rb_bug("Ruby mutator thread panicked");
    rb_mmtk_init_gc_worker_thread,

    rb_mmtk_stop_the_world,
    rb_mmtk_resume_mutators,
    rb_mmtk_block_for_gc,
    rb_mmtk_before_updating_jit_code,
    rb_mmtk_after_updating_jit_code,
    rb_mmtk_number_of_mutators,
    rb_mmtk_get_mutators,
    rb_mmtk_scan_gc_roots,
    rb_mmtk_scan_objspace,
    rb_mmtk_move_obj_during_marking,
    rb_mmtk_update_object_references,
    rb_mmtk_call_gc_mark_children,
    rb_mmtk_handle_weak_references,
    rb_mmtk_call_obj_free,
    rb_mmtk_vm_live_bytes,
    rb_mmtk_update_global_tables,
    rb_mmtk_global_tables_count,
    rb_mmtk_update_finalizer_table,
    rb_mmtk_special_const_p,
    rb_mmtk_mutator_thread_panic_handler,
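/*
 * Annotation: the function pointers above populate the ruby_upcalls table.
 * MMTk's Rust core calls back through this table whenever it needs VM
 * cooperation: stopping and restarting the world, scanning roots and the
 * objspace, marking children, handling weak references, running obj_free,
 * and so on. The table is handed to the binding once, at initialization
 * time, as seen below in rb_gc_impl_objspace_alloc:
 *
 *     mmtk_init_binding(builder, NULL, &ruby_upcalls);
 */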
#define RB_MMTK_HEAP_LIMIT_PERC 80
#define RB_MMTK_DEFAULT_HEAP_MIN (1024 * 1024)
#define RB_MMTK_DEFAULT_HEAP_MAX (rb_mmtk_system_physical_memory() / 100 * RB_MMTK_HEAP_LIMIT_PERC)
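/*
 * Annotation: with these defaults the dynamic heap is allowed to range from
 * 1 MiB up to 80% of physical memory. For example, on a machine with 16 GiB
 * of RAM:
 *
 *     16 GiB / 100 * 80 = 12.8 GiB maximum heap
 */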
    RB_MMTK_DYNAMIC_HEAP,

rb_mmtk_builder_init(void)

    MMTk_Builder *builder = mmtk_builder_default();

rb_gc_impl_objspace_alloc(void)

    MMTk_Builder *builder = rb_mmtk_builder_init();
    mmtk_init_binding(builder, NULL, &ruby_upcalls);

    return calloc(1, sizeof(struct objspace));

static void gc_run_finalizers(void *data);

rb_gc_impl_objspace_init(void *objspace_ptr)

    objspace->finalizer_table = st_init_numtable();

    ccan_list_head_init(&objspace->ractor_caches);

    objspace->mutex = (pthread_mutex_t)PTHREAD_MUTEX_INITIALIZER;
    objspace->cond_world_stopped = (pthread_cond_t)PTHREAD_COND_INITIALIZER;
    objspace->cond_world_started = (pthread_cond_t)PTHREAD_COND_INITIALIZER;

rb_gc_impl_objspace_free(void *objspace_ptr)

rb_gc_impl_ractor_cache_alloc(void *objspace_ptr, void *ractor)

    if (objspace->live_ractor_cache_count == 0) {
        mmtk_initialize_collection(ractor);

    objspace->live_ractor_cache_count++;

    ccan_list_add(&objspace->ractor_caches, &cache->list_node);

    cache->mutator = mmtk_bind_mutator(cache);
    cache->bump_pointer = mmtk_get_bump_pointer_allocator(cache->mutator);
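/*
 * Annotation: each Ractor gets its own cache entry on objspace->ractor_caches.
 * The first cache to be allocated also appears to start MMTk's collection
 * machinery (mmtk_initialize_collection). Binding a mutator gives the Ractor
 * a per-thread MMTk_Mutator, and the bump-pointer allocator handle is cached
 * so the allocation fast path below can bump a cursor without calling into
 * the Rust side.
 */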
rb_gc_impl_ractor_cache_free(void *objspace_ptr, void *cache_ptr)

    ccan_list_del(&cache->list_node);

    objspace->live_ractor_cache_count--;

    mmtk_destroy_mutator(cache->mutator);

void rb_gc_impl_set_params(void *objspace_ptr) { }

static VALUE gc_verify_internal_consistency(VALUE self) { return Qnil; }

#define MMTK_HEAP_COUNT 6
#define MMTK_MAX_OBJ_SIZE 640

static size_t heap_sizes[MMTK_HEAP_COUNT + 1] = {
    32, 40, 80, 160, 320, MMTK_MAX_OBJ_SIZE, 0
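/*
 * Annotation: heap_sizes lists the size classes in bytes, terminated by 0.
 * Requested allocation sizes are rounded up to the next class in
 * rb_gc_impl_new_obj (e.g. a 48-byte request is served from the 80-byte
 * class), and anything above MMTK_MAX_OBJ_SIZE hits rb_bug("too big"). The
 * same table drives rb_gc_impl_heap_id_for_size and
 * rb_gc_impl_size_allocatable_p further down.
 */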
    VALUE gc_constants = rb_hash_new();

    rb_hash_aset(gc_constants, ID2SYM(rb_intern("RVALUE_OVERHEAD")), INT2NUM(0));
    rb_hash_aset(gc_constants, ID2SYM(rb_intern("RVARGC_MAX_ALLOCATE_SIZE")), LONG2FIX(MMTK_MAX_OBJ_SIZE));

    rb_hash_aset(gc_constants, ID2SYM(rb_intern("SIZE_POOL_COUNT")), LONG2FIX(5));

    rb_define_const(rb_mGC, "INTERNAL_CONSTANTS", gc_constants);

rb_gc_impl_heap_sizes(void *objspace_ptr)

rb_mmtk_obj_free_iter_wrapper(VALUE obj, void *data)

    rb_gc_obj_free_vm_weak_references(obj);

rb_gc_impl_shutdown_free_objects(void *objspace_ptr)

    mmtk_set_gc_enabled(false);
    each_object(objspace_ptr, rb_mmtk_obj_free_iter_wrapper, objspace_ptr);
    mmtk_set_gc_enabled(true);

rb_gc_impl_start(void *objspace_ptr, bool full_mark, bool immediate_mark, bool immediate_sweep, bool compact)

    mmtk_handle_user_collection_request(rb_gc_get_ractor_newobj_cache(), true, full_mark);

rb_gc_impl_during_gc_p(void *objspace_ptr)

rb_gc_impl_prepare_heap_i(MMTk_ObjectReference obj, void *d)

    rb_gc_prepare_heap_process_object((VALUE)obj);

rb_gc_impl_prepare_heap(void *objspace_ptr)

    mmtk_enumerate_objects(rb_gc_impl_prepare_heap_i, NULL);

rb_gc_impl_gc_enable(void *objspace_ptr)

    mmtk_set_gc_enabled(true);

rb_gc_impl_gc_disable(void *objspace_ptr, bool finish_current_gc)

    mmtk_set_gc_enabled(false);

rb_gc_impl_gc_enabled_p(void *objspace_ptr)

    return mmtk_gc_enabled_p();

rb_gc_impl_stress_set(void *objspace_ptr, VALUE flag)

rb_gc_impl_stress_get(void *objspace_ptr)

rb_gc_impl_config_get(void *objspace_ptr)

    VALUE hash = rb_hash_new();

    size_t heap_min = mmtk_heap_min();

rb_gc_impl_config_set(void *objspace_ptr, VALUE hash)

    if (bump_pointer == NULL) return 0;

    uintptr_t new_cursor = bump_pointer->cursor + size;

    if (new_cursor > bump_pointer->limit) {

    bump_pointer->cursor = new_cursor;
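/*
 * Annotation: this is the thread-local allocation fast path. It bumps the
 * cached cursor by the (already size-class-rounded) request and only succeeds
 * while the new cursor stays within the current buffer's limit; returning 0
 * on overflow, or when no bump-pointer allocator is cached, makes
 * rb_gc_impl_new_obj fall back to the slow path, mmtk_alloc, which goes
 * through the MMTk mutator.
 */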
rb_gc_impl_new_obj(void *objspace_ptr, void *cache_ptr, VALUE klass, VALUE flags, bool wb_protected, size_t alloc_size)

#define MMTK_ALLOCATION_SEMANTICS_DEFAULT 0

    if (alloc_size > MMTK_MAX_OBJ_SIZE) rb_bug("too big");
    for (int i = 0; i < MMTK_HEAP_COUNT; i++) {
        if (alloc_size == heap_sizes[i]) break;
        if (alloc_size < heap_sizes[i]) {
            alloc_size = heap_sizes[i];

        mmtk_handle_user_collection_request(ractor_cache, false, false);

    alloc_size += sizeof(VALUE);

    VALUE *alloc_obj = (VALUE *)rb_mmtk_alloc_fast_path(objspace, ractor_cache, alloc_size);

        alloc_obj = mmtk_alloc(ractor_cache->mutator, alloc_size, MMTk_MIN_OBJ_ALIGN, 0, MMTK_ALLOCATION_SEMANTICS_DEFAULT);

    alloc_obj[-1] = alloc_size - sizeof(VALUE);
    alloc_obj[0] = flags;
    alloc_obj[1] = klass;

    mmtk_post_alloc(ractor_cache->mutator, (void*)alloc_obj, alloc_size, MMTK_ALLOCATION_SEMANTICS_DEFAULT);

        mmtk_add_obj_free_candidate(alloc_obj);

    objspace->total_allocated_objects++;

    return (VALUE)alloc_obj;
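/*
 * Annotation: one VALUE-sized word is reserved in front of every object
 * (alloc_size += sizeof(VALUE)) and the usable slot size is written into it
 * (alloc_obj[-1]); the object body then starts with the flags and klass
 * words. That prefix is what lets rb_gc_impl_obj_slot_size recover the slot
 * size with a single load:
 *
 *     return ((VALUE *)obj)[-1];
 */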
rb_gc_impl_obj_slot_size(VALUE obj)

    return ((VALUE *)obj)[-1];

rb_gc_impl_heap_id_for_size(void *objspace_ptr, size_t size)

    for (int i = 0; i < MMTK_HEAP_COUNT; i++) {
        if (size == heap_sizes[i]) return i;
        if (size < heap_sizes[i]) return i;

    rb_bug("size too big");

rb_gc_impl_size_allocatable_p(size_t size)

    return size <= MMTK_MAX_OBJ_SIZE;

rb_gc_impl_malloc(void *objspace_ptr, size_t size, bool gc_allowed)

rb_gc_impl_calloc(void *objspace_ptr, size_t size, bool gc_allowed)

    return calloc(1, size);

rb_gc_impl_realloc(void *objspace_ptr, void *ptr, size_t new_size, size_t old_size, bool gc_allowed)

    return realloc(ptr, new_size);

rb_gc_impl_free(void *objspace_ptr, void *ptr, size_t old_size)

void rb_gc_impl_adjust_memory_usage(void *objspace_ptr, ssize_t diff) { }

rb_mmtk_call_object_closure(VALUE obj, bool pin)

        rb_mmtk_gc_thread_tls->gc_context,
        (MMTk_ObjectReference)obj,

rb_gc_impl_mark(void *objspace_ptr, VALUE obj)

    rb_mmtk_call_object_closure(obj, false);

rb_gc_impl_mark_and_move(void *objspace_ptr, VALUE *ptr)

    VALUE new_obj = rb_mmtk_call_object_closure(*ptr, false);
    if (new_obj != *ptr) {

rb_gc_impl_mark_and_pin(void *objspace_ptr, VALUE obj)

    rb_mmtk_call_object_closure(obj, true);

rb_gc_impl_mark_maybe(void *objspace_ptr, VALUE obj)

    if (rb_gc_impl_pointer_to_heap_p(objspace_ptr, (const void *)obj)) {
        rb_gc_impl_mark_and_pin(objspace_ptr, obj);

rb_gc_impl_declare_weak_references(void *objspace_ptr, VALUE obj)

    mmtk_declare_weak_references((MMTk_ObjectReference)obj);

rb_gc_impl_handle_weak_references_alive_p(void *objspace_ptr, VALUE obj)

    return mmtk_weak_references_alive_p((MMTk_ObjectReference)obj);

rb_gc_impl_register_pinning_obj(void *objspace_ptr, VALUE obj)

    mmtk_register_pinning_obj((MMTk_ObjectReference)obj);

rb_gc_impl_object_moved_p(void *objspace_ptr, VALUE obj)

    return rb_mmtk_call_object_closure(obj, false) != obj;

rb_gc_impl_location(void *objspace_ptr, VALUE obj)

    return rb_mmtk_call_object_closure(obj, false);

rb_gc_impl_writebarrier(void *objspace_ptr, VALUE a, VALUE b)

    mmtk_object_reference_write_post(cache->mutator, (MMTk_ObjectReference)a);

rb_gc_impl_writebarrier_unprotect(void *objspace_ptr, VALUE obj)

    mmtk_register_wb_unprotected_object((MMTk_ObjectReference)obj);

rb_gc_impl_writebarrier_remember(void *objspace_ptr, VALUE obj)

    mmtk_object_reference_write_post(cache->mutator, (MMTk_ObjectReference)obj);
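/*
 * Annotation: MMTk uses a post-write barrier here: after a reference is
 * stored into object `a`, the binding calls mmtk_object_reference_write_post
 * on the current mutator so MMTk can record the modified object. Objects that
 * cannot be barriered reliably are registered as WB-unprotected via
 * mmtk_register_wb_unprotected_object, and rb_gc_impl_writebarrier_remember
 * re-reports an object wholesale with the same post-write call.
 */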
each_objects_i(MMTk_ObjectReference obj, void *d)

    rb_darray(VALUE) *objs = d;

    rb_darray_append(objs, (VALUE)obj);

    rb_darray(VALUE) objs;
    rb_darray_make(&objs, 0);

    mmtk_enumerate_objects(each_objects_i, &objs);

    rb_darray_foreach(objs, i, obj_ptr) {
        if (!mmtk_is_mmtk_object((MMTk_ObjectReference)*obj_ptr)) continue;

        if (func(*obj_ptr, data) != 0) {

    rb_darray_free(objs);
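/*
 * Annotation: object iteration is done in two phases. mmtk_enumerate_objects
 * first snapshots every object reference into a darray; the darray is then
 * walked, skipping anything that is no longer a valid MMTk object and
 * stopping early if the callback returns non-zero, which avoids running the
 * callback while MMTk is iterating its own heap structures.
 */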
    int (*func)(void *, void *, size_t, void *);

rb_gc_impl_each_objects_i(VALUE obj, void *d)

    size_t slot_size = rb_gc_impl_obj_slot_size(obj);

    return data->func((void *)obj, (void *)(obj + slot_size), slot_size, data->data);

rb_gc_impl_each_objects(void *objspace_ptr, int (*func)(void *, void *, size_t, void *), void *data)

    each_object(objspace_ptr, rb_gc_impl_each_objects_i, &each_objects_data);

    void (*func)(VALUE, void *);

rb_gc_impl_each_object_i(VALUE obj, void *d)

    data->func(obj, data->data);

rb_gc_impl_each_object(void *objspace_ptr, void (*func)(VALUE, void *), void *data)

    each_object(objspace_ptr, rb_gc_impl_each_object_i, &each_object_data);

gc_run_finalizers_get_final(long i, void *data)

gc_run_finalizers(void *data)

    rb_gc_set_pending_interrupt();

    while (objspace->finalizer_jobs != NULL) {

        objspace->finalizer_jobs = job->next;

          case MMTK_FINAL_JOB_DFREE:
            job->as.dfree.func(job->as.dfree.data);

          case MMTK_FINAL_JOB_FINALIZE: {
            VALUE finalizer_array = job->as.finalize.finalizer_array;

            rb_gc_run_obj_finalizer(

                gc_run_finalizers_get_final,
                (void *)finalizer_array

    rb_gc_unset_pending_interrupt();

rb_gc_impl_make_zombie(void *objspace_ptr, VALUE obj, void (*dfree)(void *), void *data)

    if (dfree == NULL) return;

    job->kind = MMTK_FINAL_JOB_DFREE;
    job->as.dfree.func = dfree;
    job->as.dfree.data = data;

        job->next = objspace->finalizer_jobs;

    } while (prev != job->next);

    if (!ruby_free_at_exit_p()) {
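/*
 * Annotation: rb_gc_impl_make_zombie does not free the object immediately.
 * It packages the dfree callback and its data into an MMTK_FINAL_JOB_DFREE
 * job and pushes it onto objspace->finalizer_jobs; the
 * do { ... } while (prev != job->next) shape of the push suggests a
 * compare-and-swap retry loop so jobs can be queued concurrently.
 * gc_run_finalizers later drains the list on a Ruby thread with pending
 * interrupts masked, running both dfree jobs and Ruby-level finalizers
 * (MMTK_FINAL_JOB_FINALIZE).
 */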
rb_gc_impl_define_finalizer(void *objspace_ptr, VALUE obj, VALUE block)

    int lev = RB_GC_VM_LOCK();

    if (st_lookup(objspace->finalizer_table, obj, &data)) {
        table = (VALUE)data;

        for (i = 0; i < len; i++) {

            RB_GC_VM_UNLOCK(lev);

    st_add_direct(objspace->finalizer_table, obj, table);

    RB_GC_VM_UNLOCK(lev);

rb_gc_impl_undefine_finalizer(void *objspace_ptr, VALUE obj)

    st_data_t data = obj;

    int lev = RB_GC_VM_LOCK();
    st_delete(objspace->finalizer_table, &data, 0);
    RB_GC_VM_UNLOCK(lev);

rb_gc_impl_copy_finalizer(void *objspace_ptr, VALUE dest, VALUE obj)

    int lev = RB_GC_VM_LOCK();
    if (RB_LIKELY(st_lookup(objspace->finalizer_table, obj, &data))) {

        st_insert(objspace->finalizer_table, dest, table);

        rb_bug("rb_gc_copy_finalizer: FL_FINALIZE set but not found in finalizer_table: %s", rb_obj_info(obj));

    RB_GC_VM_UNLOCK(lev);

move_finalizer_from_table_i(st_data_t key, st_data_t val, st_data_t arg)

rb_gc_impl_shutdown_call_finalizer(void *objspace_ptr)

    while (objspace->finalizer_table->num_entries) {
        st_foreach(objspace->finalizer_table, move_finalizer_from_table_i, (st_data_t)objspace);

    unsigned int lev = RB_GC_VM_LOCK();

    for (size_t i = 0; i < registered_candidates.len; i++) {
        VALUE obj = (VALUE)registered_candidates.ptr[i];

        if (rb_gc_shutdown_call_finalizer_p(obj)) {
            rb_gc_obj_free(objspace_ptr, obj);

    mmtk_free_raw_vec_of_obj_ref(registered_candidates);

    RB_GC_VM_UNLOCK(lev);
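/*
 * Annotation: at VM shutdown every entry left in finalizer_table is drained
 * (the while loop re-runs st_foreach, presumably because running finalizers
 * can add new entries), then the objects registered as obj_free candidates
 * are walked under the VM lock and freed when rb_gc_shutdown_call_finalizer_p
 * approves; the raw vector handed back by the Rust side is released with
 * mmtk_free_raw_vec_of_obj_ref.
 */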
rb_gc_impl_before_fork(void *objspace_ptr)

    objspace->fork_hook_vm_lock_lev = RB_GC_VM_LOCK();

    if (mutator_blocking_count != 0) {
        RB_GC_VM_UNLOCK(objspace->fork_hook_vm_lock_lev);

rb_gc_impl_after_fork(void *objspace_ptr, rb_pid_t pid)

    mmtk_after_fork(rb_gc_get_ractor_newobj_cache());

    RB_GC_VM_UNLOCK(objspace->fork_hook_vm_lock_lev);

rb_gc_impl_set_measure_total_time(void *objspace_ptr, VALUE flag)

rb_gc_impl_get_measure_total_time(void *objspace_ptr)

rb_gc_impl_get_total_time(void *objspace_ptr)

rb_gc_impl_gc_count(void *objspace_ptr)

rb_gc_impl_latest_gc_info(void *objspace_ptr, VALUE hash_or_key)

        rb_bug("gc_info_decode: non-hash or symbol given");

#define SET(name, attr) \
    if (key == ID2SYM(rb_intern_const(#name))) \
    else if (hash != Qnil) \
        rb_hash_aset(hash, ID2SYM(rb_intern_const(#name)), (attr));

    gc_stat_sym_total_allocated_objects,
    gc_stat_sym_total_bytes,
    gc_stat_sym_used_bytes,
    gc_stat_sym_free_bytes,
    gc_stat_sym_starting_heap_address,
    gc_stat_sym_last_heap_address,

static VALUE gc_stat_symbols[gc_stat_sym_last];

setup_gc_stat_symbols(void)

    if (gc_stat_symbols[0] == 0) {
#define S(s) gc_stat_symbols[gc_stat_sym_##s] = ID2SYM(rb_intern_const(#s))

        S(total_allocated_objects);

        S(starting_heap_address);
        S(last_heap_address);

rb_gc_impl_stat(void *objspace_ptr, VALUE hash_or_sym)

    setup_gc_stat_symbols();

        rb_bug("non-hash or symbol given");

#define SET(name, attr) \
    if (key == gc_stat_symbols[gc_stat_sym_##name]) \
        return SIZET2NUM(attr); \
    else if (hash != Qnil) \
        rb_hash_aset(hash, gc_stat_symbols[gc_stat_sym_##name], SIZET2NUM(attr));

    SET(time, objspace->total_gc_time / (1000 * 1000));
    SET(total_allocated_objects, objspace->total_allocated_objects);
    SET(total_bytes, mmtk_total_bytes());
    SET(used_bytes, mmtk_used_bytes());
    SET(free_bytes, mmtk_free_bytes());
    SET(starting_heap_address, (size_t)mmtk_starting_heap_address());
    SET(last_heap_address, (size_t)mmtk_last_heap_address());
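/*
 * Annotation: the SET macro serves GC.stat in both calling modes: with a
 * Symbol argument it returns that single statistic immediately, and with a
 * Hash it fills in every entry. `time` is reported in milliseconds
 * (total_gc_time is accumulated in nanoseconds), while the byte and address
 * figures come straight from MMTk (mmtk_total_bytes, mmtk_used_bytes,
 * mmtk_free_bytes, and the starting/last heap addresses).
 */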
rb_gc_impl_stat_heap(void *objspace_ptr, VALUE heap_name, VALUE hash_or_sym)

#define RB_GC_OBJECT_METADATA_ENTRY_COUNT 1

rb_gc_impl_object_metadata(void *objspace_ptr, VALUE obj)

    static ID ID_object_id;

    if (!ID_object_id) {
#define I(s) ID_##s = rb_intern(#s);

#define SET_ENTRY(na, v) do { \
    RUBY_ASSERT(n <= RB_GC_OBJECT_METADATA_ENTRY_COUNT); \
    object_metadata_entries[n].name = ID_##na; \
    object_metadata_entries[n].val = v; \

    if (rb_obj_id_p(obj)) SET_ENTRY(object_id, rb_obj_id(obj));

    object_metadata_entries[n].name = 0;
    object_metadata_entries[n].val = 0;

    return object_metadata_entries;

rb_gc_impl_pointer_to_heap_p(void *objspace_ptr, const void *ptr)

    if (ptr == NULL) return false;
    if ((uintptr_t)ptr % sizeof(void*) != 0) return false;

    return mmtk_is_mmtk_object((MMTk_Address)ptr);
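/*
 * Annotation: conservative pointer checks reject NULL and misaligned values
 * cheaply before asking MMTk whether the address really is a managed object.
 * rb_gc_impl_mark_maybe above relies on this, so that values that merely look
 * like pointers (such as words found during conservative scanning) are pinned
 * only when they genuinely refer to heap objects.
 */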
rb_gc_impl_garbage_object_p(void *objspace_ptr, VALUE obj)

void rb_gc_impl_set_event_hook(void *objspace_ptr, const rb_event_flag_t event) { }

rb_gc_impl_copy_attributes(void *objspace_ptr, VALUE dest, VALUE obj)

    if (mmtk_object_wb_unprotected_p((MMTk_ObjectReference)obj)) {
        rb_gc_impl_writebarrier_unprotect(objspace_ptr, dest);

    rb_gc_impl_copy_finalizer(objspace_ptr, dest, obj);

rb_gc_impl_active_gc_name(void)