11#include "ruby/internal/config.h"
15#include "internal/compilers.h"
20# define USE_MODULAR_GC 0
/* SET_MACHINE_STACK_END(p): store the current machine stack pointer into *p,
 * used by the conservative GC to know where the machine stack ends.
 * Each branch emits the matching inline asm for one target architecture;
 * the final branch falls back to an out-of-line helper.
 * NOTE(review): the #else/#endif lines of this ladder are not visible in this
 * extract -- presumably dropped during extraction; confirm against the full file. */
23#if defined(__x86_64__) && !defined(_ILP32) && defined(__GNUC__)
/* x86-64 LP64: stack pointer is %rsp. */
24#define SET_MACHINE_STACK_END(p) __asm__ __volatile__ ("movq\t%%rsp, %0" : "=r" (*(p)))
25#elif defined(__i386) && defined(__GNUC__)
/* 32-bit x86: stack pointer is %esp. */
26#define SET_MACHINE_STACK_END(p) __asm__ __volatile__ ("movl\t%%esp, %0" : "=r" (*(p)))
27#elif (defined(__powerpc__) || defined(__powerpc64__)) && defined(__GNUC__) && !defined(_AIX) && !defined(__APPLE__)
/* PowerPC (ELF ABIs): GPR r1 serves as the stack pointer. */
28#define SET_MACHINE_STACK_END(p) __asm__ __volatile__ ("mr\t%0, %%r1" : "=r" (*(p)))
29#elif (defined(__powerpc__) || defined(__powerpc64__)) && defined(__GNUC__) && defined(_AIX)
/* AIX assembler spells registers as bare numbers, hence "1" instead of %r1. */
30#define SET_MACHINE_STACK_END(p) __asm__ __volatile__ ("mr %0,1" : "=r" (*(p)))
31#elif defined(__POWERPC__) && defined(__APPLE__)
/* Apple (Darwin) PowerPC assembler syntax: register named r1, no %% escape. */
32#define SET_MACHINE_STACK_END(p) __asm__ volatile("mr %0, r1" : "=r" (*(p)))
33#elif defined(__aarch64__) && defined(__GNUC__)
/* AArch64: dedicated sp register. */
34#define SET_MACHINE_STACK_END(p) __asm__ __volatile__ ("mov\t%0, sp" : "=r" (*(p)))
/* Portable fallback: NOINLINE forces a real call frame so the helper's local
 * address approximates the caller's stack end (see rb_gc_set_stack_end). */
36NOINLINE(
void rb_gc_set_stack_end(
VALUE **stack_end_p));
37#define SET_MACHINE_STACK_END(p) rb_gc_set_stack_end(p)
/* Marker: this configuration derives the stack end conservatively. */
38#define USE_CONSERVATIVE_STACK_END
/* Mark/free debug tracing: when RUBY_MARK_FREE_DEBUG is non-zero, the
 * RUBY_MARK_*/RUBY_FREE_* macros print an indented enter/leave trace of GC
 * mark and free traversal; otherwise they compile to nothing.
 * NOTE(review): several lines of this region (function headers, braces, the
 * #else/#endif pair) are missing from this extract; the visible function
 * bodies below are fragments -- confirm against the full file. */
43#ifndef RUBY_MARK_FREE_DEBUG
44#define RUBY_MARK_FREE_DEBUG 0
47#if RUBY_MARK_FREE_DEBUG
/* Current trace nesting depth, shared across translation units. */
48extern int ruby_gc_debug_indent;
/* Prints ruby_gc_debug_indent spaces to align nested trace output.
 * (Return type / storage-class line not visible in this extract.) */
51rb_gc_debug_indent(
void)
53 ruby_debug_printf(
"%*s", ruby_gc_debug_indent,
"");
/* Prints one enter ("->", st != 0) or leave ("<-", st == 0) trace line for
 * the object at ptr, adjusting the indent level around the message.
 * NOTE(review): the conditionals guarding the -- and ++ below are not
 * visible here; presumably indent decreases on leave and increases on
 * enter -- verify against the full source. */
57rb_gc_debug_body(
const char *mode,
const char *msg,
int st,
void *ptr)
60 ruby_gc_debug_indent--;
63 ruby_debug_printf(
"%s: %s %s (%p)\n", mode, st ?
"->" :
"<-", msg, ptr);
66 ruby_gc_debug_indent++;
/* These macros capture a local `ptr` from the expansion site. */
72#define RUBY_MARK_ENTER(msg) rb_gc_debug_body("mark", (msg), 1, ptr)
73#define RUBY_MARK_LEAVE(msg) rb_gc_debug_body("mark", (msg), 0, ptr)
74#define RUBY_FREE_ENTER(msg) rb_gc_debug_body("free", (msg), 1, ptr)
75#define RUBY_FREE_LEAVE(msg) rb_gc_debug_body("free", (msg), 0, ptr)
76#define RUBY_GC_INFO rb_gc_debug_indent(), ruby_debug_printf
/* Debugging disabled: all tracing macros expand to nothing, and
 * RUBY_GC_INFO becomes a dead printf so its arguments still type-check. */
79#define RUBY_MARK_ENTER(msg)
80#define RUBY_MARK_LEAVE(msg)
81#define RUBY_FREE_ENTER(msg)
82#define RUBY_FREE_LEAVE(msg)
83#define RUBY_GC_INFO if(0)printf
/* Stack growth direction selection.
 * STACK_UPPER(x, a, b) evaluates to `a` when the machine stack grows upward
 * and `b` when it grows downward. If STACK_GROW_DIRECTION is known at
 * compile time the choice is static; otherwise it is detected at runtime
 * (ruby_get_stack_grow_direction, cached in ruby_stack_grow_direction).
 * NOTE(review): the #else/#endif lines of this conditional are not visible
 * in this extract. */
86#if STACK_GROW_DIRECTION > 0
87# define STACK_UPPER(x, a, b) (a)
88#elif STACK_GROW_DIRECTION < 0
89# define STACK_UPPER(x, a, b) (b)
/* Runtime probe: takes the address of a volatile local to compare frames. */
92int ruby_get_stack_grow_direction(
volatile VALUE *addr);
93# define stack_growup_p(x) ( \
94 (ruby_stack_grow_direction ? \
95 ruby_stack_grow_direction : \
96 ruby_get_stack_grow_direction(x)) > 0)
97# define STACK_UPPER(x, a, b) (stack_growup_p(x) ? (a) : (b))
/* STACK_DIR_UPPER: like STACK_UPPER but supplies its own probe variable
 * (stack_grow_dir_detection, declared via STACK_GROW_DIR_DETECTION) when
 * runtime detection is needed. */
108#if STACK_GROW_DIRECTION
109#define STACK_GROW_DIR_DETECTION
110#define STACK_DIR_UPPER(a,b) STACK_UPPER(0, (a), (b))
112#define STACK_GROW_DIR_DETECTION VALUE stack_grow_dir_detection
113#define STACK_DIR_UPPER(a,b) STACK_UPPER(&stack_grow_dir_detection, (a), (b))
/* Non-zero iff the stack grows toward higher addresses. */
115#define IS_STACK_DIR_UPPER() STACK_DIR_UPPER(1,0)
/* Debug description of an object (pointer classification, flags, etc.). */
117const char *rb_obj_info(
VALUE obj);
/* Like rb_obj_info but writes into a caller-supplied buffer of buff_size. */
118const char *rb_raw_obj_info(
char *
const buff,
const size_t buff_size,
VALUE obj);
/* Object allocation helpers: declare `var` of type T* pointing at a fresh
 * object with class c, flags f, and slot size s, allocated on execution
 * context ec (NEWOBJ_OF uses the current EC). */
123#define EC_NEWOBJ_OF(var, T, c, f, s, ec) \
124 T *(var) = (T *)rb_ec_newobj_of((ec), (c), (f), s)
125#define NEWOBJ_OF(var, T, c, f, s) EC_NEWOBJ_OF(var, T, c, f, s, GET_EC())
/* Variant calling rb_newobj directly; the extra 0/false arguments are
 * passed through verbatim -- see rb_newobj's definition for their meaning. */
126#define UNPROTECTED_NEWOBJ_OF(var, T, c, f, s) \
127 T *(var) = (T *)rb_newobj((GET_EC()), (c), (f), 0 , false, s)
/* Guard so rb_gc_object_metadata_entry is only defined once.
 * NOTE(review): the struct body and #endif are not visible in this extract. */
129#ifndef RB_GC_OBJECT_METADATA_ENTRY_DEFINED
130# define RB_GC_OBJECT_METADATA_ENTRY_DEFINED
/* UNALIGNED_MEMBER_ACCESS(expr): evaluate expr while suppressing the
 * -Waddress-of-packed-member warning, used when taking the address of a
 * member of a packed struct. The first three branches degrade to a plain
 * passthrough when the feature is off or the compiler lacks the warning.
 * NOTE(review): the closing lines of the statement-expression macros below
 * (the `})` and the surrounding #else/#endif) are missing from this extract. */
137#ifndef USE_UNALIGNED_MEMBER_ACCESS
138# define UNALIGNED_MEMBER_ACCESS(expr) (expr)
139#elif ! USE_UNALIGNED_MEMBER_ACCESS
140# define UNALIGNED_MEMBER_ACCESS(expr) (expr)
141#elif ! (__has_warning("-Waddress-of-packed-member") || GCC_VERSION_SINCE(9, 0, 0))
142# define UNALIGNED_MEMBER_ACCESS(expr) (expr)
/* Compiler supports the warning: wrap expr in push/ignore/pop pragmas via a
 * GNU statement expression so the result value is preserved. */
144# include "internal/warnings.h"
145# define UNALIGNED_MEMBER_ACCESS(expr) __extension__({ \
146 COMPILER_WARNING_PUSH; \
147 COMPILER_WARNING_IGNORED(-Waddress-of-packed-member); \
148 __typeof__(expr) unaligned_member_access_result = (expr); \
149 COMPILER_WARNING_POP; \
150 unaligned_member_access_result; \
153# define UNALIGNED_MEMBER_PTR(ptr, mem) __extension__({ \
154 COMPILER_WARNING_PUSH; \
155 COMPILER_WARNING_IGNORED(-Waddress-of-packed-member); \
156 const volatile void *unaligned_member_ptr_result = &(ptr)->mem; \
157 COMPILER_WARNING_POP; \
158 (__typeof__((ptr)->mem) *)unaligned_member_ptr_result; \
162#ifndef UNALIGNED_MEMBER_PTR
163# define UNALIGNED_MEMBER_PTR(ptr, mem) UNALIGNED_MEMBER_ACCESS(&(ptr)->mem)
/* Write-barriered store through a possibly-unaligned slot pointer.
 * NOTE(review): the closing `} while (0)` is not visible in this extract. */
166#define RB_OBJ_WRITE_UNALIGNED(old, slot, young) do { \
167 VALUE *_slot = UNALIGNED_MEMBER_ACCESS(slot); \
168 RB_OBJ_WRITE(old, _slot, young); \
/* Bracket a region inside GC that may call malloc: asserts we are indeed in
 * GC, then disables GC (without finishing the current cycle) and remembers
 * whether it was already disabled. */
175#define DURING_GC_COULD_MALLOC_REGION_START() \
176 assert(rb_during_gc()); \
177 VALUE _already_disabled = rb_gc_disable_no_rest()
/* Re-enable GC only if START found it enabled (_already_disabled == Qfalse). */
179#define DURING_GC_COULD_MALLOC_REGION_END() \
180 if (_already_disabled == Qfalse) rb_gc_enable()
/* --- Minimal allocator (usable before/outside the GC heap) --- */
183RUBY_ATTR_MALLOC
void *ruby_mimmalloc(
size_t size);
184RUBY_ATTR_MALLOC
void *ruby_mimcalloc(
size_t num,
size_t size);
185void ruby_mimfree(
void *ptr);
186void rb_gc_prepare_heap(
void);
/* Read GC tuning parameters (presumably from environment; confirm in gc.c). */
190void ruby_gc_set_params(
void);
191void rb_gc_copy_attributes(
VALUE dest,
VALUE obj);
/* --- Overflow-checked size arithmetic; the trailing VALUE is presumably an
 * exception class raised on overflow -- verify against gc.c --- */
192size_t rb_size_mul_or_raise(
size_t,
size_t,
VALUE);
193size_t rb_size_mul_add_or_raise(
size_t,
size_t,
size_t,
VALUE);
/* Growth policy for malloc'ed buffers: next capacity given current one. */
194size_t rb_malloc_grow_capa(
size_t current_capacity,
size_t type_size);
/* --- xmalloc/xcalloc/xrealloc variants computing a*b+c (and *_mul: (a*b+c)*d)
 * with overflow checking --- */
195RUBY_ATTR_MALLOC
void *rb_xmalloc_mul_add(
size_t,
size_t,
size_t);
196RUBY_ATTR_MALLOC
void *rb_xcalloc_mul_add(
size_t,
size_t,
size_t);
197void *rb_xrealloc_mul_add(
const void *,
size_t,
size_t,
size_t);
198RUBY_ATTR_MALLOC
void *rb_xmalloc_mul_add_mul(
size_t,
size_t,
size_t,
size_t);
199RUBY_ATTR_MALLOC
void *rb_xcalloc_mul_add_mul(
size_t,
size_t,
size_t,
size_t);
/* --- Compaction / pinning hooks --- */
200void rb_gc_obj_id_moved(
VALUE obj);
201void rb_gc_register_pinning_obj(
VALUE obj);
/* --- Per-ractor GC allocation cache --- */
204void *rb_gc_ractor_cache_alloc(
rb_ractor_t *ractor);
205void rb_gc_ractor_cache_free(
void *cache);
/* --- Heap (size-pool) queries --- */
207bool rb_gc_size_allocatable_p(
size_t size);
208size_t *rb_gc_heap_sizes(
void);
209size_t rb_gc_heap_id_for_size(
size_t size);
/* Mark *ptr and, under compaction, update it to the object's new address. */
211void rb_gc_mark_and_move(
VALUE *ptr);
/* --- Weak reference support --- */
213void rb_gc_declare_weak_references(
VALUE obj);
214bool rb_gc_handle_weak_references_alive_p(
VALUE obj);
/* Update (values only) a table whose values may have moved in compaction. */
216void rb_gc_ref_update_table_values_only(
st_table *tbl);
218void rb_gc_initial_stress_set(
VALUE flag);
/* --- fork() hooks --- */
220void rb_gc_before_fork(
void);
221void rb_gc_after_fork(rb_pid_t pid);
/* Mark-and-move through a non-VALUE pointer slot: round-trips *ptr through a
 * VALUE temporary so rb_gc_mark_and_move can update it, then writes the new
 * address back only if compaction actually moved the object.
 * NOTE(review): the closing `} while (0)` is not visible in this extract. */
223#define rb_gc_mark_and_move_ptr(ptr) do { \
224 VALUE _obj = (VALUE)*(ptr); \
225 rb_gc_mark_and_move(&_obj); \
226 if (_obj != (VALUE)*(ptr)) *(ptr) = (void *)_obj; \
/* Symbols below are exported (visible to extensions / other shared objects). */
229RUBY_SYMBOL_EXPORT_BEGIN
/* Invoke func(child, data) for every object directly reachable from obj. */
231void rb_objspace_reachable_objects_from(
VALUE obj,
void (func)(
VALUE,
void *),
void *data);
/* Like the above, but starting from the GC roots; each root comes with a
 * category string ("vm", "machine_context", ...). */
232void rb_objspace_reachable_objects_from_root(
void (func)(
const char *category,
VALUE,
void *),
void *data);
/* Object classification predicates. */
233int rb_objspace_internal_object_p(
VALUE obj);
234int rb_objspace_garbage_object_p(
VALUE obj);
235bool rb_gc_pointer_to_heap_p(
VALUE obj);
/* Iterate heap pages, calling callback(start, end, stride, data) per range.
 * NOTE(review): the trailing `void *data);` of this declaration is not
 * visible in this extract. */
237void rb_objspace_each_objects(
238 int (*callback)(
void *start,
void *end,
size_t stride,
void *data),
241size_t rb_gc_obj_slot_size(
VALUE obj);
/* Disable GC without completing ("resting") the in-progress cycle; returns
 * the previous enabled/disabled state. */
243VALUE rb_gc_disable_no_rest(
void);
/* Maximum length of a GC-related name string (e.g. data type names). */
245#define RB_GC_MAX_NAME_LEN 20
248const char *rb_objspace_data_type_name(
VALUE obj);
252size_t rb_obj_memsize_of(
VALUE);
/* Mark (or mark-and-update) arrays of n VALUEs. */
254void rb_gc_mark_values(
long n,
const VALUE *values);
255void rb_gc_mark_vm_stack_values(
long n,
const VALUE *values);
256void rb_gc_update_values(
long n,
VALUE *values);
/* Modular-GC introspection: name of the active GC implementation and whether
 * an external GC library has been loaded. */
258const char *rb_gc_active_gc_name(
void);
259int rb_gc_modular_gc_loaded_p(
void);
261RUBY_SYMBOL_EXPORT_END
/* NOTE(review): the lines below are a fragment of the write-barrier helper
 * (presumably rb_obj_written_p / rb_obj_written); its signature, braces, and
 * surrounding #endif were lost in extraction -- confirm against the full
 * file before editing. When RGENGC_LOGGING_WRITE is defined, each barriered
 * write is logged with its source location. */
267 const char *filename,
271#ifdef RGENGC_LOGGING_WRITE
272 RGENGC_LOGGING_WRITE(a, slot, b, filename, line);
/* Delegates to rb_obj_written with RUBY_Qundef standing in for the old slot
 * value (the generic barrier does not need it). */
277 rb_obj_written(a,
RUBY_Qundef , b, filename, line);
/* Atomic variant of RB_OBJ_WRITE: barriered store recording call site. */
280#define RB_OBJ_ATOMIC_WRITE(old, slot, young) \
281 RBIMPL_CAST(rb_obj_atomic_write((VALUE)(old), (VALUE *)(slot), (VALUE)(young), __FILE__, __LINE__))
/* Re-remember obj in the remembered set (e.g. after bulk writes). */
284void rb_gc_writebarrier_remember(
VALUE obj);
285const char *rb_obj_info(
VALUE obj);
/* Tag an mmap'ed region with a name for debugging tools. */
286void ruby_annotate_mmap(
const void *addr,
unsigned long size,
const char *name);
/* Sized realloc/free helpers: pass the old size to the allocator so it can
 * use sized-deallocation paths. NOTE(review): the #if/#else/#endif framing
 * of these definitions is not visible in this extract. */
288# define SIZED_REALLOC_N(v, T, m, n) \
289 ((v) = (T *)ruby_xrealloc2_sized((void *)(v), (m), sizeof(T), (n)))
291# define SIZED_FREE(v) ruby_xfree_sized((void *)(v), sizeof(*(v)))
292# define SIZED_FREE_N(v, n) ruby_xfree_sized((void *)(v), sizeof(*(v)) * (n))
/* Reallocate an array of old_count elements of element_size to new_count
 * elements, forwarding to the sized realloc primitive.
 * NOTE(review): the storage-class/return-type line (presumably
 * `static inline void *`) and the function braces are missing from this
 * extract -- confirm against the full file. */
295ruby_sized_realloc_n(
void *ptr,
size_t new_count,
size_t element_size,
size_t old_count)
297 return ruby_xrealloc2_sized(ptr, new_count, element_size, old_count);
/* Ractor-shareability debugging hooks. */
300void rb_gc_verify_shareable(
VALUE);
301bool rb_gc_checking_shareable(
void);
#define RUBY_ATOMIC_VALUE_SET(var, val)
Identical to RUBY_ATOMIC_SET, except it expects its arguments are VALUE.
#define RUBY_EXTERN
Declaration of externally visible global variables.
uint32_t rb_event_flag_t
Represents event(s).
#define RBIMPL_ATTR_MAYBE_UNUSED()
Wraps (or simulates) [[maybe_unused]]
RUBY_Qundef
Represents the so-called undef sentinel value.
uintptr_t ID
Type that represents a Ruby identifier such as a variable name.
uintptr_t VALUE
Type that represents a Ruby object.