/* Allocates a new object of type T and binds it to `var`.  If the caller
 * passes FL_WB_PROTECTED, the write-barrier-protected allocator is used
 * and the flag itself is stripped before being handed down; otherwise the
 * unprotected allocator receives the flags untouched. */
#define RB_NEWOBJ_OF(var, T, c, f) \
    T *(var) = (T *)(((f) & FL_WB_PROTECTED) ? \
                     rb_wb_protected_newobj_of((c), (f) & ~FL_WB_PROTECTED) : \
                     rb_wb_unprotected_newobj_of((c), (f)))
/* Like RB_NEWOBJ_OF, but allocates through the given execution context
 * when write-barrier protection is requested.
 * NOTE(review): the unprotected branch has no ec-aware counterpart and
 * deliberately ignores `ec` — confirm this still matches the allocator
 * API before changing. */
#define RB_EC_NEWOBJ_OF(ec, var, T, c, f) \
    T *(var) = (T *)(((f) & FL_WB_PROTECTED) ? \
                     rb_ec_wb_protected_newobj_of((ec), (c), (f) & ~FL_WB_PROTECTED) : \
                     rb_wb_unprotected_newobj_of((c), (f)))
/* Convenience alias: NEWOBJ_OF delegates straight to RB_NEWOBJ_OF. */
#define NEWOBJ_OF(var, T, c, f) RB_NEWOBJ_OF((var), T, (c), (f))
/* Presumably the upper bound on flag IDs rb_obj_gc_flags() can report
 * into its ID[] out-parameter — TODO confirm against the definition. */
#define RB_OBJ_GC_FLAGS_MAX 6
/*
 * UNALIGNED_MEMBER_ACCESS(expr) evaluates `expr` while silencing
 * -Waddress-of-packed-member on compilers that emit it.  The first three
 * branches reduce to a plain expression when the feature is disabled or
 * the warning cannot occur; otherwise a GNU statement expression brackets
 * the evaluation with a warning push/pop pair.
 *
 * NOTE(review): the extracted chunk was truncated — the trailing `#else`,
 * the closing `})` of the statement expression, and the `#endif` were
 * missing, leaving the conditional unbalanced.  Restored here.
 */
#ifndef USE_UNALIGNED_MEMBER_ACCESS
# define UNALIGNED_MEMBER_ACCESS(expr) (expr)
#elif ! USE_UNALIGNED_MEMBER_ACCESS
# define UNALIGNED_MEMBER_ACCESS(expr) (expr)
#elif ! (__has_warning("-Waddress-of-packed-member") || GCC_VERSION_SINCE(9, 0, 0))
# define UNALIGNED_MEMBER_ACCESS(expr) (expr)
#else
# define UNALIGNED_MEMBER_ACCESS(expr) __extension__({ \
    COMPILER_WARNING_PUSH; \
    COMPILER_WARNING_IGNORED(-Waddress-of-packed-member); \
    __typeof__(expr) unaligned_member_access_result = (expr); \
    COMPILER_WARNING_POP; \
    unaligned_member_access_result; \
})
#endif
/* Takes the address of `(ptr)->mem` with packed-member warnings
 * suppressed via UNALIGNED_MEMBER_ACCESS. */
#define UNALIGNED_MEMBER_PTR(ptr, mem) UNALIGNED_MEMBER_ACCESS(&(ptr)->mem)
/* Write-barrier-aware store of `b` into the slot inside object `a`,
 * recording the call site (__FILE__/__LINE__) for GC diagnostics.  The
 * slot address goes through UNALIGNED_MEMBER_ACCESS so packed members
 * do not trip the compiler warning. */
#define RB_OBJ_WRITE(a, slot, b) \
    rb_obj_write((VALUE)(a), UNALIGNED_MEMBER_ACCESS((VALUE *)(slot)), \
                 (VALUE)(b), __FILE__, __LINE__)
79#if __has_attribute(alloc_align)
/* Forward declaration; the definition is selected further down by the
 * HAVE_MALLOC_USABLE_SIZE / HAVE_MALLOC_SIZE conditional. */
static inline void ruby_sized_xfree_inlined(void *ptr, size_t size);
94RUBY_SYMBOL_EXPORT_BEGIN
109RUBY_SYMBOL_EXPORT_END
117#if defined(HAVE_MALLOC_USABLE_SIZE) || defined(HAVE_MALLOC_SIZE) || defined(_WIN32)
120ruby_sized_xrealloc_inlined(
void *
ptr,
size_t new_size,
size_t old_size)
126ruby_sized_xrealloc2_inlined(
void *
ptr,
size_t new_count,
size_t elemsiz,
size_t old_count)
132ruby_sized_xfree_inlined(
void *
ptr,
size_t size)
/* With malloc introspection (malloc_usable_size/malloc_size/_WIN32) the
 * allocator can discover the old size itself, so the old-size argument
 * `w` is dropped and a plain REALLOC_N suffices. */
# define SIZED_REALLOC_N(x, y, z, w) REALLOC_N(x, y, z)
142ruby_sized_xrealloc_inlined(
void *
ptr,
size_t new_size,
size_t old_size)
148ruby_sized_xrealloc2_inlined(
void *
ptr,
size_t new_count,
size_t elemsiz,
size_t old_count)
154ruby_sized_xfree_inlined(
void *
ptr,
size_t size)
/* No malloc introspection available: forward the old element count `n`
 * so ruby_sized_xrealloc2 can account for the size change explicitly
 * ((m) = new count, sizeof(T) = element size, (n) = old count). */
# define SIZED_REALLOC_N(v, T, m, n) \
    ((v) = (T *)ruby_sized_xrealloc2((void *)(v), (m), sizeof(T), (n)))
/* Route the sized allocator entry points through the inlined versions
 * declared above. */
#define ruby_sized_xrealloc ruby_sized_xrealloc_inlined
#define ruby_sized_xrealloc2 ruby_sized_xrealloc2_inlined
#define ruby_sized_xfree ruby_sized_xfree_inlined
#define RUBY_ATTR_ALLOC_SIZE
#define RUBY_ATTR_RETURNS_NONNULL
Internal header absorbing C compiler differences.
#define MJIT_SYMBOL_EXPORT_END
#define MJIT_SYMBOL_EXPORT_BEGIN
void ruby_xfree(void *x)
Deallocates a storage instance.
void * ruby_xrealloc2(void *ptr, size_t n, size_t new_size)
Identical to ruby_xrealloc(), except it resizes the given storage instance to newelems * newsiz bytes...
void * rb_aligned_malloc(size_t alignment, size_t size)
void * ruby_xrealloc(void *ptr, size_t new_size)
Resize the storage instance.
void *PTR64 __attribute__((mode(DI)))
Thin wrapper to ruby/config.h.
VALUE rb_wb_unprotected_newobj_of(VALUE, VALUE)
VALUE * ruby_initial_gc_stress_ptr
#define ruby_sized_xrealloc
struct ractor_newobj_cache rb_ractor_newobj_cache_t
#define ruby_sized_xrealloc2
size_t rb_obj_memsize_of(VALUE)
void rb_copy_wb_protected_attribute(VALUE dest, VALUE obj)
void ruby_mimfree(void *ptr)
int rb_ec_stack_check(struct rb_execution_context_struct *ec)
void * rb_xrealloc_mul_add(const void *, size_t, size_t, size_t)
size_t rb_size_mul_or_raise(size_t, size_t, VALUE)
size_t rb_obj_gc_flags(VALUE, ID[], size_t)
VALUE rb_class_allocate_instance(VALUE klass)
VALUE rb_objspace_gc_disable(struct rb_objspace *)
const char * rb_objspace_data_type_name(VALUE obj)
void rb_gc_ractor_newobj_cache_clear(rb_ractor_newobj_cache_t *newobj_cache)
void ruby_gc_set_params(void)
void rb_objspace_set_event_hook(const rb_event_flag_t event)
VALUE rb_objspace_gc_enable(struct rb_objspace *)
void rb_gc_verify_internal_consistency(void)
void rb_gc_mark_values(long n, const VALUE *values)
RUBY_ATTR_MALLOC void * rb_xmalloc_mul_add_mul(size_t, size_t, size_t, size_t)
const char * rb_obj_info(VALUE obj)
RUBY_ATTR_MALLOC void * rb_xcalloc_mul_add_mul(size_t, size_t, size_t, size_t)
RUBY_ATTR_MALLOC void * rb_xmalloc_mul_add(size_t, size_t, size_t)
size_t rb_size_mul_add_or_raise(size_t, size_t, size_t, VALUE)
void rb_gc_writebarrier_remember(VALUE obj)
VALUE rb_ec_wb_protected_newobj_of(struct rb_execution_context_struct *ec, VALUE klass, VALUE flags)
void rb_gc_mark_vm_stack_values(long n, const VALUE *values)
VALUE rb_wb_protected_newobj_of(VALUE, VALUE)
RUBY_ATTR_MALLOC void * ruby_mimmalloc(size_t size)
struct heap_page::@93 flags
struct heap_page * using_page
Internal header to suppress / mandate warnings.