27#include "ruby/internal/config.h"
33#ifdef HAVE_SYS_TYPES_H
34# include <sys/types.h>
37#if RBIMPL_COMPILER_SINCE(MSVC, 13, 0, 0)
38# pragma intrinsic(_InterlockedOr)
39#elif defined(__sun) && defined(HAVE_ATOMIC_H)
44#include "ruby/backward/2/limits.h"
49#include "ruby/internal/cast.h"
59#if defined(__DOXYGEN__) || defined(HAVE_GCC_ATOMIC_BUILTINS) || defined(HAVE_GCC_SYNC_BUILTINS)
60# define RUBY_ATOMIC_GENERIC_MACRO 1
68#if defined(__DOXYGEN__)
70#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
72#elif defined(HAVE_GCC_SYNC_BUILTINS)
78#elif defined(__sun) && defined(HAVE_ATOMIC_H)
80#elif defined(HAVE_STDATOMIC_H)
81# include <stdatomic.h>
84# error No atomic operation found
/*
 * Map the RBIMPL_ATOMIC_* memory-order tokens onto whatever the selected
 * backend understands: GCC's __ATOMIC_* constants, C11 <stdatomic.h>'s
 * memory_order_* enumerators, or — for backends whose primitives cannot
 * honour an ordering argument — distinct integers that are accepted and
 * silently ignored by the rbimpl_atomic_* functions below.
 */
#if defined(HAVE_GCC_ATOMIC_BUILTINS)
# define RBIMPL_ATOMIC_RELAXED __ATOMIC_RELAXED
# define RBIMPL_ATOMIC_ACQUIRE __ATOMIC_ACQUIRE
# define RBIMPL_ATOMIC_RELEASE __ATOMIC_RELEASE
# define RBIMPL_ATOMIC_ACQ_REL __ATOMIC_ACQ_REL
# define RBIMPL_ATOMIC_SEQ_CST __ATOMIC_SEQ_CST
#elif defined(HAVE_STDATOMIC_H)
# define RBIMPL_ATOMIC_RELAXED memory_order_relaxed
# define RBIMPL_ATOMIC_ACQUIRE memory_order_acquire
# define RBIMPL_ATOMIC_RELEASE memory_order_release
# define RBIMPL_ATOMIC_ACQ_REL memory_order_acq_rel
# define RBIMPL_ATOMIC_SEQ_CST memory_order_seq_cst
#else
# define RBIMPL_ATOMIC_RELAXED 0
# define RBIMPL_ATOMIC_ACQUIRE 1
# define RBIMPL_ATOMIC_RELEASE 2
# define RBIMPL_ATOMIC_ACQ_REL 3
# define RBIMPL_ATOMIC_SEQ_CST 4
#endif
/* Public rb_atomic_t operations.  All of these are fully sequentially
 * consistent (RBIMPL_ATOMIC_SEQ_CST); `var` must be an lvalue of type
 * rb_atomic_t. */

/** Atomically performs `var += val`; returns the value `var` held before the addition. */
#define RUBY_ATOMIC_FETCH_ADD(var, val) rbimpl_atomic_fetch_add(&(var), (val), RBIMPL_ATOMIC_SEQ_CST)

/** Atomically performs `var -= val`; returns the value `var` held before the subtraction. */
#define RUBY_ATOMIC_FETCH_SUB(var, val) rbimpl_atomic_fetch_sub(&(var), (val), RBIMPL_ATOMIC_SEQ_CST)

/** Atomically performs `var |= val`; no return value. */
#define RUBY_ATOMIC_OR(var, val) rbimpl_atomic_or(&(var), (val), RBIMPL_ATOMIC_SEQ_CST)

/** Atomically stores `val` into `var`; returns the previous value of `var`. */
#define RUBY_ATOMIC_EXCHANGE(var, val) rbimpl_atomic_exchange(&(var), (val), RBIMPL_ATOMIC_SEQ_CST)

/** Atomic compare-and-swap: stores `newval` iff `var == oldval`; returns the
 * value `var` held before the operation (equal to `oldval` on success). */
#define RUBY_ATOMIC_CAS(var, oldval, newval) \
    rbimpl_atomic_cas(&(var), (oldval), (newval), RBIMPL_ATOMIC_SEQ_CST, RBIMPL_ATOMIC_SEQ_CST)

/** Atomically reads and returns the current value of `var`. */
#define RUBY_ATOMIC_LOAD(var) rbimpl_atomic_load(&(var), RBIMPL_ATOMIC_SEQ_CST)

/** Atomically stores `val` into `var`; no return value. */
#define RUBY_ATOMIC_SET(var, val) rbimpl_atomic_store(&(var), (val), RBIMPL_ATOMIC_SEQ_CST)

/** Atomically performs `var += val`; no return value. */
#define RUBY_ATOMIC_ADD(var, val) rbimpl_atomic_add(&(var), (val), RBIMPL_ATOMIC_SEQ_CST)

/** Atomically performs `var -= val`; no return value. */
#define RUBY_ATOMIC_SUB(var, val) rbimpl_atomic_sub(&(var), (val), RBIMPL_ATOMIC_SEQ_CST)

/** Atomically increments `var` by one; no return value. */
#define RUBY_ATOMIC_INC(var) rbimpl_atomic_inc(&(var), RBIMPL_ATOMIC_SEQ_CST)

/** Atomically decrements `var` by one; no return value. */
#define RUBY_ATOMIC_DEC(var) rbimpl_atomic_dec(&(var), RBIMPL_ATOMIC_SEQ_CST)
/* Public size_t variants of the atomic operations above.  `var` must be an
 * lvalue of type size_t.  All use sequentially-consistent ordering. */

/** Atomically performs `var += val`; returns the value `var` held before the addition. */
#define RUBY_ATOMIC_SIZE_FETCH_ADD(var, val) rbimpl_atomic_size_fetch_add(&(var), (val), RBIMPL_ATOMIC_SEQ_CST)

/** Atomically increments `var` by one; no return value. */
#define RUBY_ATOMIC_SIZE_INC(var) rbimpl_atomic_size_inc(&(var), RBIMPL_ATOMIC_SEQ_CST)

/** Atomically decrements `var` by one; no return value. */
#define RUBY_ATOMIC_SIZE_DEC(var) rbimpl_atomic_size_dec(&(var), RBIMPL_ATOMIC_SEQ_CST)

/** Atomically stores `val` into `var`; returns the previous value of `var`. */
#define RUBY_ATOMIC_SIZE_EXCHANGE(var, val) \
    rbimpl_atomic_size_exchange(&(var), (val), RBIMPL_ATOMIC_SEQ_CST)

/** Atomic compare-and-swap on a size_t; returns the pre-operation value of `var`. */
#define RUBY_ATOMIC_SIZE_CAS(var, oldval, newval) \
    rbimpl_atomic_size_cas(&(var), (oldval), (newval), RBIMPL_ATOMIC_SEQ_CST, RBIMPL_ATOMIC_SEQ_CST)

/** Atomically performs `var += val`; no return value. */
#define RUBY_ATOMIC_SIZE_ADD(var, val) rbimpl_atomic_size_add(&(var), (val), RBIMPL_ATOMIC_SEQ_CST)

/** Atomically performs `var -= val`; no return value. */
#define RUBY_ATOMIC_SIZE_SUB(var, val) rbimpl_atomic_size_sub(&(var), (val), RBIMPL_ATOMIC_SEQ_CST)
/* Public pointer variants.  `var` must be a pointer lvalue; results are cast
 * back through RBIMPL_CAST.  All use sequentially-consistent ordering.
 * (Fix: `val` / `var` are now parenthesised inside the expansions so that
 * expression arguments bind correctly under the casts.) */

/** Atomically stores `val` into pointer `var`; returns the previous pointer. */
#define RUBY_ATOMIC_PTR_EXCHANGE(var, val) \
    RBIMPL_CAST(rbimpl_atomic_ptr_exchange((void **)&(var), (void *)(val), RBIMPL_ATOMIC_SEQ_CST))

/** Atomically reads and returns the current value of pointer `var`. */
#define RUBY_ATOMIC_PTR_LOAD(var) \
    RBIMPL_CAST(rbimpl_atomic_ptr_load((void **)&(var), RBIMPL_ATOMIC_SEQ_CST))

/** Atomically stores `val` into pointer `var`; no return value. */
#define RUBY_ATOMIC_PTR_SET(var, val) \
    rbimpl_atomic_ptr_store((volatile void **)&(var), (val), RBIMPL_ATOMIC_SEQ_CST)

/** Atomic compare-and-swap on a pointer; returns the pre-operation pointer. */
#define RUBY_ATOMIC_PTR_CAS(var, oldval, newval) \
    RBIMPL_CAST(rbimpl_atomic_ptr_cas((void **)&(var), (void *)(oldval), (void *)(newval), RBIMPL_ATOMIC_SEQ_CST, RBIMPL_ATOMIC_SEQ_CST))
/* Public VALUE variants.  `var` must be an lvalue of type VALUE.  All use
 * sequentially-consistent ordering. */

/** Atomically stores `val` into `var`; no return value. */
#define RUBY_ATOMIC_VALUE_SET(var, val) \
    rbimpl_atomic_value_store(&(var), (val), RBIMPL_ATOMIC_SEQ_CST)

/** Atomically stores `val` into `var`; returns the previous VALUE. */
#define RUBY_ATOMIC_VALUE_EXCHANGE(var, val) \
    rbimpl_atomic_value_exchange(&(var), (val), RBIMPL_ATOMIC_SEQ_CST)

/** Atomic compare-and-swap on a VALUE; returns the pre-operation VALUE. */
#define RUBY_ATOMIC_VALUE_CAS(var, oldval, newval) \
    rbimpl_atomic_value_cas(&(var), (oldval), (newval), RBIMPL_ATOMIC_SEQ_CST, RBIMPL_ATOMIC_SEQ_CST)
419#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
420 return __atomic_fetch_add(ptr, val, memory_order);
422#elif defined(HAVE_GCC_SYNC_BUILTINS)
423 return __sync_fetch_and_add(ptr, val);
426 return InterlockedExchangeAdd(ptr, val);
428#elif defined(__sun) && defined(HAVE_ATOMIC_H)
435 return atomic_add_int_nv(ptr, val) - val;
437#elif defined(HAVE_STDATOMIC_H)
438 return atomic_fetch_add_explicit((_Atomic
volatile rb_atomic_t *)ptr, val, memory_order);
441# error Unsupported platform.
450rbimpl_atomic_size_fetch_add(volatile
size_t *ptr,
size_t val,
int memory_order)
455#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
456 return __atomic_fetch_add(ptr, val, memory_order);
458#elif defined(HAVE_GCC_SYNC_BUILTINS)
459 return __sync_fetch_and_add(ptr, val);
462 return InterlockedExchangeAdd64(ptr, val);
464#elif defined(__sun) && defined(HAVE_ATOMIC_H) && (defined(_LP64) || defined(_I32LPx))
467 atomic_add_long(ptr, val);
469#elif defined(__sun) && defined(HAVE_ATOMIC_H)
473 rbimpl_atomic_fetch_add(tmp, val, memory_order);
475#elif defined(HAVE_STDATOMIC_H)
476 return atomic_fetch_add_explicit((_Atomic
volatile size_t *)ptr, val, memory_order);
479# error Unsupported platform.
492#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
498 __atomic_add_fetch(ptr, val, memory_order);
500#elif defined(HAVE_GCC_SYNC_BUILTINS)
501 __sync_add_and_fetch(ptr, val);
509 InterlockedExchangeAdd(ptr, val);
511#elif defined(__sun) && defined(HAVE_ATOMIC_H)
514 atomic_add_int(ptr, val);
516#elif defined(HAVE_STDATOMIC_H)
517 atomic_fetch_add_explicit((_Atomic
volatile rb_atomic_t *)ptr, val, memory_order);
520# error Unsupported platform.
528rbimpl_atomic_size_add(volatile
size_t *ptr,
size_t val,
int memory_order)
533#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
534 __atomic_add_fetch(ptr, val, memory_order);
536#elif defined(HAVE_GCC_SYNC_BUILTINS)
537 __sync_add_and_fetch(ptr, val);
541 InterlockedExchangeAdd64(ptr, val);
543#elif defined(__sun) && defined(HAVE_ATOMIC_H) && (defined(_LP64) || defined(_I32LPx))
546 atomic_add_long(ptr, val);
548#elif defined(_WIN32) || (defined(__sun) && defined(HAVE_ATOMIC_H))
552 rbimpl_atomic_add(tmp, val, memory_order);
554#elif defined(HAVE_STDATOMIC_H)
555 atomic_fetch_add_explicit((_Atomic
volatile size_t *)ptr, val, memory_order);
558# error Unsupported platform.
566rbimpl_atomic_inc(volatile
rb_atomic_t *ptr,
int memory_order)
571#elif defined(HAVE_GCC_ATOMIC_BUILTINS) || defined(HAVE_GCC_SYNC_BUILTINS)
572 rbimpl_atomic_add(ptr, 1, memory_order);
575 InterlockedIncrement(ptr);
577#elif defined(__sun) && defined(HAVE_ATOMIC_H)
578 atomic_inc_uint(ptr);
580#elif defined(HAVE_STDATOMIC_H)
581 rbimpl_atomic_add(ptr, 1, memory_order);
584# error Unsupported platform.
592rbimpl_atomic_size_inc(volatile
size_t *ptr,
int memory_order)
597#elif defined(HAVE_GCC_ATOMIC_BUILTINS) || defined(HAVE_GCC_SYNC_BUILTINS)
598 rbimpl_atomic_size_add(ptr, 1, memory_order);
601 InterlockedIncrement64(ptr);
603#elif defined(__sun) && defined(HAVE_ATOMIC_H) && (defined(_LP64) || defined(_I32LPx))
604 atomic_inc_ulong(ptr);
606#elif defined(_WIN32) || (defined(__sun) && defined(HAVE_ATOMIC_H))
609 rbimpl_atomic_size_add(ptr, 1, memory_order);
611#elif defined(HAVE_STDATOMIC_H)
612 rbimpl_atomic_size_add(ptr, 1, memory_order);
615# error Unsupported platform.
628#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
629 return __atomic_fetch_sub(ptr, val, memory_order);
631#elif defined(HAVE_GCC_SYNC_BUILTINS)
632 return __sync_fetch_and_sub(ptr, val);
636 return InterlockedExchangeAdd(ptr, -val);
638#elif defined(__sun) && defined(HAVE_ATOMIC_H)
640 const signed neg = -1;
642 return atomic_add_int_nv(ptr, neg * val) + val;
644#elif defined(HAVE_STDATOMIC_H)
645 return atomic_fetch_sub_explicit((_Atomic
volatile rb_atomic_t *)ptr, val, memory_order);
648# error Unsupported platform.
661#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
662 __atomic_sub_fetch(ptr, val, memory_order);
664#elif defined(HAVE_GCC_SYNC_BUILTINS)
665 __sync_sub_and_fetch(ptr, val);
668 InterlockedExchangeAdd(ptr, -val);
670#elif defined(__sun) && defined(HAVE_ATOMIC_H)
671 const signed neg = -1;
673 atomic_add_int(ptr, neg * val);
675#elif defined(HAVE_STDATOMIC_H)
676 atomic_fetch_sub_explicit((_Atomic
volatile rb_atomic_t *)ptr, val, memory_order);
679# error Unsupported platform.
687rbimpl_atomic_size_sub(volatile
size_t *ptr,
size_t val,
int memory_order)
692#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
693 __atomic_sub_fetch(ptr, val, memory_order);
695#elif defined(HAVE_GCC_SYNC_BUILTINS)
696 __sync_sub_and_fetch(ptr, val);
699 const ssize_t neg = -1;
700 InterlockedExchangeAdd64(ptr, neg * val);
702#elif defined(__sun) && defined(HAVE_ATOMIC_H) && (defined(_LP64) || defined(_I32LPx))
703 const signed neg = -1;
705 atomic_add_long(ptr, neg * val);
707#elif defined(_WIN32) || (defined(__sun) && defined(HAVE_ATOMIC_H))
711 rbimpl_atomic_sub(tmp, val, memory_order);
713#elif defined(HAVE_STDATOMIC_H)
714 atomic_fetch_sub_explicit((_Atomic
volatile size_t *)ptr, val, memory_order);
717# error Unsupported platform.
725rbimpl_atomic_dec(volatile
rb_atomic_t *ptr,
int memory_order)
730#elif defined(HAVE_GCC_ATOMIC_BUILTINS) || defined(HAVE_GCC_SYNC_BUILTINS)
731 rbimpl_atomic_sub(ptr, 1, memory_order);
734 InterlockedDecrement(ptr);
736#elif defined(__sun) && defined(HAVE_ATOMIC_H)
737 atomic_dec_uint(ptr);
739#elif defined(HAVE_STDATOMIC_H)
740 rbimpl_atomic_sub(ptr, 1, memory_order);
743# error Unsupported platform.
751rbimpl_atomic_size_dec(volatile
size_t *ptr,
int memory_order)
756#elif defined(HAVE_GCC_ATOMIC_BUILTINS) || defined(HAVE_GCC_SYNC_BUILTINS)
757 rbimpl_atomic_size_sub(ptr, 1, memory_order);
760 InterlockedDecrement64(ptr);
762#elif defined(__sun) && defined(HAVE_ATOMIC_H) && (defined(_LP64) || defined(_I32LPx))
763 atomic_dec_ulong(ptr);
765#elif defined(_WIN32) || (defined(__sun) && defined(HAVE_ATOMIC_H))
768 rbimpl_atomic_size_sub(ptr, 1, memory_order);
770#elif defined(HAVE_STDATOMIC_H)
771 rbimpl_atomic_size_sub(ptr, 1, memory_order);
774# error Unsupported platform.
787#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
788 __atomic_or_fetch(ptr, val, memory_order);
790#elif defined(HAVE_GCC_SYNC_BUILTINS)
791 __sync_or_and_fetch(ptr, val);
793#elif RBIMPL_COMPILER_SINCE(MSVC, 13, 0, 0)
794 _InterlockedOr(ptr, val);
796#elif defined(_WIN32) && defined(__GNUC__)
804#elif defined(_WIN32) && defined(_M_IX86)
807 __asm lock or [eax], ecx;
809#elif defined(__sun) && defined(HAVE_ATOMIC_H)
810 atomic_or_uint(ptr, val);
812#elif !defined(_WIN32) && defined(HAVE_STDATOMIC_H)
813 atomic_fetch_or_explicit((_Atomic
volatile rb_atomic_t *)ptr, val, memory_order);
816# error Unsupported platform.
/* Fallback wrapper for MSVC < 13, which lacks the _InterlockedOr intrinsic
 * used above.  (Fragmented excerpt: the wrapper's attributes, signature and
 * the closing `# define`/`#endif` lines are not visible in this chunk.) */
821#if RBIMPL_COMPILER_BEFORE(MSVC, 13, 0, 0)
/* NOTE(review): this call passes no memory-order argument, unlike the other
 * rbimpl_atomic_or call sites visible in this file — confirm against upstream. */
825 return rbimpl_atomic_or(var, val);
838#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
839 return __atomic_exchange_n(ptr, val, memory_order);
841#elif defined(HAVE_GCC_SYNC_BUILTINS)
842 return __sync_lock_test_and_set(ptr, val);
845 return InterlockedExchange(ptr, val);
847#elif defined(__sun) && defined(HAVE_ATOMIC_H)
848 return atomic_swap_uint(ptr, val);
850#elif defined(HAVE_STDATOMIC_H)
851 return atomic_exchange_explicit((_Atomic
volatile rb_atomic_t *)ptr, val, memory_order);
854# error Unsupported platform.
862rbimpl_atomic_size_exchange(volatile
size_t *ptr,
size_t val,
int memory_order)
867#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
868 return __atomic_exchange_n(ptr, val, memory_order);
870#elif defined(HAVE_GCC_SYNC_BUILTINS)
871 return __sync_lock_test_and_set(ptr, val);
874 return InterlockedExchange64(ptr, val);
876#elif defined(__sun) && defined(HAVE_ATOMIC_H) && (defined(_LP64) || defined(_I32LPx))
877 return atomic_swap_ulong(ptr, val);
879#elif defined(_WIN32) || (defined(__sun) && defined(HAVE_ATOMIC_H))
883 const rb_atomic_t ret = rbimpl_atomic_exchange(tmp, val, memory_order);
884 return RBIMPL_CAST((
size_t)ret);
886#elif defined(HAVE_STDATOMIC_H)
887 return atomic_exchange_explicit((_Atomic
volatile size_t *)ptr, val, memory_order);
890# error Unsupported platform.
898rbimpl_atomic_size_store(volatile
size_t *ptr,
size_t val,
int memory_order)
903#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
904 __atomic_store_n(ptr, val, memory_order);
907 rbimpl_atomic_size_exchange(ptr, val, memory_order);
916rbimpl_atomic_ptr_exchange(
void *volatile *ptr, const
void *val,
int memory_order)
921#elif defined(InterlockedExchangePointer)
923 PVOID *pptr = RBIMPL_CAST((PVOID *)ptr);
924 PVOID pval = RBIMPL_CAST((PVOID)val);
925 return InterlockedExchangePointer(pptr, pval);
927#elif defined(__sun) && defined(HAVE_ATOMIC_H)
928 return atomic_swap_ptr(ptr, RBIMPL_CAST((
void *)val));
933 const size_t sval = RBIMPL_CAST((
size_t)val);
934 volatile size_t *
const sptr = RBIMPL_CAST((
volatile size_t *)ptr);
935 const size_t sret = rbimpl_atomic_size_exchange(sptr, sval, memory_order);
936 return RBIMPL_CAST((
void *)sret);
945rbimpl_atomic_ptr_store(volatile
void **ptr,
void *val,
int memory_order)
949 const size_t sval = RBIMPL_CAST((
size_t)val);
950 volatile size_t *
const sptr = RBIMPL_CAST((
volatile size_t *)ptr);
951 rbimpl_atomic_size_store(sptr, sval, memory_order);
958rbimpl_atomic_value_exchange(volatile
VALUE *ptr,
VALUE val,
int memory_order)
962 const size_t sval = RBIMPL_CAST((
size_t)val);
963 volatile size_t *
const sptr = RBIMPL_CAST((
volatile size_t *)ptr);
964 const size_t sret = rbimpl_atomic_size_exchange(sptr, sval, memory_order);
965 return RBIMPL_CAST((
VALUE)sret);
972rbimpl_atomic_value_store(volatile
VALUE *ptr,
VALUE val,
int memory_order)
976 const size_t sval = RBIMPL_CAST((
size_t)val);
977 volatile size_t *
const sptr = RBIMPL_CAST((
volatile size_t *)ptr);
978 rbimpl_atomic_size_store(sptr, sval, memory_order);
985rbimpl_atomic_load(volatile
rb_atomic_t *ptr,
int memory_order)
990#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
991 return __atomic_load_n(ptr, memory_order);
993 return rbimpl_atomic_fetch_add(ptr, 0, memory_order);
1006#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
1007 __atomic_store_n(ptr, val, memory_order);
1011 rbimpl_atomic_exchange(ptr, val, memory_order);
1022 (void)success_memorder;
1023 (void)failure_memorder;
1026#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
1027 __atomic_compare_exchange_n(
1028 ptr, &oldval, newval, 0, success_memorder, failure_memorder);
1031#elif defined(HAVE_GCC_SYNC_BUILTINS)
1032 return __sync_val_compare_and_swap(ptr, oldval, newval);
1034#elif RBIMPL_COMPILER_SINCE(MSVC, 13, 0, 0)
1035 return InterlockedCompareExchange(ptr, newval, oldval);
1037#elif defined(_WIN32)
1038 PVOID *pptr = RBIMPL_CAST((PVOID *)ptr);
1039 PVOID pold = RBIMPL_CAST((PVOID)oldval);
1040 PVOID pnew = RBIMPL_CAST((PVOID)newval);
1041 PVOID pret = InterlockedCompareExchange(pptr, pnew, pold);
1044#elif defined(__sun) && defined(HAVE_ATOMIC_H)
1045 return atomic_cas_uint(ptr, oldval, newval);
1047#elif defined(HAVE_STDATOMIC_H)
1048 atomic_compare_exchange_strong_explicit(
1049 (_Atomic
volatile rb_atomic_t *)ptr, &oldval, newval, success_memorder, failure_memorder);
1053# error Unsupported platform.
/* Fallback wrapper for MSVC < 13, mirroring the rb_w32 OR wrapper above.
 * (Fragmented excerpt: the wrapper's attributes, signature and the closing
 * `# define`/`#endif` lines are not visible in this chunk.) */
1058#if RBIMPL_COMPILER_BEFORE(MSVC, 13, 0, 0)
/* NOTE(review): this call passes no memory-order arguments, unlike the other
 * rbimpl_atomic_cas call sites visible in this file — confirm against upstream. */
1062 return rbimpl_atomic_cas(var, oldval, newval);
1070rbimpl_atomic_size_cas(volatile
size_t *ptr,
size_t oldval,
size_t newval,
int success_memorder,
int failure_memorder)
1072 (void)success_memorder;
1073 (void)failure_memorder;
1076#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
1077 __atomic_compare_exchange_n(
1078 ptr, &oldval, newval, 0, success_memorder, failure_memorder);
1081#elif defined(HAVE_GCC_SYNC_BUILTINS)
1082 return __sync_val_compare_and_swap(ptr, oldval, newval);
1084#elif defined(_WIN64)
1085 return InterlockedCompareExchange64(ptr, newval, oldval);
1087#elif defined(__sun) && defined(HAVE_ATOMIC_H) && (defined(_LP64) || defined(_I32LPx))
1088 return atomic_cas_ulong(ptr, oldval, newval);
1090#elif defined(_WIN32) || (defined(__sun) && defined(HAVE_ATOMIC_H))
1094 return rbimpl_atomic_cas(tmp, oldval, newval, success_memorder, failure_memorder);
1096#elif defined(HAVE_STDATOMIC_H)
1097 atomic_compare_exchange_strong_explicit(
1098 (_Atomic
volatile size_t *)ptr, &oldval, newval, success_memorder, failure_memorder);
1102# error Unsupported platform.
1110rbimpl_atomic_ptr_cas(
void **ptr, const
void *oldval, const
void *newval,
int success_memorder,
int failure_memorder)
1112 (void)success_memorder;
1113 (void)failure_memorder;
1116#elif defined(InterlockedExchangePointer)
1119 PVOID *pptr = RBIMPL_CAST((PVOID *)ptr);
1120 PVOID pold = RBIMPL_CAST((PVOID)oldval);
1121 PVOID pnew = RBIMPL_CAST((PVOID)newval);
1122 return InterlockedCompareExchangePointer(pptr, pnew, pold);
1124#elif defined(__sun) && defined(HAVE_ATOMIC_H)
1125 void *pold = RBIMPL_CAST((
void *)oldval);
1126 void *pnew = RBIMPL_CAST((
void *)newval);
1127 return atomic_cas_ptr(ptr, pold, pnew);
1133 const size_t snew = RBIMPL_CAST((
size_t)newval);
1134 const size_t sold = RBIMPL_CAST((
size_t)oldval);
1135 volatile size_t *
const sptr = RBIMPL_CAST((
volatile size_t *)ptr);
1136 const size_t sret = rbimpl_atomic_size_cas(sptr, sold, snew, success_memorder, failure_memorder);
1137 return RBIMPL_CAST((
void *)sret);
1146rbimpl_atomic_ptr_load(
void **ptr,
int memory_order)
1151#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
1152 return __atomic_load_n(ptr, memory_order);
1155 return rbimpl_atomic_ptr_cas(ptr, val, val, memory_order, memory_order);
1163rbimpl_atomic_value_load(volatile
VALUE *ptr,
int memory_order)
1165 return RBIMPL_CAST((
VALUE)rbimpl_atomic_ptr_load((
void **)ptr, memory_order));
1172rbimpl_atomic_value_cas(volatile
VALUE *ptr,
VALUE oldval,
VALUE newval,
int success_memorder,
int failure_memorder)
1176 const size_t snew = RBIMPL_CAST((
size_t)newval);
1177 const size_t sold = RBIMPL_CAST((
size_t)oldval);
1178 volatile size_t *
const sptr = RBIMPL_CAST((
volatile size_t *)ptr);
1179 const size_t sret = rbimpl_atomic_size_cas(sptr, sold, snew, success_memorder, failure_memorder);
1180 return RBIMPL_CAST((
VALUE)sret);
Defines RBIMPL_ATTR_ARTIFICIAL.
#define RBIMPL_ATTR_ARTIFICIAL()
Wraps (or simulates) __attribute__((artificial))
#define RBIMPL_ASSERT_OR_ASSUME(...)
This is either RUBY_ASSERT or RBIMPL_ASSUME, depending on RUBY_DEBUG.
std::atomic< unsigned > rb_atomic_t
Type that is eligible for atomic operations.
Defines RBIMPL_COMPILER_SINCE.
Defines RBIMPL_STATIC_ASSERT.
#define RBIMPL_STATIC_ASSERT
Wraps (or simulates) static_assert
Defines RBIMPL_ATTR_NOALIAS.
#define RBIMPL_ATTR_NOALIAS()
Wraps (or simulates) __declspec((noalias))
Defines RBIMPL_ATTR_NONNULL.
#define RBIMPL_ATTR_NONNULL(list)
Wraps (or simulates) __attribute__((nonnull))
#define inline
Old Visual Studio versions do not support the inline keyword, so we need to define it to be __inline.
uintptr_t VALUE
Type that represents a Ruby object.