27#include "ruby/internal/config.h"
33#ifdef HAVE_SYS_TYPES_H
34# include <sys/types.h>
37#if RBIMPL_COMPILER_SINCE(MSVC, 13, 0, 0)
38# pragma intrinsic(_InterlockedOr)
39#elif defined(__sun) && defined(HAVE_ATOMIC_H)
44#include "ruby/backward/2/limits.h"
49#include "ruby/internal/cast.h"
59#if defined(__DOXYGEN__) || defined(HAVE_GCC_ATOMIC_BUILTINS) || defined(HAVE_GCC_SYNC_BUILTINS)
60# define RUBY_ATOMIC_GENERIC_MACRO 1
68#if defined(__DOXYGEN__)
70#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
72#elif defined(HAVE_GCC_SYNC_BUILTINS)
78#elif defined(__sun) && defined(HAVE_ATOMIC_H)
81# error No atomic operation found
/**
 * Atomically fetches `var` and adds `val` to it.
 * @return the value of `var` *before* the addition.
 */
#define RUBY_ATOMIC_FETCH_ADD(var, val) rbimpl_atomic_fetch_add(&(var), (val))

/**
 * Atomically fetches `var` and subtracts `val` from it.
 * @return the value of `var` *before* the subtraction.
 */
#define RUBY_ATOMIC_FETCH_SUB(var, val) rbimpl_atomic_fetch_sub(&(var), (val))

/** Atomically bitwise-ORs `val` into `var`; no return value. */
#define RUBY_ATOMIC_OR(var, val) rbimpl_atomic_or(&(var), (val))

/** Atomically stores `val` into `var`, returning the previous value. */
#define RUBY_ATOMIC_EXCHANGE(var, val) rbimpl_atomic_exchange(&(var), (val))

/**
 * Atomic compare-and-swap: if `var` equals `oldval`, store `newval`.
 * @return the value of `var` observed before the operation.
 */
#define RUBY_ATOMIC_CAS(var, oldval, newval) \
    rbimpl_atomic_cas(&(var), (oldval), (newval))

/** Atomic load of `var`. */
#define RUBY_ATOMIC_LOAD(var) rbimpl_atomic_load(&(var))

/** Atomic store of `val` into `var`; no return value. */
#define RUBY_ATOMIC_SET(var, val) rbimpl_atomic_set(&(var), (val))

/** Atomically adds `val` to `var`; no return value. */
#define RUBY_ATOMIC_ADD(var, val) rbimpl_atomic_add(&(var), (val))

/** Atomically subtracts `val` from `var`; no return value. */
#define RUBY_ATOMIC_SUB(var, val) rbimpl_atomic_sub(&(var), (val))

/** Atomically increments `var` by 1; no return value. */
#define RUBY_ATOMIC_INC(var) rbimpl_atomic_inc(&(var))

/** Atomically decrements `var` by 1; no return value. */
#define RUBY_ATOMIC_DEC(var) rbimpl_atomic_dec(&(var))
/** `size_t` variant of #RUBY_ATOMIC_INC: increments `var` by 1. */
#define RUBY_ATOMIC_SIZE_INC(var) rbimpl_atomic_size_inc(&(var))

/** `size_t` variant of #RUBY_ATOMIC_DEC: decrements `var` by 1. */
#define RUBY_ATOMIC_SIZE_DEC(var) rbimpl_atomic_size_dec(&(var))

/** `size_t` variant of #RUBY_ATOMIC_EXCHANGE: stores `val`, returns the old value. */
#define RUBY_ATOMIC_SIZE_EXCHANGE(var, val) \
    rbimpl_atomic_size_exchange(&(var), (val))

/** `size_t` variant of #RUBY_ATOMIC_CAS. */
#define RUBY_ATOMIC_SIZE_CAS(var, oldval, newval) \
    rbimpl_atomic_size_cas(&(var), (oldval), (newval))

/** `size_t` variant of #RUBY_ATOMIC_ADD; no return value. */
#define RUBY_ATOMIC_SIZE_ADD(var, val) rbimpl_atomic_size_add(&(var), (val))

/** `size_t` variant of #RUBY_ATOMIC_SUB; no return value. */
#define RUBY_ATOMIC_SIZE_SUB(var, val) rbimpl_atomic_size_sub(&(var), (val))
/**
 * Pointer variant of #RUBY_ATOMIC_EXCHANGE: stores `val`, returns the old
 * pointer.  Arguments are now fully parenthesised (`(val)`, `&(var)`) so
 * expression arguments expand safely, matching the sibling macros.
 */
#define RUBY_ATOMIC_PTR_EXCHANGE(var, val) \
    RBIMPL_CAST(rbimpl_atomic_ptr_exchange((void **)&(var), (void *)(val)))

/** Pointer variant of #RUBY_ATOMIC_LOAD. */
#define RUBY_ATOMIC_PTR_LOAD(var) \
    RBIMPL_CAST(rbimpl_atomic_ptr_load((void **)&(var)))

/** Pointer variant of #RUBY_ATOMIC_CAS. */
#define RUBY_ATOMIC_PTR_CAS(var, oldval, newval) \
    RBIMPL_CAST(rbimpl_atomic_ptr_cas((void **)&(var), (void *)(oldval), (void *)(newval)))
/** `VALUE` variant of #RUBY_ATOMIC_SET; no return value. */
#define RUBY_ATOMIC_VALUE_SET(var, val) \
    rbimpl_atomic_value_set(&(var), (val))

/** `VALUE` variant of #RUBY_ATOMIC_EXCHANGE: stores `val`, returns the old `VALUE`. */
#define RUBY_ATOMIC_VALUE_EXCHANGE(var, val) \
    rbimpl_atomic_value_exchange(&(var), (val))

/** `VALUE` variant of #RUBY_ATOMIC_CAS. */
#define RUBY_ATOMIC_VALUE_CAS(var, oldval, newval) \
    rbimpl_atomic_value_cas(&(var), (oldval), (newval))
/* NOTE(review): this excerpt elides lines (the fused numeric prefixes jump),
 * so function heads, `#if` openers and `#endif`s are missing.  The fragments
 * below appear to be the per-platform branches of rbimpl_atomic_fetch_add,
 * which returns the value *before* the addition -- TODO confirm against the
 * full header. */
368#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
369 return __atomic_fetch_add(ptr, val, __ATOMIC_SEQ_CST);
371#elif defined(HAVE_GCC_SYNC_BUILTINS)
372 return __sync_fetch_and_add(ptr, val);
/* Win32: InterlockedExchangeAdd also returns the old value. */
375 return InterlockedExchangeAdd(ptr, val);
377#elif defined(__sun) && defined(HAVE_ATOMIC_H)
/* Solaris atomic_add_int_nv returns the *new* value; subtract `val` to
 * recover fetch-add semantics. */
384 return atomic_add_int_nv(ptr, val) - val;
387# error Unsupported platform.
/* Fragments below: branches of rbimpl_atomic_add (void add, no return). */
399#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
405 __atomic_add_fetch(ptr, val, __ATOMIC_SEQ_CST);
407#elif defined(HAVE_GCC_SYNC_BUILTINS)
408 __sync_add_and_fetch(ptr, val);
416 InterlockedExchangeAdd(ptr, val);
418#elif defined(__sun) && defined(HAVE_ATOMIC_H)
421 atomic_add_int(ptr, val);
424# error Unsupported platform.
/* Fragment: rbimpl_atomic_size_add(volatile size_t *ptr, size_t val) --
 * size_t counterpart of rbimpl_atomic_add.  The signature is split across
 * lines and its return type / `#if` opener are elided by this excerpt. */
432rbimpl_atomic_size_add(volatile
size_t *ptr,
size_t val)
436#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
437 __atomic_add_fetch(ptr, val, __ATOMIC_SEQ_CST);
439#elif defined(HAVE_GCC_SYNC_BUILTINS)
440 __sync_add_and_fetch(ptr, val);
/* 64-bit Interlocked variant; the fallback below funnels through the
 * rb_atomic_t implementation via `tmp` (declared on an elided line). */
444 InterlockedExchangeAdd64(ptr, val);
446#elif defined(__sun) && defined(HAVE_ATOMIC_H) && (defined(_LP64) || defined(_I32LPx))
449 atomic_add_long(ptr, val);
455 rbimpl_atomic_add(tmp, val);
/* Fragments: rbimpl_atomic_inc -- increments an rb_atomic_t by 1. */
468#elif defined(HAVE_GCC_ATOMIC_BUILTINS) || defined(HAVE_GCC_SYNC_BUILTINS)
469 rbimpl_atomic_add(ptr, 1);
472 InterlockedIncrement(ptr);
474#elif defined(__sun) && defined(HAVE_ATOMIC_H)
475 atomic_inc_uint(ptr);
478 rbimpl_atomic_add(ptr, 1);
/* Fragment: rbimpl_atomic_size_inc(volatile size_t *ptr). */
487rbimpl_atomic_size_inc(volatile
size_t *ptr)
491#elif defined(HAVE_GCC_ATOMIC_BUILTINS) || defined(HAVE_GCC_SYNC_BUILTINS)
492 rbimpl_atomic_size_add(ptr, 1);
495 InterlockedIncrement64(ptr);
497#elif defined(__sun) && defined(HAVE_ATOMIC_H) && (defined(_LP64) || defined(_I32LPx))
498 atomic_inc_ulong(ptr);
503 rbimpl_atomic_size_add(ptr, 1);
/* Fragments of rbimpl_atomic_fetch_sub -- returns the value *before* the
 * subtraction (function head and `#if` opener elided). */
516#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
517 return __atomic_fetch_sub(ptr, val, __ATOMIC_SEQ_CST);
519#elif defined(HAVE_GCC_SYNC_BUILTINS)
520 return __sync_fetch_and_sub(ptr, val);
/* Win32: add the negation; InterlockedExchangeAdd returns the old value. */
524 return InterlockedExchangeAdd(ptr, -val);
526#elif defined(__sun) && defined(HAVE_ATOMIC_H)
/* `neg * val` avoids directly negating the (unsigned) operand; the *_nv
 * call returns the new value, so `+ val` recovers fetch-sub semantics. */
528 const signed neg = -1;
530 return atomic_add_int_nv(ptr, neg * val) + val;
533# error Unsupported platform.
/* Fragments of rbimpl_atomic_sub (void subtract, no return). */
545#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
546 __atomic_sub_fetch(ptr, val, __ATOMIC_SEQ_CST);
548#elif defined(HAVE_GCC_SYNC_BUILTINS)
549 __sync_sub_and_fetch(ptr, val);
552 InterlockedExchangeAdd(ptr, -val);
554#elif defined(__sun) && defined(HAVE_ATOMIC_H)
555 const signed neg = -1;
557 atomic_add_int(ptr, neg * val);
560# error Unsupported platform.
/* Fragment: rbimpl_atomic_size_sub(volatile size_t *ptr, size_t val). */
568rbimpl_atomic_size_sub(volatile
size_t *ptr,
size_t val)
572#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
573 __atomic_sub_fetch(ptr, val, __ATOMIC_SEQ_CST);
575#elif defined(HAVE_GCC_SYNC_BUILTINS)
576 __sync_sub_and_fetch(ptr, val);
579 const ssize_t neg = -1;
580 InterlockedExchangeAdd64(ptr, neg * val);
582#elif defined(__sun) && defined(HAVE_ATOMIC_H) && (defined(_LP64) || defined(_I32LPx))
583 const signed neg = -1;
585 atomic_add_long(ptr, neg * val);
/* Fallback funnels through rbimpl_atomic_sub via `tmp` (declared on an
 * elided line). */
591 rbimpl_atomic_sub(tmp, val);
/* Fragments of rbimpl_atomic_dec -- decrements an rb_atomic_t by 1. */
604#elif defined(HAVE_GCC_ATOMIC_BUILTINS) || defined(HAVE_GCC_SYNC_BUILTINS)
605 rbimpl_atomic_sub(ptr, 1);
608 InterlockedDecrement(ptr);
610#elif defined(__sun) && defined(HAVE_ATOMIC_H)
611 atomic_dec_uint(ptr);
614 rbimpl_atomic_sub(ptr, 1);
/* Fragment: rbimpl_atomic_size_dec(volatile size_t *ptr). */
623rbimpl_atomic_size_dec(volatile
size_t *ptr)
627#elif defined(HAVE_GCC_ATOMIC_BUILTINS) || defined(HAVE_GCC_SYNC_BUILTINS)
628 rbimpl_atomic_size_sub(ptr, 1);
631 InterlockedDecrement64(ptr);
633#elif defined(__sun) && defined(HAVE_ATOMIC_H) && (defined(_LP64) || defined(_I32LPx))
634 atomic_dec_ulong(ptr);
639 rbimpl_atomic_size_sub(ptr, 1);
/* Fragments of rbimpl_atomic_or (void bitwise-OR, no return value). */
652#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
653 __atomic_or_fetch(ptr, val, __ATOMIC_SEQ_CST);
655#elif defined(HAVE_GCC_SYNC_BUILTINS)
656 __sync_or_and_fetch(ptr, val);
658#elif RBIMPL_COMPILER_SINCE(MSVC, 13, 0, 0)
/* MSVC intrinsic (cf. the `#pragma intrinsic(_InterlockedOr)` near the top
 * of the header). */
659 _InterlockedOr(ptr, val);
661#elif defined(_WIN32) && defined(__GNUC__)
/* Old 32-bit MSVC: inline-assembly locked OR (operand setup elided). */
669#elif defined(_WIN32) && defined(_M_IX86)
672 __asm lock or [eax], ecx;
674#elif defined(__sun) && defined(HAVE_ATOMIC_H)
675 atomic_or_uint(ptr, val);
678# error Unsupported platform.
/* Fragment: pre-MSVC-13 overload that forwards to rbimpl_atomic_or (its
 * signature is on elided lines). */
683#if RBIMPL_COMPILER_BEFORE(MSVC, 13, 0, 0)
687 return rbimpl_atomic_or(var, val);
/* Fragments of rbimpl_atomic_exchange -- stores `val`, returns the old
 * value. */
699#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
700 return __atomic_exchange_n(ptr, val, __ATOMIC_SEQ_CST);
702#elif defined(HAVE_GCC_SYNC_BUILTINS)
703 return __sync_lock_test_and_set(ptr, val);
706 return InterlockedExchange(ptr, val);
708#elif defined(__sun) && defined(HAVE_ATOMIC_H)
709 return atomic_swap_uint(ptr, val);
712# error Unsupported platform.
/* Fragment: rbimpl_atomic_size_exchange(volatile size_t *ptr, size_t val). */
720rbimpl_atomic_size_exchange(volatile
size_t *ptr,
size_t val)
724#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
725 return __atomic_exchange_n(ptr, val, __ATOMIC_SEQ_CST);
727#elif defined(HAVE_GCC_SYNC_BUILTINS)
728 return __sync_lock_test_and_set(ptr, val);
731 return InterlockedExchange64(ptr, val);
733#elif defined(__sun) && defined(HAVE_ATOMIC_H) && (defined(_LP64) || defined(_I32LPx))
734 return atomic_swap_ulong(ptr, val);
/* Fallback: exchange through the rb_atomic_t implementation (`tmp` is
 * declared on an elided line) and convert the result back to size_t. */
740 const rb_atomic_t ret = rbimpl_atomic_exchange(tmp, val);
741 return RBIMPL_CAST((
size_t)ret);
/* Fragment: rbimpl_atomic_size_set(volatile size_t *ptr, size_t val) --
 * atomic store; the fallback discards the exchange's return value. */
750rbimpl_atomic_size_set(volatile
size_t *ptr,
size_t val)
754#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
755 __atomic_store_n(ptr, val, __ATOMIC_SEQ_CST);
758 rbimpl_atomic_size_exchange(ptr, val);
/* Fragment: rbimpl_atomic_ptr_exchange(void *volatile *ptr, const void *val)
 * -- stores `val`, returns the old pointer. */
767rbimpl_atomic_ptr_exchange(
void *volatile *ptr, const
void *val)
771#elif defined(InterlockedExchangePointer)
773 PVOID *pptr = RBIMPL_CAST((PVOID *)ptr);
774 PVOID pval = RBIMPL_CAST((PVOID)val);
775 return InterlockedExchangePointer(pptr, pval);
777#elif defined(__sun) && defined(HAVE_ATOMIC_H)
778 return atomic_swap_ptr(ptr, RBIMPL_CAST((
void *)val));
/* Fallback: round-trip the pointer through the size_t exchange. */
783 const size_t sval = RBIMPL_CAST((
size_t)val);
784 volatile size_t *
const sptr = RBIMPL_CAST((
volatile size_t *)ptr);
785 const size_t sret = rbimpl_atomic_size_exchange(sptr, sval);
786 return RBIMPL_CAST((
void *)sret);
/* Fragment: rbimpl_atomic_value_exchange(volatile VALUE *ptr, VALUE val) --
 * reuses the size_t exchange, presumably because VALUE is pointer-sized --
 * TODO confirm against ruby/value.h. */
795rbimpl_atomic_value_exchange(volatile
VALUE *ptr,
VALUE val)
799 const size_t sval = RBIMPL_CAST((
size_t)val);
800 volatile size_t *
const sptr = RBIMPL_CAST((
volatile size_t *)ptr);
801 const size_t sret = rbimpl_atomic_size_exchange(sptr, sval);
802 return RBIMPL_CAST((
VALUE)sret);
/* Fragment: rbimpl_atomic_value_set(volatile VALUE *ptr, VALUE val). */
809rbimpl_atomic_value_set(volatile
VALUE *ptr,
VALUE val)
813 const size_t sval = RBIMPL_CAST((
size_t)val);
814 volatile size_t *
const sptr = RBIMPL_CAST((
volatile size_t *)ptr);
815 rbimpl_atomic_size_set(sptr, sval);
/* Fragments of rbimpl_atomic_load -- seq-cst load; the fallback emulates a
 * load with fetch_add(ptr, 0). */
826#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
827 return __atomic_load_n(ptr, __ATOMIC_SEQ_CST);
829 return rbimpl_atomic_fetch_add(ptr, 0);
/* Fragments of rbimpl_atomic_set -- seq-cst store; the fallback discards
 * the exchange's return value. */
841#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
842 __atomic_store_n(ptr, val, __ATOMIC_SEQ_CST);
846 rbimpl_atomic_exchange(ptr, val);
/* Fragments of rbimpl_atomic_cas -- compare-and-swap returning the value
 * observed before the operation.  __atomic_compare_exchange_n writes the
 * observed value back into `oldval`; the `return oldval;` presumably follows
 * on an elided line -- TODO confirm against the full header. */
859#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
860 __atomic_compare_exchange_n(
861 ptr, &oldval, newval, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
864#elif defined(HAVE_GCC_SYNC_BUILTINS)
865 return __sync_val_compare_and_swap(ptr, oldval, newval);
867#elif RBIMPL_COMPILER_SINCE(MSVC, 13, 0, 0)
868 return InterlockedCompareExchange(ptr, newval, oldval);
/* Pre-intrinsic MSVC path via PVOID-typed InterlockedCompareExchange
 * (conversion of `pret` back to rb_atomic_t is on an elided line). */
871 PVOID *pptr = RBIMPL_CAST((PVOID *)ptr);
872 PVOID pold = RBIMPL_CAST((PVOID)oldval);
873 PVOID pnew = RBIMPL_CAST((PVOID)newval);
874 PVOID pret = InterlockedCompareExchange(pptr, pnew, pold);
877#elif defined(__sun) && defined(HAVE_ATOMIC_H)
878 return atomic_cas_uint(ptr, oldval, newval);
881# error Unsupported platform.
/* Fragment: pre-MSVC-13 overload forwarding to rbimpl_atomic_cas. */
886#if RBIMPL_COMPILER_BEFORE(MSVC, 13, 0, 0)
890 return rbimpl_atomic_cas(var, oldval, newval);
/* Fragment: rbimpl_atomic_size_cas(volatile size_t *ptr, size_t oldval,
 * size_t newval). */
898rbimpl_atomic_size_cas(volatile
size_t *ptr,
size_t oldval,
size_t newval)
902#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
903 __atomic_compare_exchange_n(
904 ptr, &oldval, newval, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
907#elif defined(HAVE_GCC_SYNC_BUILTINS)
908 return __sync_val_compare_and_swap(ptr, oldval, newval);
911 return InterlockedCompareExchange64(ptr, newval, oldval);
913#elif defined(__sun) && defined(HAVE_ATOMIC_H) && (defined(_LP64) || defined(_I32LPx))
914 return atomic_cas_ulong(ptr, oldval, newval);
/* Fallback funnels through the rb_atomic_t CAS via `tmp` (declared on an
 * elided line). */
920 return rbimpl_atomic_cas(tmp, oldval, newval);
/* Fragment: rbimpl_atomic_ptr_cas(void **ptr, const void *oldval,
 * const void *newval) -- pointer compare-and-swap. */
929rbimpl_atomic_ptr_cas(
void **ptr, const
void *oldval, const
void *newval)
933#elif defined(InterlockedExchangePointer)
936 PVOID *pptr = RBIMPL_CAST((PVOID *)ptr);
937 PVOID pold = RBIMPL_CAST((PVOID)oldval);
938 PVOID pnew = RBIMPL_CAST((PVOID)newval);
939 return InterlockedCompareExchangePointer(pptr, pnew, pold);
941#elif defined(__sun) && defined(HAVE_ATOMIC_H)
942 void *pold = RBIMPL_CAST((
void *)oldval);
943 void *pnew = RBIMPL_CAST((
void *)newval);
944 return atomic_cas_ptr(ptr, pold, pnew);
/* Fallback: round-trip through the size_t CAS. */
950 const size_t snew = RBIMPL_CAST((
size_t)newval);
951 const size_t sold = RBIMPL_CAST((
size_t)oldval);
952 volatile size_t *
const sptr = RBIMPL_CAST((
volatile size_t *)ptr);
953 const size_t sret = rbimpl_atomic_size_cas(sptr, sold, snew);
954 return RBIMPL_CAST((
void *)sret);
/* Fragment: rbimpl_atomic_ptr_load(void **ptr) -- the fallback performs the
 * load as a self-CAS; `val` is presumably read on an elided line. */
963rbimpl_atomic_ptr_load(
void **ptr)
967#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
968 return __atomic_load_n(ptr, __ATOMIC_SEQ_CST);
971 return rbimpl_atomic_ptr_cas(ptr, val, val);
/* Fragment: rbimpl_atomic_value_cas(volatile VALUE *ptr, VALUE oldval,
 * VALUE newval) -- VALUE CAS implemented via the size_t CAS. */
979rbimpl_atomic_value_cas(volatile
VALUE *ptr,
VALUE oldval,
VALUE newval)
983 const size_t snew = RBIMPL_CAST((
size_t)newval);
984 const size_t sold = RBIMPL_CAST((
size_t)oldval);
985 volatile size_t *
const sptr = RBIMPL_CAST((
volatile size_t *)ptr);
986 const size_t sret = rbimpl_atomic_size_cas(sptr, sold, snew);
987 return RBIMPL_CAST((
VALUE)sret);
Defines RBIMPL_ATTR_ARTIFICIAL.
#define RBIMPL_ATTR_ARTIFICIAL()
Wraps (or simulates) __attribute__((artificial))
#define RBIMPL_ASSERT_OR_ASSUME(...)
This is either RUBY_ASSERT or RBIMPL_ASSUME, depending on RUBY_DEBUG.
std::atomic< unsigned > rb_atomic_t
Type that is eligible for atomic operations.
Defines RBIMPL_COMPILER_SINCE.
Defines RBIMPL_STATIC_ASSERT.
#define RBIMPL_STATIC_ASSERT
Wraps (or simulates) static_assert
Defines RBIMPL_ATTR_NOALIAS.
#define RBIMPL_ATTR_NOALIAS()
Wraps (or simulates) __declspec(noalias)
Defines RBIMPL_ATTR_NONNULL.
#define RBIMPL_ATTR_NONNULL(list)
Wraps (or simulates) __attribute__((nonnull))
#define inline
Old Visual Studio versions do not support the inline keyword, so we need to define it to be __inline.
uintptr_t VALUE
Type that represents a Ruby object.