27#include "ruby/internal/config.h"
33#ifdef HAVE_SYS_TYPES_H
34# include <sys/types.h>
37#if RBIMPL_COMPILER_SINCE(MSVC, 13, 0, 0)
38# pragma intrinsic(_InterlockedOr)
39#elif defined(__sun) && defined(HAVE_ATOMIC_H)
44#include "ruby/backward/2/limits.h"
49#include "ruby/internal/cast.h"
59#if defined(__DOXYGEN__) || defined(HAVE_GCC_ATOMIC_BUILTINS) || defined(HAVE_GCC_SYNC_BUILTINS)
60# define RUBY_ATOMIC_GENERIC_MACRO 1
68#if defined(__DOXYGEN__)
70#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
72#elif defined(HAVE_GCC_SYNC_BUILTINS)
78#elif defined(__sun) && defined(HAVE_ATOMIC_H)
81# error No atomic operation found
/**
 * Atomically performs `var += val` and evaluates to the value of `var`
 * *before* the addition (fetch-then-add; see `__atomic_fetch_add` in the
 * implementations below).
 */
93#define RUBY_ATOMIC_FETCH_ADD(var, val) rbimpl_atomic_fetch_add(&(var), (val))

/**
 * Atomically performs `var -= val` and evaluates to the value of `var`
 * *before* the subtraction.
 */
104#define RUBY_ATOMIC_FETCH_SUB(var, val) rbimpl_atomic_fetch_sub(&(var), (val))

/**
 * Atomic bitwise OR: `var |= val`.  Executed for effect only; the
 * implementations below discard the or-fetch result.
 */
116#define RUBY_ATOMIC_OR(var, val) rbimpl_atomic_or(&(var), (val))

/**
 * Atomically stores `val` into `var` and evaluates to the value that was
 * previously stored there (see `__atomic_exchange_n` below).
 */
127#define RUBY_ATOMIC_EXCHANGE(var, val) rbimpl_atomic_exchange(&(var), (val))

/**
 * Atomic compare-and-swap: iff `var == oldval`, store `newval` into it.
 * Evaluates to the value of `var` observed by the operation, so success can
 * be detected by comparing the result against `oldval` (see
 * `__sync_val_compare_and_swap` below).
 */
140#define RUBY_ATOMIC_CAS(var, oldval, newval) \
141 rbimpl_atomic_cas(&(var), (oldval), (newval))

/** Atomic load: evaluates to the current value of `var`. */
150#define RUBY_ATOMIC_LOAD(var) rbimpl_atomic_load(&(var))

/** Atomic store of `val` into `var`.  Executed for effect only. */
160#define RUBY_ATOMIC_SET(var, val) rbimpl_atomic_set(&(var), (val))

/** Atomically performs `var += val`.  Executed for effect only. */
170#define RUBY_ATOMIC_ADD(var, val) rbimpl_atomic_add(&(var), (val))

/** Atomically performs `var -= val`.  Executed for effect only. */
180#define RUBY_ATOMIC_SUB(var, val) rbimpl_atomic_sub(&(var), (val))

/** Atomically increments `var` by one.  Executed for effect only. */
189#define RUBY_ATOMIC_INC(var) rbimpl_atomic_inc(&(var))

/** Atomically decrements `var` by one.  Executed for effect only. */
198#define RUBY_ATOMIC_DEC(var) rbimpl_atomic_dec(&(var))
/**
 * Identical to #RUBY_ATOMIC_FETCH_ADD, except it expects its arguments are
 * `size_t`: atomically performs `var += val`, evaluating to the old value.
 */
210#define RUBY_ATOMIC_SIZE_FETCH_ADD(var, val) rbimpl_atomic_size_fetch_add(&(var), (val))

/** `size_t` variant of #RUBY_ATOMIC_INC: atomically increments `var`. */
221#define RUBY_ATOMIC_SIZE_INC(var) rbimpl_atomic_size_inc(&(var))

/** `size_t` variant of #RUBY_ATOMIC_DEC: atomically decrements `var`. */
232#define RUBY_ATOMIC_SIZE_DEC(var) rbimpl_atomic_size_dec(&(var))

/**
 * `size_t` variant of #RUBY_ATOMIC_EXCHANGE: atomically stores `val` and
 * evaluates to the previously stored value.
 */
245#define RUBY_ATOMIC_SIZE_EXCHANGE(var, val) \
246 rbimpl_atomic_size_exchange(&(var), (val))

/**
 * `size_t` variant of #RUBY_ATOMIC_CAS: iff `var == oldval`, store `newval`;
 * evaluates to the value observed by the operation.
 */
259#define RUBY_ATOMIC_SIZE_CAS(var, oldval, newval) \
260 rbimpl_atomic_size_cas(&(var), (oldval), (newval))

/** `size_t` variant of #RUBY_ATOMIC_ADD.  Executed for effect only. */
272#define RUBY_ATOMIC_SIZE_ADD(var, val) rbimpl_atomic_size_add(&(var), (val))

/** `size_t` variant of #RUBY_ATOMIC_SUB.  Executed for effect only. */
284#define RUBY_ATOMIC_SIZE_SUB(var, val) rbimpl_atomic_size_sub(&(var), (val))
/**
 * Pointer variant of #RUBY_ATOMIC_EXCHANGE: atomically stores `val` into the
 * pointer variable `var` and evaluates to the pointer previously stored
 * there.  Both operands are funneled through `void *`; the result is cast
 * back via RBIMPL_CAST.
 *
 * NOTE: `val` is now parenthesized in the expansion — previously
 * `(void *)val` — so that expression arguments (e.g. a conditional or
 * comma-free compound expression) bind to the cast as a whole, matching
 * every sibling macro in this family (cf. #RUBY_ATOMIC_PTR_CAS below).
 */
302#define RUBY_ATOMIC_PTR_EXCHANGE(var, val) \
303 RBIMPL_CAST(rbimpl_atomic_ptr_exchange((void **)&(var), (void *)(val)))
/**
 * Pointer variant of #RUBY_ATOMIC_LOAD: atomically loads and evaluates to
 * the pointer currently stored in `var`, cast back via RBIMPL_CAST.
 *
 * NOTE: the macro argument is now parenthesized — previously `&var` — so the
 * address-of applies to the whole argument expression, consistent with
 * `&(var)` used by every other macro in this file.
 */
313#define RUBY_ATOMIC_PTR_LOAD(var) \
314 RBIMPL_CAST(rbimpl_atomic_ptr_load((void **)&(var)))
/**
 * Pointer variant of #RUBY_ATOMIC_SET: atomically stores the pointer `val`
 * into the pointer variable `var`.  Executed for effect only.
 */
326#define RUBY_ATOMIC_PTR_SET(var, val) \
327 rbimpl_atomic_ptr_set((volatile void **)&(var), (val))
/**
 * Pointer variant of #RUBY_ATOMIC_CAS: iff `var == oldval`, store `newval`
 * into it.  Operands are funneled through `void *`; evaluates to the pointer
 * observed by the operation, cast back via RBIMPL_CAST.
 */
340#define RUBY_ATOMIC_PTR_CAS(var, oldval, newval) \
341 RBIMPL_CAST(rbimpl_atomic_ptr_cas((void **)&(var), (void *)(oldval), (void *)(newval)))
/**
 * `VALUE` variant of #RUBY_ATOMIC_SET: atomically stores `val` into `var`.
 * Executed for effect only.
 */
353#define RUBY_ATOMIC_VALUE_SET(var, val) \
354 rbimpl_atomic_value_set(&(var), (val))

/**
 * `VALUE` variant of #RUBY_ATOMIC_EXCHANGE: atomically stores `val` and
 * evaluates to the `VALUE` previously stored in `var`.
 */
367#define RUBY_ATOMIC_VALUE_EXCHANGE(var, val) \
368 rbimpl_atomic_value_exchange(&(var), (val))

/**
 * `VALUE` variant of #RUBY_ATOMIC_CAS: iff `var == oldval`, store `newval`;
 * evaluates to the `VALUE` observed by the operation.
 */
381#define RUBY_ATOMIC_VALUE_CAS(var, oldval, newval) \
382 rbimpl_atomic_value_cas(&(var), (oldval), (newval))
393#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
394 return __atomic_fetch_add(ptr, val, __ATOMIC_SEQ_CST);
396#elif defined(HAVE_GCC_SYNC_BUILTINS)
397 return __sync_fetch_and_add(ptr, val);
400 return InterlockedExchangeAdd(ptr, val);
402#elif defined(__sun) && defined(HAVE_ATOMIC_H)
409 return atomic_add_int_nv(ptr, val) - val;
412# error Unsupported platform.
421rbimpl_atomic_size_fetch_add(volatile
size_t *ptr,
size_t val)
425#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
426 return __atomic_fetch_add(ptr, val, __ATOMIC_SEQ_CST);
428#elif defined(HAVE_GCC_SYNC_BUILTINS)
429 return __sync_fetch_and_add(ptr, val);
432 return InterlockedExchangeAdd64(ptr, val);
434#elif defined(__sun) && defined(HAVE_ATOMIC_H) && (defined(_LP64) || defined(_I32LPx))
437 atomic_add_long(ptr, val);
443 rbimpl_atomic_fetch_add(tmp, val);
456#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
462 __atomic_add_fetch(ptr, val, __ATOMIC_SEQ_CST);
464#elif defined(HAVE_GCC_SYNC_BUILTINS)
465 __sync_add_and_fetch(ptr, val);
473 InterlockedExchangeAdd(ptr, val);
475#elif defined(__sun) && defined(HAVE_ATOMIC_H)
478 atomic_add_int(ptr, val);
481# error Unsupported platform.
489rbimpl_atomic_size_add(volatile
size_t *ptr,
size_t val)
493#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
494 __atomic_add_fetch(ptr, val, __ATOMIC_SEQ_CST);
496#elif defined(HAVE_GCC_SYNC_BUILTINS)
497 __sync_add_and_fetch(ptr, val);
501 InterlockedExchangeAdd64(ptr, val);
503#elif defined(__sun) && defined(HAVE_ATOMIC_H) && (defined(_LP64) || defined(_I32LPx))
506 atomic_add_long(ptr, val);
512 rbimpl_atomic_add(tmp, val);
525#elif defined(HAVE_GCC_ATOMIC_BUILTINS) || defined(HAVE_GCC_SYNC_BUILTINS)
526 rbimpl_atomic_add(ptr, 1);
529 InterlockedIncrement(ptr);
531#elif defined(__sun) && defined(HAVE_ATOMIC_H)
532 atomic_inc_uint(ptr);
535 rbimpl_atomic_add(ptr, 1);
544rbimpl_atomic_size_inc(volatile
size_t *ptr)
548#elif defined(HAVE_GCC_ATOMIC_BUILTINS) || defined(HAVE_GCC_SYNC_BUILTINS)
549 rbimpl_atomic_size_add(ptr, 1);
552 InterlockedIncrement64(ptr);
554#elif defined(__sun) && defined(HAVE_ATOMIC_H) && (defined(_LP64) || defined(_I32LPx))
555 atomic_inc_ulong(ptr);
560 rbimpl_atomic_size_add(ptr, 1);
573#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
574 return __atomic_fetch_sub(ptr, val, __ATOMIC_SEQ_CST);
576#elif defined(HAVE_GCC_SYNC_BUILTINS)
577 return __sync_fetch_and_sub(ptr, val);
581 return InterlockedExchangeAdd(ptr, -val);
583#elif defined(__sun) && defined(HAVE_ATOMIC_H)
585 const signed neg = -1;
587 return atomic_add_int_nv(ptr, neg * val) + val;
590# error Unsupported platform.
602#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
603 __atomic_sub_fetch(ptr, val, __ATOMIC_SEQ_CST);
605#elif defined(HAVE_GCC_SYNC_BUILTINS)
606 __sync_sub_and_fetch(ptr, val);
609 InterlockedExchangeAdd(ptr, -val);
611#elif defined(__sun) && defined(HAVE_ATOMIC_H)
612 const signed neg = -1;
614 atomic_add_int(ptr, neg * val);
617# error Unsupported platform.
625rbimpl_atomic_size_sub(volatile
size_t *ptr,
size_t val)
629#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
630 __atomic_sub_fetch(ptr, val, __ATOMIC_SEQ_CST);
632#elif defined(HAVE_GCC_SYNC_BUILTINS)
633 __sync_sub_and_fetch(ptr, val);
636 const ssize_t neg = -1;
637 InterlockedExchangeAdd64(ptr, neg * val);
639#elif defined(__sun) && defined(HAVE_ATOMIC_H) && (defined(_LP64) || defined(_I32LPx))
640 const signed neg = -1;
642 atomic_add_long(ptr, neg * val);
648 rbimpl_atomic_sub(tmp, val);
661#elif defined(HAVE_GCC_ATOMIC_BUILTINS) || defined(HAVE_GCC_SYNC_BUILTINS)
662 rbimpl_atomic_sub(ptr, 1);
665 InterlockedDecrement(ptr);
667#elif defined(__sun) && defined(HAVE_ATOMIC_H)
668 atomic_dec_uint(ptr);
671 rbimpl_atomic_sub(ptr, 1);
680rbimpl_atomic_size_dec(volatile
size_t *ptr)
684#elif defined(HAVE_GCC_ATOMIC_BUILTINS) || defined(HAVE_GCC_SYNC_BUILTINS)
685 rbimpl_atomic_size_sub(ptr, 1);
688 InterlockedDecrement64(ptr);
690#elif defined(__sun) && defined(HAVE_ATOMIC_H) && (defined(_LP64) || defined(_I32LPx))
691 atomic_dec_ulong(ptr);
696 rbimpl_atomic_size_sub(ptr, 1);
709#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
710 __atomic_or_fetch(ptr, val, __ATOMIC_SEQ_CST);
712#elif defined(HAVE_GCC_SYNC_BUILTINS)
713 __sync_or_and_fetch(ptr, val);
715#elif RBIMPL_COMPILER_SINCE(MSVC, 13, 0, 0)
716 _InterlockedOr(ptr, val);
718#elif defined(_WIN32) && defined(__GNUC__)
726#elif defined(_WIN32) && defined(_M_IX86)
729 __asm lock or [eax], ecx;
731#elif defined(__sun) && defined(HAVE_ATOMIC_H)
732 atomic_or_uint(ptr, val);
735# error Unsupported platform.
740#if RBIMPL_COMPILER_BEFORE(MSVC, 13, 0, 0)
744 return rbimpl_atomic_or(var, val);
756#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
757 return __atomic_exchange_n(ptr, val, __ATOMIC_SEQ_CST);
759#elif defined(HAVE_GCC_SYNC_BUILTINS)
760 return __sync_lock_test_and_set(ptr, val);
763 return InterlockedExchange(ptr, val);
765#elif defined(__sun) && defined(HAVE_ATOMIC_H)
766 return atomic_swap_uint(ptr, val);
769# error Unsupported platform.
777rbimpl_atomic_size_exchange(volatile
size_t *ptr,
size_t val)
781#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
782 return __atomic_exchange_n(ptr, val, __ATOMIC_SEQ_CST);
784#elif defined(HAVE_GCC_SYNC_BUILTINS)
785 return __sync_lock_test_and_set(ptr, val);
788 return InterlockedExchange64(ptr, val);
790#elif defined(__sun) && defined(HAVE_ATOMIC_H) && (defined(_LP64) || defined(_I32LPx))
791 return atomic_swap_ulong(ptr, val);
797 const rb_atomic_t ret = rbimpl_atomic_exchange(tmp, val);
798 return RBIMPL_CAST((
size_t)ret);
807rbimpl_atomic_size_set(volatile
size_t *ptr,
size_t val)
811#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
812 __atomic_store_n(ptr, val, __ATOMIC_SEQ_CST);
815 rbimpl_atomic_size_exchange(ptr, val);
824rbimpl_atomic_ptr_exchange(
void *volatile *ptr, const
void *val)
828#elif defined(InterlockedExchangePointer)
830 PVOID *pptr = RBIMPL_CAST((PVOID *)ptr);
831 PVOID pval = RBIMPL_CAST((PVOID)val);
832 return InterlockedExchangePointer(pptr, pval);
834#elif defined(__sun) && defined(HAVE_ATOMIC_H)
835 return atomic_swap_ptr(ptr, RBIMPL_CAST((
void *)val));
840 const size_t sval = RBIMPL_CAST((
size_t)val);
841 volatile size_t *
const sptr = RBIMPL_CAST((
volatile size_t *)ptr);
842 const size_t sret = rbimpl_atomic_size_exchange(sptr, sval);
843 return RBIMPL_CAST((
void *)sret);
852rbimpl_atomic_ptr_set(volatile
void **ptr,
void *val)
856 const size_t sval = RBIMPL_CAST((
size_t)val);
857 volatile size_t *
const sptr = RBIMPL_CAST((
volatile size_t *)ptr);
858 rbimpl_atomic_size_set(sptr, sval);
865rbimpl_atomic_value_exchange(volatile
VALUE *ptr,
VALUE val)
869 const size_t sval = RBIMPL_CAST((
size_t)val);
870 volatile size_t *
const sptr = RBIMPL_CAST((
volatile size_t *)ptr);
871 const size_t sret = rbimpl_atomic_size_exchange(sptr, sval);
872 return RBIMPL_CAST((
VALUE)sret);
879rbimpl_atomic_value_set(volatile
VALUE *ptr,
VALUE val)
883 const size_t sval = RBIMPL_CAST((
size_t)val);
884 volatile size_t *
const sptr = RBIMPL_CAST((
volatile size_t *)ptr);
885 rbimpl_atomic_size_set(sptr, sval);
896#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
897 return __atomic_load_n(ptr, __ATOMIC_SEQ_CST);
899 return rbimpl_atomic_fetch_add(ptr, 0);
911#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
912 __atomic_store_n(ptr, val, __ATOMIC_SEQ_CST);
916 rbimpl_atomic_exchange(ptr, val);
929#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
930 __atomic_compare_exchange_n(
931 ptr, &oldval, newval, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
934#elif defined(HAVE_GCC_SYNC_BUILTINS)
935 return __sync_val_compare_and_swap(ptr, oldval, newval);
937#elif RBIMPL_COMPILER_SINCE(MSVC, 13, 0, 0)
938 return InterlockedCompareExchange(ptr, newval, oldval);
941 PVOID *pptr = RBIMPL_CAST((PVOID *)ptr);
942 PVOID pold = RBIMPL_CAST((PVOID)oldval);
943 PVOID pnew = RBIMPL_CAST((PVOID)newval);
944 PVOID pret = InterlockedCompareExchange(pptr, pnew, pold);
947#elif defined(__sun) && defined(HAVE_ATOMIC_H)
948 return atomic_cas_uint(ptr, oldval, newval);
951# error Unsupported platform.
956#if RBIMPL_COMPILER_BEFORE(MSVC, 13, 0, 0)
960 return rbimpl_atomic_cas(var, oldval, newval);
968rbimpl_atomic_size_cas(volatile
size_t *ptr,
size_t oldval,
size_t newval)
972#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
973 __atomic_compare_exchange_n(
974 ptr, &oldval, newval, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
977#elif defined(HAVE_GCC_SYNC_BUILTINS)
978 return __sync_val_compare_and_swap(ptr, oldval, newval);
981 return InterlockedCompareExchange64(ptr, newval, oldval);
983#elif defined(__sun) && defined(HAVE_ATOMIC_H) && (defined(_LP64) || defined(_I32LPx))
984 return atomic_cas_ulong(ptr, oldval, newval);
990 return rbimpl_atomic_cas(tmp, oldval, newval);
999rbimpl_atomic_ptr_cas(
void **ptr, const
void *oldval, const
void *newval)
1003#elif defined(InterlockedExchangePointer)
1006 PVOID *pptr = RBIMPL_CAST((PVOID *)ptr);
1007 PVOID pold = RBIMPL_CAST((PVOID)oldval);
1008 PVOID pnew = RBIMPL_CAST((PVOID)newval);
1009 return InterlockedCompareExchangePointer(pptr, pnew, pold);
1011#elif defined(__sun) && defined(HAVE_ATOMIC_H)
1012 void *pold = RBIMPL_CAST((
void *)oldval);
1013 void *pnew = RBIMPL_CAST((
void *)newval);
1014 return atomic_cas_ptr(ptr, pold, pnew);
1020 const size_t snew = RBIMPL_CAST((
size_t)newval);
1021 const size_t sold = RBIMPL_CAST((
size_t)oldval);
1022 volatile size_t *
const sptr = RBIMPL_CAST((
volatile size_t *)ptr);
1023 const size_t sret = rbimpl_atomic_size_cas(sptr, sold, snew);
1024 return RBIMPL_CAST((
void *)sret);
1033rbimpl_atomic_ptr_load(
void **ptr)
1037#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
1038 return __atomic_load_n(ptr, __ATOMIC_SEQ_CST);
1041 return rbimpl_atomic_ptr_cas(ptr, val, val);
1049rbimpl_atomic_value_cas(volatile
VALUE *ptr,
VALUE oldval,
VALUE newval)
1053 const size_t snew = RBIMPL_CAST((
size_t)newval);
1054 const size_t sold = RBIMPL_CAST((
size_t)oldval);
1055 volatile size_t *
const sptr = RBIMPL_CAST((
volatile size_t *)ptr);
1056 const size_t sret = rbimpl_atomic_size_cas(sptr, sold, snew);
1057 return RBIMPL_CAST((
VALUE)sret);
Defines RBIMPL_ATTR_ARTIFICIAL.
#define RBIMPL_ATTR_ARTIFICIAL()
Wraps (or simulates) __attribute__((artificial))
#define RBIMPL_ASSERT_OR_ASSUME(...)
This is either RUBY_ASSERT or RBIMPL_ASSUME, depending on RUBY_DEBUG.
std::atomic< unsigned > rb_atomic_t
Type that is eligible for atomic operations.
Defines RBIMPL_COMPILER_SINCE.
Defines RBIMPL_STATIC_ASSERT.
#define RBIMPL_STATIC_ASSERT
Wraps (or simulates) static_assert
Defines RBIMPL_ATTR_NOALIAS.
#define RBIMPL_ATTR_NOALIAS()
Wraps (or simulates) __declspec((noalias))
Defines RBIMPL_ATTR_NONNULL.
#define RBIMPL_ATTR_NONNULL(list)
Wraps (or simulates) __attribute__((nonnull))
#define inline
Old Visual Studio versions do not support the inline keyword, so we need to define it to be __inline.
uintptr_t VALUE
Type that represents a Ruby object.