27#include "ruby/internal/config.h"
33#ifdef HAVE_SYS_TYPES_H
34# include <sys/types.h>
37#if RBIMPL_COMPILER_SINCE(MSVC, 13, 0, 0)
38# pragma intrinsic(_InterlockedOr)
39#elif defined(__sun) && defined(HAVE_ATOMIC_H)
44#include "ruby/backward/2/limits.h"
49#include "ruby/internal/cast.h"
59#if defined(__DOXYGEN__) || defined(HAVE_GCC_ATOMIC_BUILTINS) || defined(HAVE_GCC_SYNC_BUILTINS)
60# define RUBY_ATOMIC_GENERIC_MACRO 1
68#if defined(__DOXYGEN__)
70#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
72#elif defined(HAVE_GCC_SYNC_BUILTINS)
78#elif defined(__sun) && defined(HAVE_ATOMIC_H)
81# error No atomic operation found
/*
 * Public RUBY_ATOMIC_* macro API.  Each macro takes the variable itself (not
 * a pointer) and forwards its address to the matching rbimpl_atomic_*
 * implementation function seen further down in this file.
 *
 * NOTE(review): the digits fused to the front of each line below (93, 104,
 * 116, ...) are residual line numbers from the extraction that produced this
 * text, not part of the original source.
 */
/* Atomic `var += val`; returns the value `var` held BEFORE the addition
 * (the GCC path below is __atomic_fetch_add). */
93#define RUBY_ATOMIC_FETCH_ADD(var, val) rbimpl_atomic_fetch_add(&(var), (val))
/* Atomic `var -= val`; returns the value `var` held BEFORE the subtraction
 * (the GCC path below is __atomic_fetch_sub). */
104#define RUBY_ATOMIC_FETCH_SUB(var, val) rbimpl_atomic_fetch_sub(&(var), (val))
/* Atomic `var |= val`.  No return value is relied on: the MSVC path below
 * discards _InterlockedOr's result. */
116#define RUBY_ATOMIC_OR(var, val) rbimpl_atomic_or(&(var), (val))
/* Atomically stores `val` into `var`; returns the previous value
 * (__atomic_exchange_n / InterlockedExchange below). */
127#define RUBY_ATOMIC_EXCHANGE(var, val) rbimpl_atomic_exchange(&(var), (val))
/* Compare-and-swap: iff `var == oldval`, store `newval`; returns the value
 * observed in `var` before the operation (cf. __sync_val_compare_and_swap
 * in the implementation below). */
140#define RUBY_ATOMIC_CAS(var, oldval, newval) \
141    rbimpl_atomic_cas(&(var), (oldval), (newval))
/* Atomic read of `var` (__atomic_load_n on the GCC path). */
150#define RUBY_ATOMIC_LOAD(var) rbimpl_atomic_load(&(var))
/* Atomic store of `val` into `var`; any previous value is discarded. */
160#define RUBY_ATOMIC_SET(var, val) rbimpl_atomic_set(&(var), (val))
/* Atomic `var += val`, result not returned. */
170#define RUBY_ATOMIC_ADD(var, val) rbimpl_atomic_add(&(var), (val))
/* Atomic `var -= val`, result not returned. */
180#define RUBY_ATOMIC_SUB(var, val) rbimpl_atomic_sub(&(var), (val))
/* Atomic increment of `var` by one. */
189#define RUBY_ATOMIC_INC(var) rbimpl_atomic_inc(&(var))
/* Atomic decrement of `var` by one. */
198#define RUBY_ATOMIC_DEC(var) rbimpl_atomic_dec(&(var))
/* size_t-typed variants of INC/DEC/EXCHANGE/CAS/ADD/SUB; these dispatch to
 * the rbimpl_atomic_size_* functions, which use 64-bit Interlocked*64 /
 * atomic_*_ulong primitives where available. */
209#define RUBY_ATOMIC_SIZE_INC(var) rbimpl_atomic_size_inc(&(var))
220#define RUBY_ATOMIC_SIZE_DEC(var) rbimpl_atomic_size_dec(&(var))
233#define RUBY_ATOMIC_SIZE_EXCHANGE(var, val) \
234    rbimpl_atomic_size_exchange(&(var), (val))
247#define RUBY_ATOMIC_SIZE_CAS(var, oldval, newval) \
248    rbimpl_atomic_size_cas(&(var), (oldval), (newval))
260#define RUBY_ATOMIC_SIZE_ADD(var, val) rbimpl_atomic_size_add(&(var), (val))
272#define RUBY_ATOMIC_SIZE_SUB(var, val) rbimpl_atomic_size_sub(&(var), (val))
/* Pointer-typed variants.  The argument is cast through `void **` and the
 * result is re-cast via RBIMPL_CAST so callers can use typed pointers. */
290#define RUBY_ATOMIC_PTR_EXCHANGE(var, val) \
291    RBIMPL_CAST(rbimpl_atomic_ptr_exchange((void **)&(var), (void *)val))
301#define RUBY_ATOMIC_PTR_LOAD(var) \
302    RBIMPL_CAST(rbimpl_atomic_ptr_load((void **)&var))
315#define RUBY_ATOMIC_PTR_CAS(var, oldval, newval) \
316    RBIMPL_CAST(rbimpl_atomic_ptr_cas((void **)&(var), (void *)(oldval), (void *)(newval)))
/* VALUE-typed variants (VALUE is Ruby's uintptr_t object handle; the
 * implementations below round-trip through the size_t functions). */
329#define RUBY_ATOMIC_VALUE_EXCHANGE(var, val) \
330    rbimpl_atomic_value_exchange(&(var), (val))
343#define RUBY_ATOMIC_VALUE_CAS(var, oldval, newval) \
344    rbimpl_atomic_value_cas(&(var), (oldval), (newval))
355#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
356 return __atomic_fetch_add(ptr, val, __ATOMIC_SEQ_CST);
358#elif defined(HAVE_GCC_SYNC_BUILTINS)
359 return __sync_fetch_and_add(ptr, val);
362 return InterlockedExchangeAdd(ptr, val);
364#elif defined(__sun) && defined(HAVE_ATOMIC_H)
371 return atomic_add_int_nv(ptr, val) - val;
374# error Unsupported platform.
386#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
392 __atomic_add_fetch(ptr, val, __ATOMIC_SEQ_CST);
394#elif defined(HAVE_GCC_SYNC_BUILTINS)
395 __sync_add_and_fetch(ptr, val);
403 InterlockedExchangeAdd(ptr, val);
405#elif defined(__sun) && defined(HAVE_ATOMIC_H)
408 atomic_add_int(ptr, val);
411# error Unsupported platform.
419rbimpl_atomic_size_add(volatile
size_t *ptr,
size_t val)
423#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
424 __atomic_add_fetch(ptr, val, __ATOMIC_SEQ_CST);
426#elif defined(HAVE_GCC_SYNC_BUILTINS)
427 __sync_add_and_fetch(ptr, val);
431 InterlockedExchangeAdd64(ptr, val);
433#elif defined(__sun) && defined(HAVE_ATOMIC_H) && (defined(_LP64) || defined(_I32LPx))
436 atomic_add_long(ptr, val);
442 rbimpl_atomic_add(tmp, val);
455#elif defined(HAVE_GCC_ATOMIC_BUILTINS) || defined(HAVE_GCC_SYNC_BUILTINS)
456 rbimpl_atomic_add(ptr, 1);
459 InterlockedIncrement(ptr);
461#elif defined(__sun) && defined(HAVE_ATOMIC_H)
462 atomic_inc_uint(ptr);
465 rbimpl_atomic_add(ptr, 1);
474rbimpl_atomic_size_inc(volatile
size_t *ptr)
478#elif defined(HAVE_GCC_ATOMIC_BUILTINS) || defined(HAVE_GCC_SYNC_BUILTINS)
479 rbimpl_atomic_size_add(ptr, 1);
482 InterlockedIncrement64(ptr);
484#elif defined(__sun) && defined(HAVE_ATOMIC_H) && (defined(_LP64) || defined(_I32LPx))
485 atomic_inc_ulong(ptr);
490 rbimpl_atomic_size_add(ptr, 1);
503#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
504 return __atomic_fetch_sub(ptr, val, __ATOMIC_SEQ_CST);
506#elif defined(HAVE_GCC_SYNC_BUILTINS)
507 return __sync_fetch_and_sub(ptr, val);
511 return InterlockedExchangeAdd(ptr, -val);
513#elif defined(__sun) && defined(HAVE_ATOMIC_H)
515 const signed neg = -1;
517 return atomic_add_int_nv(ptr, neg * val) + val;
520# error Unsupported platform.
532#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
533 __atomic_sub_fetch(ptr, val, __ATOMIC_SEQ_CST);
535#elif defined(HAVE_GCC_SYNC_BUILTINS)
536 __sync_sub_and_fetch(ptr, val);
539 InterlockedExchangeAdd(ptr, -val);
541#elif defined(__sun) && defined(HAVE_ATOMIC_H)
542 const signed neg = -1;
544 atomic_add_int(ptr, neg * val);
547# error Unsupported platform.
555rbimpl_atomic_size_sub(volatile
size_t *ptr,
size_t val)
559#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
560 __atomic_sub_fetch(ptr, val, __ATOMIC_SEQ_CST);
562#elif defined(HAVE_GCC_SYNC_BUILTINS)
563 __sync_sub_and_fetch(ptr, val);
566 const ssize_t neg = -1;
567 InterlockedExchangeAdd64(ptr, neg * val);
569#elif defined(__sun) && defined(HAVE_ATOMIC_H) && (defined(_LP64) || defined(_I32LPx))
570 const signed neg = -1;
572 atomic_add_long(ptr, neg * val);
578 rbimpl_atomic_sub(tmp, val);
591#elif defined(HAVE_GCC_ATOMIC_BUILTINS) || defined(HAVE_GCC_SYNC_BUILTINS)
592 rbimpl_atomic_sub(ptr, 1);
595 InterlockedDecrement(ptr);
597#elif defined(__sun) && defined(HAVE_ATOMIC_H)
598 atomic_dec_uint(ptr);
601 rbimpl_atomic_sub(ptr, 1);
610rbimpl_atomic_size_dec(volatile
size_t *ptr)
614#elif defined(HAVE_GCC_ATOMIC_BUILTINS) || defined(HAVE_GCC_SYNC_BUILTINS)
615 rbimpl_atomic_size_sub(ptr, 1);
618 InterlockedDecrement64(ptr);
620#elif defined(__sun) && defined(HAVE_ATOMIC_H) && (defined(_LP64) || defined(_I32LPx))
621 atomic_dec_ulong(ptr);
626 rbimpl_atomic_size_sub(ptr, 1);
639#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
640 __atomic_or_fetch(ptr, val, __ATOMIC_SEQ_CST);
642#elif defined(HAVE_GCC_SYNC_BUILTINS)
643 __sync_or_and_fetch(ptr, val);
645#elif RBIMPL_COMPILER_SINCE(MSVC, 13, 0, 0)
646 _InterlockedOr(ptr, val);
648#elif defined(_WIN32) && defined(__GNUC__)
656#elif defined(_WIN32) && defined(_M_IX86)
659 __asm lock or [eax], ecx;
661#elif defined(__sun) && defined(HAVE_ATOMIC_H)
662 atomic_or_uint(ptr, val);
665# error Unsupported platform.
670#if RBIMPL_COMPILER_BEFORE(MSVC, 13, 0, 0)
674 return rbimpl_atomic_or(var, val);
686#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
687 return __atomic_exchange_n(ptr, val, __ATOMIC_SEQ_CST);
689#elif defined(HAVE_GCC_SYNC_BUILTINS)
690 return __sync_lock_test_and_set(ptr, val);
693 return InterlockedExchange(ptr, val);
695#elif defined(__sun) && defined(HAVE_ATOMIC_H)
696 return atomic_swap_uint(ptr, val);
699# error Unsupported platform.
707rbimpl_atomic_size_exchange(volatile
size_t *ptr,
size_t val)
711#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
712 return __atomic_exchange_n(ptr, val, __ATOMIC_SEQ_CST);
714#elif defined(HAVE_GCC_SYNC_BUILTINS)
715 return __sync_lock_test_and_set(ptr, val);
718 return InterlockedExchange64(ptr, val);
720#elif defined(__sun) && defined(HAVE_ATOMIC_H) && (defined(_LP64) || defined(_I32LPx))
721 return atomic_swap_ulong(ptr, val);
727 const rb_atomic_t ret = rbimpl_atomic_exchange(tmp, val);
728 return RBIMPL_CAST((
size_t)ret);
737rbimpl_atomic_ptr_exchange(
void *volatile *ptr, const
void *val)
741#elif defined(InterlockedExchangePointer)
743 PVOID *pptr = RBIMPL_CAST((PVOID *)ptr);
744 PVOID pval = RBIMPL_CAST((PVOID)val);
745 return InterlockedExchangePointer(pptr, pval);
747#elif defined(__sun) && defined(HAVE_ATOMIC_H)
748 return atomic_swap_ptr(ptr, RBIMPL_CAST((
void *)val));
753 const size_t sval = RBIMPL_CAST((
size_t)val);
754 volatile size_t *
const sptr = RBIMPL_CAST((
volatile size_t *)ptr);
755 const size_t sret = rbimpl_atomic_size_exchange(sptr, sval);
756 return RBIMPL_CAST((
void *)sret);
765rbimpl_atomic_value_exchange(volatile
VALUE *ptr,
VALUE val)
769 const size_t sval = RBIMPL_CAST((
size_t)val);
770 volatile size_t *
const sptr = RBIMPL_CAST((
volatile size_t *)ptr);
771 const size_t sret = rbimpl_atomic_size_exchange(sptr, sval);
772 return RBIMPL_CAST((
VALUE)sret);
783#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
784 return __atomic_load_n(ptr, __ATOMIC_SEQ_CST);
786 return rbimpl_atomic_fetch_add(ptr, 0);
798#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
799 __atomic_store_n(ptr, val, __ATOMIC_SEQ_CST);
803 rbimpl_atomic_exchange(ptr, val);
816#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
817 __atomic_compare_exchange_n(
818 ptr, &oldval, newval, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
821#elif defined(HAVE_GCC_SYNC_BUILTINS)
822 return __sync_val_compare_and_swap(ptr, oldval, newval);
824#elif RBIMPL_COMPILER_SINCE(MSVC, 13, 0, 0)
825 return InterlockedCompareExchange(ptr, newval, oldval);
828 PVOID *pptr = RBIMPL_CAST((PVOID *)ptr);
829 PVOID pold = RBIMPL_CAST((PVOID)oldval);
830 PVOID pnew = RBIMPL_CAST((PVOID)newval);
831 PVOID pret = InterlockedCompareExchange(pptr, pnew, pold);
834#elif defined(__sun) && defined(HAVE_ATOMIC_H)
835 return atomic_cas_uint(ptr, oldval, newval);
838# error Unsupported platform.
843#if RBIMPL_COMPILER_BEFORE(MSVC, 13, 0, 0)
847 return rbimpl_atomic_cas(var, oldval, newval);
855rbimpl_atomic_size_cas(volatile
size_t *ptr,
size_t oldval,
size_t newval)
859#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
860 __atomic_compare_exchange_n(
861 ptr, &oldval, newval, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
864#elif defined(HAVE_GCC_SYNC_BUILTINS)
865 return __sync_val_compare_and_swap(ptr, oldval, newval);
868 return InterlockedCompareExchange64(ptr, newval, oldval);
870#elif defined(__sun) && defined(HAVE_ATOMIC_H) && (defined(_LP64) || defined(_I32LPx))
871 return atomic_cas_ulong(ptr, oldval, newval);
877 return rbimpl_atomic_cas(tmp, oldval, newval);
886rbimpl_atomic_ptr_cas(
void **ptr, const
void *oldval, const
void *newval)
890#elif defined(InterlockedExchangePointer)
893 PVOID *pptr = RBIMPL_CAST((PVOID *)ptr);
894 PVOID pold = RBIMPL_CAST((PVOID)oldval);
895 PVOID pnew = RBIMPL_CAST((PVOID)newval);
896 return InterlockedCompareExchangePointer(pptr, pnew, pold);
898#elif defined(__sun) && defined(HAVE_ATOMIC_H)
899 void *pold = RBIMPL_CAST((
void *)oldval);
900 void *pnew = RBIMPL_CAST((
void *)newval);
901 return atomic_cas_ptr(ptr, pold, pnew);
907 const size_t snew = RBIMPL_CAST((
size_t)newval);
908 const size_t sold = RBIMPL_CAST((
size_t)oldval);
909 volatile size_t *
const sptr = RBIMPL_CAST((
volatile size_t *)ptr);
910 const size_t sret = rbimpl_atomic_size_cas(sptr, sold, snew);
911 return RBIMPL_CAST((
void *)sret);
920rbimpl_atomic_ptr_load(
void **ptr)
924#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
925 return __atomic_load_n(ptr, __ATOMIC_SEQ_CST);
928 return rbimpl_atomic_ptr_cas(ptr, val, val);
936rbimpl_atomic_value_cas(volatile
VALUE *ptr,
VALUE oldval,
VALUE newval)
940 const size_t snew = RBIMPL_CAST((
size_t)newval);
941 const size_t sold = RBIMPL_CAST((
size_t)oldval);
942 volatile size_t *
const sptr = RBIMPL_CAST((
volatile size_t *)ptr);
943 const size_t sret = rbimpl_atomic_size_cas(sptr, sold, snew);
944 return RBIMPL_CAST((
VALUE)sret);
Defines RBIMPL_ATTR_ARTIFICIAL.
#define RBIMPL_ATTR_ARTIFICIAL()
Wraps (or simulates) __attribute__((artificial))
#define RBIMPL_ASSERT_OR_ASSUME(...)
This is either RUBY_ASSERT or RBIMPL_ASSUME, depending on RUBY_DEBUG.
std::atomic<unsigned> rb_atomic_t
Type that is eligible for atomic operations (C++ rendering; in C this is a plain unsigned integer type).
Defines RBIMPL_COMPILER_SINCE.
Defines RBIMPL_STATIC_ASSERT.
#define RBIMPL_STATIC_ASSERT
Wraps (or simulates) static_assert
Defines RBIMPL_ATTR_NOALIAS.
#define RBIMPL_ATTR_NOALIAS()
Wraps (or simulates) __declspec((noalias))
Defines RBIMPL_ATTR_NONNULL.
#define RBIMPL_ATTR_NONNULL(list)
Wraps (or simulates) __attribute__((nonnull))
#define inline
Old Visual Studio versions do not support the inline keyword, so we need to define it to be __inline.
uintptr_t VALUE
Type that represents a Ruby object.