#include "ruby/internal/config.h"

#ifdef HAVE_SYS_TYPES_H
# include <sys/types.h>

#if RBIMPL_COMPILER_SINCE(MSVC, 13, 0, 0)
# pragma intrinsic(_InterlockedOr)
#elif defined(__sun) && defined(HAVE_ATOMIC_H)

#include "ruby/backward/2/limits.h"

#include "ruby/internal/cast.h"

#if defined(__DOXYGEN__) || defined(HAVE_GCC_ATOMIC_BUILTINS) || defined(HAVE_GCC_SYNC_BUILTINS)
# define RUBY_ATOMIC_GENERIC_MACRO 1

#if defined(__DOXYGEN__)

#elif defined(HAVE_GCC_ATOMIC_BUILTINS)

#elif defined(HAVE_GCC_SYNC_BUILTINS)

#elif defined(__sun) && defined(HAVE_ATOMIC_H)

# error No atomic operation found

#define RUBY_ATOMIC_FETCH_ADD(var, val) rbimpl_atomic_fetch_add(&(var), (val))
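
/*
 * Usage sketch added for illustration; `example_counter` and
 * `example_next_ticket` are hypothetical names, and in real code this would
 * live in a file that includes this header.  RUBY_ATOMIC_FETCH_ADD returns
 * the value the variable held *before* the addition.
 */
static rb_atomic_t example_counter;

static inline rb_atomic_t
example_next_ticket(void)
{
    /* Each caller receives a distinct ticket; the stored value becomes old + 1. */
    return RUBY_ATOMIC_FETCH_ADD(example_counter, 1);
}
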
#define RUBY_ATOMIC_FETCH_SUB(var, val) rbimpl_atomic_fetch_sub(&(var), (val))

#define RUBY_ATOMIC_OR(var, val) rbimpl_atomic_or(&(var), (val))

#define RUBY_ATOMIC_EXCHANGE(var, val) rbimpl_atomic_exchange(&(var), (val))
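
/*
 * Usage sketch (hypothetical names, for illustration only):
 * RUBY_ATOMIC_EXCHANGE stores the new value and returns the previous one, so
 * it can serve as a "run once" guard.
 */
static rb_atomic_t example_once_flag;

static inline int
example_try_claim(void)
{
    /* Only the first caller sees the old value 0 and thus "wins". */
    return RUBY_ATOMIC_EXCHANGE(example_once_flag, 1) == 0;
}
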
#define RUBY_ATOMIC_CAS(var, oldval, newval) \
    rbimpl_atomic_cas(&(var), (oldval), (newval))
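
/*
 * Usage sketch (hypothetical helper, for illustration only):
 * RUBY_ATOMIC_CAS returns the value observed before the operation, so the
 * swap took effect iff the return value equals `oldval`.  A typical retry
 * loop, here for a saturating increment:
 */
static inline void
example_saturating_inc(volatile rb_atomic_t *counter, rb_atomic_t limit)
{
    rb_atomic_t old = *counter;
    while (old < limit) {
        const rb_atomic_t seen = RUBY_ATOMIC_CAS(*counter, old, old + 1);
        if (seen == old) break;  /* our CAS won */
        old = seen;              /* lost the race; retry against the new value */
    }
}
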
#define RUBY_ATOMIC_SET(var, val) rbimpl_atomic_set(&(var), (val))
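
/*
 * Usage sketch (hypothetical names): RUBY_ATOMIC_SET publishes a value with
 * sequentially consistent ordering and discards the previous contents, e.g.
 * for a stop flag polled by worker threads.
 */
static rb_atomic_t example_stop_requested;

static inline void
example_request_stop(void)
{
    RUBY_ATOMIC_SET(example_stop_requested, 1);
}
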
#define RUBY_ATOMIC_ADD(var, val) rbimpl_atomic_add(&(var), (val))

#define RUBY_ATOMIC_SUB(var, val) rbimpl_atomic_sub(&(var), (val))

#define RUBY_ATOMIC_INC(var) rbimpl_atomic_inc(&(var))

#define RUBY_ATOMIC_DEC(var) rbimpl_atomic_dec(&(var))
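
/*
 * Usage sketch (hypothetical reference-count helpers): RUBY_ATOMIC_ADD / SUB /
 * INC / DEC do not report the updated value; when the caller needs it, e.g.
 * to detect the last release, a fetch variant has to be used instead.
 */
static inline void
example_ref_retain(volatile rb_atomic_t *refcnt)
{
    RUBY_ATOMIC_INC(*refcnt);
}

static inline int
example_ref_release(volatile rb_atomic_t *refcnt)
{
    /* FETCH_SUB yields the previous count, so 1 means we dropped the last reference. */
    return RUBY_ATOMIC_FETCH_SUB(*refcnt, 1) == 1;
}
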
#define RUBY_ATOMIC_SIZE_INC(var) rbimpl_atomic_size_inc(&(var))

#define RUBY_ATOMIC_SIZE_DEC(var) rbimpl_atomic_size_dec(&(var))

#define RUBY_ATOMIC_SIZE_EXCHANGE(var, val) \
    rbimpl_atomic_size_exchange(&(var), (val))

#define RUBY_ATOMIC_SIZE_CAS(var, oldval, newval) \
    rbimpl_atomic_size_cas(&(var), (oldval), (newval))

#define RUBY_ATOMIC_SIZE_ADD(var, val) rbimpl_atomic_size_add(&(var), (val))

#define RUBY_ATOMIC_SIZE_SUB(var, val) rbimpl_atomic_size_sub(&(var), (val))
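
/*
 * Usage sketch (hypothetical byte counter): the RUBY_ATOMIC_SIZE_* family
 * mirrors the rb_atomic_t macros but operates on size_t, which can be wider
 * than rb_atomic_t on LP64 platforms.
 */
static size_t example_bytes_in_flight;

static inline void
example_track_alloc(size_t nbytes)
{
    RUBY_ATOMIC_SIZE_ADD(example_bytes_in_flight, nbytes);
}

static inline void
example_track_free(size_t nbytes)
{
    RUBY_ATOMIC_SIZE_SUB(example_bytes_in_flight, nbytes);
}
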
#define RUBY_ATOMIC_PTR_EXCHANGE(var, val) \
    RBIMPL_CAST(rbimpl_atomic_ptr_exchange((void **)&(var), (void *)val))

#define RUBY_ATOMIC_PTR_CAS(var, oldval, newval) \
    RBIMPL_CAST(rbimpl_atomic_ptr_cas((void **)&(var), (oldval), (newval)))
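
/*
 * Usage sketch (hypothetical lazy initialization): RUBY_ATOMIC_PTR_CAS
 * installs a pointer only if the slot still holds the expected old value
 * (NULL here) and returns whatever was observed beforehand.
 */
static void *example_singleton;

static inline void *
example_publish(void *candidate)
{
    void *prev = RUBY_ATOMIC_PTR_CAS(example_singleton, NULL, candidate);
    /* NULL means our candidate was installed; otherwise another thread won. */
    return prev ? prev : candidate;
}
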
#define RUBY_ATOMIC_VALUE_EXCHANGE(var, val) \
    rbimpl_atomic_value_exchange(&(var), (val))

#define RUBY_ATOMIC_VALUE_CAS(var, oldval, newval) \
    rbimpl_atomic_value_cas(&(var), (oldval), (newval))
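
/*
 * Usage sketch (hypothetical cache slot; keeping the object visible to the GC
 * is out of scope here): the RUBY_ATOMIC_VALUE_* macros are the same
 * operations specialised for VALUE, avoiding casts at the call site.
 */
static VALUE example_cached_obj;

static inline VALUE
example_swap_cache(VALUE newval)
{
    /* Returns the VALUE that previously occupied the slot. */
    return RUBY_ATOMIC_VALUE_EXCHANGE(example_cached_obj, newval);
}
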
#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
    return __atomic_fetch_add(ptr, val, __ATOMIC_SEQ_CST);

#elif defined(HAVE_GCC_SYNC_BUILTINS)
    return __sync_fetch_and_add(ptr, val);

    return InterlockedExchangeAdd(ptr, val);

#elif defined(__sun) && defined(HAVE_ATOMIC_H)
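    /*
     * Added note: atomic_add_int_nv() returns the value *after* the addition,
     * so subtracting `val` below recovers the pre-addition value that a
     * fetch-and-add is expected to return.
     */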
    return atomic_add_int_nv(ptr, val) - val;

# error Unsupported platform.

#elif defined(HAVE_GCC_ATOMIC_BUILTINS)

    __atomic_add_fetch(ptr, val, __ATOMIC_SEQ_CST);

#elif defined(HAVE_GCC_SYNC_BUILTINS)
    __sync_add_and_fetch(ptr, val);

    InterlockedExchangeAdd(ptr, val);

#elif defined(__sun) && defined(HAVE_ATOMIC_H)

    atomic_add_int(ptr, val);

# error Unsupported platform.

rbimpl_atomic_size_add(volatile size_t *ptr, size_t val)

#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
    __atomic_add_fetch(ptr, val, __ATOMIC_SEQ_CST);

#elif defined(HAVE_GCC_SYNC_BUILTINS)
    __sync_add_and_fetch(ptr, val);

#elif defined(_WIN32) && defined(_M_AMD64)

    InterlockedExchangeAdd64(ptr, val);

#elif defined(__sun) && defined(HAVE_ATOMIC_H) && (defined(_LP64) || defined(_I32LPx))

    atomic_add_long(ptr, val);
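    /*
     * Added note: in the generic fallback below, `tmp` (declaration elided in
     * this excerpt) is `ptr` reinterpreted as a volatile rb_atomic_t *; the
     * original header guards that reinterpretation with a static assertion
     * that both types have the same size.  The same pattern recurs in the
     * other size_t fallbacks further down.
     */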
    rbimpl_atomic_add(tmp, val);

#elif defined(HAVE_GCC_ATOMIC_BUILTINS) || defined(HAVE_GCC_SYNC_BUILTINS)
    rbimpl_atomic_add(ptr, 1);

    InterlockedIncrement(ptr);

#elif defined(__sun) && defined(HAVE_ATOMIC_H)
    atomic_inc_uint(ptr);

    rbimpl_atomic_add(ptr, 1);

rbimpl_atomic_size_inc(volatile size_t *ptr)

#elif defined(HAVE_GCC_ATOMIC_BUILTINS) || defined(HAVE_GCC_SYNC_BUILTINS)
    rbimpl_atomic_size_add(ptr, 1);

#elif defined(_WIN32) && defined(_M_AMD64)
    InterlockedIncrement64(ptr);

#elif defined(__sun) && defined(HAVE_ATOMIC_H) && (defined(_LP64) || defined(_I32LPx))
    atomic_inc_ulong(ptr);

    rbimpl_atomic_size_add(ptr, 1);

#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
    return __atomic_fetch_sub(ptr, val, __ATOMIC_SEQ_CST);

#elif defined(HAVE_GCC_SYNC_BUILTINS)
    return __sync_fetch_and_sub(ptr, val);

    return InterlockedExchangeAdd(ptr, -val);

#elif defined(__sun) && defined(HAVE_ATOMIC_H)

    const signed neg = -1;

    return atomic_add_int_nv(ptr, neg * val) + val;

# error Unsupported platform.

#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
    __atomic_sub_fetch(ptr, val, __ATOMIC_SEQ_CST);

#elif defined(HAVE_GCC_SYNC_BUILTINS)
    __sync_sub_and_fetch(ptr, val);

    InterlockedExchangeAdd(ptr, -val);

#elif defined(__sun) && defined(HAVE_ATOMIC_H)
    const signed neg = -1;

    atomic_add_int(ptr, neg * val);

# error Unsupported platform.

rbimpl_atomic_size_sub(volatile size_t *ptr, size_t val)

#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
    __atomic_sub_fetch(ptr, val, __ATOMIC_SEQ_CST);

#elif defined(HAVE_GCC_SYNC_BUILTINS)
    __sync_sub_and_fetch(ptr, val);

#elif defined(_WIN32) && defined(_M_AMD64)
    const ssize_t neg = -1;
    InterlockedExchangeAdd64(ptr, neg * val);

#elif defined(__sun) && defined(HAVE_ATOMIC_H) && (defined(_LP64) || defined(_I32LPx))
    const signed neg = -1;

    atomic_add_long(ptr, neg * val);

    rbimpl_atomic_sub(tmp, val);

#elif defined(HAVE_GCC_ATOMIC_BUILTINS) || defined(HAVE_GCC_SYNC_BUILTINS)
    rbimpl_atomic_sub(ptr, 1);

    InterlockedDecrement(ptr);

#elif defined(__sun) && defined(HAVE_ATOMIC_H)
    atomic_dec_uint(ptr);

    rbimpl_atomic_sub(ptr, 1);

rbimpl_atomic_size_dec(volatile size_t *ptr)

#elif defined(HAVE_GCC_ATOMIC_BUILTINS) || defined(HAVE_GCC_SYNC_BUILTINS)
    rbimpl_atomic_size_sub(ptr, 1);

#elif defined(_WIN32) && defined(_M_AMD64)
    InterlockedDecrement64(ptr);

#elif defined(__sun) && defined(HAVE_ATOMIC_H) && (defined(_LP64) || defined(_I32LPx))
    atomic_dec_ulong(ptr);

    rbimpl_atomic_size_sub(ptr, 1);

#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
    __atomic_or_fetch(ptr, val, __ATOMIC_SEQ_CST);

#elif defined(HAVE_GCC_SYNC_BUILTINS)
    __sync_or_and_fetch(ptr, val);

#elif RBIMPL_COMPILER_SINCE(MSVC, 13, 0, 0)
    _InterlockedOr(ptr, val);

#elif defined(_WIN32) && defined(__GNUC__)

#elif defined(_WIN32) && defined(_M_IX86)

    __asm lock or [eax], ecx;

#elif defined(__sun) && defined(HAVE_ATOMIC_H)
    atomic_or_uint(ptr, val);

# error Unsupported platform.

#if RBIMPL_COMPILER_BEFORE(MSVC, 13, 0, 0)

    return rbimpl_atomic_or(var, val);

#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
    return __atomic_exchange_n(ptr, val, __ATOMIC_SEQ_CST);

#elif defined(HAVE_GCC_SYNC_BUILTINS)
    return __sync_lock_test_and_set(ptr, val);

    return InterlockedExchange(ptr, val);

#elif defined(__sun) && defined(HAVE_ATOMIC_H)
    return atomic_swap_uint(ptr, val);

# error Unsupported platform.

rbimpl_atomic_size_exchange(volatile size_t *ptr, size_t val)

#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
    return __atomic_exchange_n(ptr, val, __ATOMIC_SEQ_CST);

#elif defined(HAVE_GCC_SYNC_BUILTINS)
    return __sync_lock_test_and_set(ptr, val);

#elif defined(_WIN32) && defined(_M_AMD64)
    return InterlockedExchange64(ptr, val);

#elif defined(__sun) && defined(HAVE_ATOMIC_H) && (defined(_LP64) || defined(_I32LPx))
    return atomic_swap_ulong(ptr, val);

    const rb_atomic_t ret = rbimpl_atomic_exchange(tmp, val);
    return RBIMPL_CAST((size_t)ret);

rbimpl_atomic_ptr_exchange(void *volatile *ptr, const void *val)

#elif defined(InterlockedExchangePointer)

    PVOID *pptr = RBIMPL_CAST((PVOID *)ptr);
    PVOID pval = RBIMPL_CAST((PVOID)val);
    return InterlockedExchangePointer(pptr, pval);

#elif defined(__sun) && defined(HAVE_ATOMIC_H)
    return atomic_swap_ptr(ptr, RBIMPL_CAST((void *)val));

    const size_t sval = RBIMPL_CAST((size_t)val);
    volatile size_t *const sptr = RBIMPL_CAST((volatile size_t *)ptr);
    const size_t sret = rbimpl_atomic_size_exchange(sptr, sval);
    return RBIMPL_CAST((void *)sret);

rbimpl_atomic_value_exchange(volatile VALUE *ptr, VALUE val)

    const size_t sval = RBIMPL_CAST((size_t)val);
    volatile size_t *const sptr = RBIMPL_CAST((volatile size_t *)ptr);
    const size_t sret = rbimpl_atomic_size_exchange(sptr, sval);
    return RBIMPL_CAST((VALUE)sret);

#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
    __atomic_store_n(ptr, val, __ATOMIC_SEQ_CST);

    rbimpl_atomic_exchange(ptr, val);

#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
    __atomic_compare_exchange_n(
        ptr, &oldval, newval, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);

#elif defined(HAVE_GCC_SYNC_BUILTINS)
    return __sync_val_compare_and_swap(ptr, oldval, newval);

#elif RBIMPL_COMPILER_SINCE(MSVC, 13, 0, 0)
    return InterlockedCompareExchange(ptr, newval, oldval);

    PVOID *pptr = RBIMPL_CAST((PVOID *)ptr);
    PVOID pold = RBIMPL_CAST((PVOID)oldval);
    PVOID pnew = RBIMPL_CAST((PVOID)newval);
    PVOID pret = InterlockedCompareExchange(pptr, pnew, pold);

#elif defined(__sun) && defined(HAVE_ATOMIC_H)
    return atomic_cas_uint(ptr, oldval, newval);

# error Unsupported platform.

#if RBIMPL_COMPILER_BEFORE(MSVC, 13, 0, 0)

    return rbimpl_atomic_cas(var, oldval, newval);

rbimpl_atomic_size_cas(volatile size_t *ptr, size_t oldval, size_t newval)

#elif defined(HAVE_GCC_ATOMIC_BUILTINS)
    __atomic_compare_exchange_n(
        ptr, &oldval, newval, 0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);

#elif defined(HAVE_GCC_SYNC_BUILTINS)
    return __sync_val_compare_and_swap(ptr, oldval, newval);

#elif defined(_WIN32) && defined(_M_AMD64)
    return InterlockedCompareExchange64(ptr, newval, oldval);

#elif defined(__sun) && defined(HAVE_ATOMIC_H) && (defined(_LP64) || defined(_I32LPx))
    return atomic_cas_ulong(ptr, oldval, newval);

    return rbimpl_atomic_cas(tmp, oldval, newval);

rbimpl_atomic_ptr_cas(void **ptr, const void *oldval, const void *newval)

#elif defined(InterlockedExchangePointer)

    PVOID *pptr = RBIMPL_CAST((PVOID *)ptr);
    PVOID pold = RBIMPL_CAST((PVOID)oldval);
    PVOID pnew = RBIMPL_CAST((PVOID)newval);
    return InterlockedCompareExchangePointer(pptr, pnew, pold);

#elif defined(__sun) && defined(HAVE_ATOMIC_H)
    void *pold = RBIMPL_CAST((void *)oldval);
    void *pnew = RBIMPL_CAST((void *)newval);
    return atomic_cas_ptr(ptr, pold, pnew);

    const size_t snew = RBIMPL_CAST((size_t)newval);
    const size_t sold = RBIMPL_CAST((size_t)oldval);
    volatile size_t *const sptr = RBIMPL_CAST((volatile size_t *)ptr);
    const size_t sret = rbimpl_atomic_size_cas(sptr, sold, snew);
    return RBIMPL_CAST((void *)sret);

rbimpl_atomic_value_cas(volatile VALUE *ptr, VALUE oldval, VALUE newval)

    const size_t snew = RBIMPL_CAST((size_t)newval);
    const size_t sold = RBIMPL_CAST((size_t)oldval);
    volatile size_t *const sptr = RBIMPL_CAST((volatile size_t *)ptr);
    const size_t sret = rbimpl_atomic_size_cas(sptr, sold, snew);
    return RBIMPL_CAST((VALUE)sret);