diff --git a/libs/libc/machine/arch_atomic.c b/libs/libc/machine/arch_atomic.c
index 5da80f452c424..88542c72e8d6a 100644
--- a/libs/libc/machine/arch_atomic.c
+++ b/libs/libc/machine/arch_atomic.c
@@ -28,7 +28,33 @@
 #include <stdbool.h>
 #include <stdint.h>
 
-#include <nuttx/spinlock.h>
+#include <nuttx/irq.h>
+
+/****************************************************************************
+ * Private Data
+ ****************************************************************************/
+
+#ifdef CONFIG_SMP
+static inline_function irqstate_t atomic_lock(void)
+{
+  return enter_critical_section();
+}
+
+static inline_function void atomic_unlock(irqstate_t flags)
+{
+  leave_critical_section(flags);
+}
+#else
+static inline_function irqstate_t atomic_lock(void)
+{
+  return up_irq_save();
+}
+
+static inline_function void atomic_unlock(irqstate_t flags)
+{
+  up_irq_restore(flags);
+}
+#endif
 
 /****************************************************************************
  * Pre-processor Definitions
  ****************************************************************************/
@@ -39,11 +65,11 @@
   void weak_function __atomic_store_##n (FAR volatile void *ptr, \
                                          type value, int memorder) \
   { \
-    irqstate_t irqstate = spin_lock_irqsave(NULL); \
+    irqstate_t irqstate = atomic_lock(); \
 \
     *(FAR type *)ptr = value; \
 \
-    spin_unlock_irqrestore(NULL, irqstate); \
+    atomic_unlock(irqstate); \
   }
@@ -51,11 +77,11 @@
   type weak_function __atomic_load_##n (FAR const volatile void *ptr, \
                                         int memorder) \
   { \
-    irqstate_t irqstate = spin_lock_irqsave(NULL); \
+    irqstate_t irqstate = atomic_lock(); \
 \
     type ret = *(FAR type *)ptr; \
 \
-    spin_unlock_irqrestore(NULL, irqstate); \
+    atomic_unlock(irqstate); \
     return ret; \
   }
@@ -64,13 +90,13 @@
   type weak_function __atomic_exchange_##n (FAR volatile void *ptr, \
                                             type value, int memorder) \
   { \
-    irqstate_t irqstate = spin_lock_irqsave(NULL); \
+    irqstate_t irqstate = atomic_lock(); \
     FAR type *tmp = (FAR type *)ptr; \
 \
     type ret = *tmp; \
     *tmp = value; \
 \
-    spin_unlock_irqrestore(NULL, irqstate); \
+    atomic_unlock(irqstate); \
     return ret; \
   }
@@ -82,7 +108,7 @@
                                              int success, int failure) \
   { \
     bool ret = false; \
-    irqstate_t irqstate = spin_lock_irqsave(NULL); \
+    irqstate_t irqstate = atomic_lock(); \
     FAR type *tmpmem = (FAR type *)mem; \
     FAR type *tmpexp = (FAR type *)expect; \
 \
@@ -96,7 +122,7 @@
         *tmpexp = *tmpmem; \
       } \
 \
-    spin_unlock_irqrestore(NULL, irqstate); \
+    atomic_unlock(irqstate); \
     return ret; \
   }
@@ -105,13 +131,13 @@
   type weak_function __atomic_flags_test_and_set##n (FAR volatile void *ptr, \
                                                      int memorder) \
   { \
-    irqstate_t irqstate = spin_lock_irqsave(NULL); \
+    irqstate_t irqstate = atomic_lock(); \
     FAR type *tmp = (FAR type *)ptr; \
     type ret = *tmp; \
 \
     *(FAR type *)ptr = 1; \
 \
-    spin_unlock_irqrestore(NULL, irqstate); \
+    atomic_unlock(irqstate); \
     return ret; \
   }
@@ -120,13 +146,13 @@
   type weak_function __atomic_fetch_add_##n (FAR volatile void *ptr, \
                                              type value, int memorder) \
   { \
-    irqstate_t irqstate = spin_lock_irqsave(NULL); \
+    irqstate_t irqstate = atomic_lock(); \
     FAR type *tmp = (FAR type *)ptr; \
     type ret = *tmp; \
 \
     *tmp = *tmp + value; \
 \
-    spin_unlock_irqrestore(NULL, irqstate); \
+    atomic_unlock(irqstate); \
     return ret; \
   }
@@ -135,13 +161,13 @@
   type weak_function __atomic_fetch_sub_##n (FAR volatile void *ptr, \
                                              type value, int memorder) \
   { \
-    irqstate_t irqstate = spin_lock_irqsave(NULL); \
+    irqstate_t irqstate = atomic_lock(); \
     FAR type *tmp = (FAR type *)ptr; \
     type ret = *tmp; \
 \
     *tmp = *tmp - value; \
 \
-    spin_unlock_irqrestore(NULL, irqstate); \
+    atomic_unlock(irqstate); \
     return ret; \
   }
@@ -150,13 +176,13 @@
   type weak_function __atomic_fetch_and_##n (FAR volatile void *ptr, \
                                              type value, int memorder) \
   { \
-    irqstate_t irqstate = spin_lock_irqsave(NULL); \
+    irqstate_t irqstate = atomic_lock(); \
     FAR type *tmp = (FAR type *)ptr; \
     type ret = *tmp; \
 \
     *tmp = *tmp & value; \
 \
-    spin_unlock_irqrestore(NULL, irqstate); \
+    atomic_unlock(irqstate); \
     return ret; \
   }
@@ -165,13 +191,13 @@
   type weak_function __atomic_fetch_or_##n (FAR volatile void *ptr, \
                                             type value, int memorder) \
   { \
-    irqstate_t irqstate = spin_lock_irqsave(NULL); \
+    irqstate_t irqstate = atomic_lock(); \
     FAR type *tmp = (FAR type *)ptr; \
     type ret = *tmp; \
 \
     *tmp = *tmp | value; \
 \
-    spin_unlock_irqrestore(NULL, irqstate); \
+    atomic_unlock(irqstate); \
     return ret; \
   }
@@ -180,13 +206,13 @@
   type weak_function __atomic_fetch_xor_##n (FAR volatile void *ptr, \
                                              type value, int memorder) \
   { \
-    irqstate_t irqstate = spin_lock_irqsave(NULL); \
+    irqstate_t irqstate = atomic_lock(); \
     FAR type *tmp = (FAR type *)ptr; \
     type ret = *tmp; \
 \
     *tmp = *tmp ^ value; \
 \
-    spin_unlock_irqrestore(NULL, irqstate); \
+    atomic_unlock(irqstate); \
     return ret; \
   }
@@ -195,12 +221,12 @@
   type weak_function __sync_add_and_fetch_##n (FAR volatile void *ptr, \
                                                type value) \
   { \
-    irqstate_t irqstate = spin_lock_irqsave(NULL); \
+    irqstate_t irqstate = atomic_lock(); \
     FAR type *tmp = (FAR type *)ptr; \
 \
     *tmp = *tmp + value; \
 \
-    spin_unlock_irqrestore(NULL, irqstate); \
+    atomic_unlock(irqstate); \
     return *tmp; \
   }
@@ -209,12 +235,12 @@
   type weak_function __sync_sub_and_fetch_##n (FAR volatile void *ptr, \
                                                type value) \
   { \
-    irqstate_t irqstate = spin_lock_irqsave(NULL); \
+    irqstate_t irqstate = atomic_lock(); \
     FAR type *tmp = (FAR type *)ptr; \
 \
     *tmp = *tmp - value; \
 \
-    spin_unlock_irqrestore(NULL, irqstate); \
+    atomic_unlock(irqstate); \
     return *tmp; \
   }
@@ -223,12 +249,12 @@
   type weak_function __sync_or_and_fetch_##n (FAR volatile void *ptr, \
                                               type value) \
   { \
-    irqstate_t irqstate = spin_lock_irqsave(NULL); \
+    irqstate_t irqstate = atomic_lock(); \
     FAR type *tmp = (FAR type *)ptr; \
 \
     *tmp = *tmp | value; \
 \
-    spin_unlock_irqrestore(NULL, irqstate); \
+    atomic_unlock(irqstate); \
     return *tmp; \
   }
@@ -237,12 +263,12 @@
   type weak_function __sync_and_and_fetch_##n (FAR volatile void *ptr, \
                                                type value) \
   { \
-    irqstate_t irqstate = spin_lock_irqsave(NULL); \
+    irqstate_t irqstate = atomic_lock(); \
     FAR type *tmp = (FAR type *)ptr; \
 \
     *tmp = *tmp & value; \
 \
-    spin_unlock_irqrestore(NULL, irqstate); \
+    atomic_unlock(irqstate); \
     return *tmp; \
   }
@@ -251,12 +277,12 @@
   type weak_function __sync_xor_and_fetch_##n (FAR volatile void *ptr, \
                                                type value) \
   { \
-    irqstate_t irqstate = spin_lock_irqsave(NULL); \
+    irqstate_t irqstate = atomic_lock(); \
     FAR type *tmp = (FAR type *)ptr; \
 \
     *tmp = *tmp ^ value; \
 \
-    spin_unlock_irqrestore(NULL, irqstate); \
+    atomic_unlock(irqstate); \
    return *tmp; \
   }
@@ -265,12 +291,12 @@
   type weak_function __sync_nand_and_fetch_##n (FAR volatile void *ptr, \
                                                 type value) \
   { \
-    irqstate_t irqstate = spin_lock_irqsave(NULL); \
+    irqstate_t irqstate = atomic_lock(); \
     FAR type *tmp = (FAR type *)ptr; \
 \
     *tmp = ~(*tmp & value); \
 \
-    spin_unlock_irqrestore(NULL, irqstate); \
+    atomic_unlock(irqstate); \
     return *tmp; \
   }
@@ -281,7 +307,7 @@
                                                     type newvalue) \
   { \
     bool ret = false; \
-    irqstate_t irqstate = spin_lock_irqsave(NULL); \
+    irqstate_t irqstate = atomic_lock(); \
     FAR type *tmp = (FAR type *)ptr; \
 \
     if (*tmp == oldvalue) \
@@ -290,7 +316,7 @@
         *tmp = newvalue; \
       } \
 \
-    spin_unlock_irqrestore(NULL, irqstate); \
+    atomic_unlock(irqstate); \
     return ret; \
   }
@@ -300,7 +326,7 @@
                                                    type oldvalue, \
                                                    type newvalue) \
   { \
-    irqstate_t irqstate = spin_lock_irqsave(NULL); \
+    irqstate_t irqstate = atomic_lock(); \
     FAR type *tmp = (FAR type *)ptr; \
     type ret = *tmp; \
 \
@@ -309,7 +335,7 @@
         *tmp = newvalue; \
       } \
 \
-    spin_unlock_irqrestore(NULL, irqstate); \
+    atomic_unlock(irqstate); \
     return ret; \
   }
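
Note (not part of the patch, for illustration only): the sketch below is roughly what the FETCH_ADD(8, uint64_t) instantiation expands to after this change, written out by hand from the macro in the diff. On a target without native 64-bit atomics, GCC/Clang lower a call such as __atomic_fetch_add(&counter, 1, __ATOMIC_SEQ_CST) to the out-of-line helper __atomic_fetch_add_8(), which resolves to this weak libc fallback unless the architecture provides a stronger definition. The read-modify-write is serialized by atomic_lock()/atomic_unlock(), i.e. the critical section on CONFIG_SMP builds or a plain interrupt disable otherwise; the memorder argument is accepted but effectively ignored, since the lock already orders the access.

/* Hand expansion of FETCH_ADD(8, uint64_t) as it would appear inside
 * arch_atomic.c after this patch.  atomic_lock()/atomic_unlock() are the
 * static helpers added above; FAR, weak_function and irqstate_t come from
 * <nuttx/compiler.h> and <nuttx/irq.h>.
 */

#include <stdint.h>

#include <nuttx/compiler.h>
#include <nuttx/irq.h>

uint64_t weak_function __atomic_fetch_add_8 (FAR volatile void *ptr,
                                             uint64_t value, int memorder)
{
  irqstate_t irqstate = atomic_lock();      /* was spin_lock_irqsave(NULL) */
  FAR uint64_t *tmp = (FAR uint64_t *)ptr;
  uint64_t ret = *tmp;                      /* the old value is returned */

  *tmp = *tmp + value;                      /* read-modify-write under the lock */

  atomic_unlock(irqstate);                  /* was spin_unlock_irqrestore(NULL, irqstate) */
  return ret;
}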