Skip to content

Commit

Permalink
libc/atomic: decouple atomic ops from spinlock to avoid recursion
Browse files Browse the repository at this point in the history
1. use irq save in AMP mode
2. use critical section in SMP mode

Signed-off-by: chao an <anchao@lixiang.com>
  • Loading branch information
anchao committed Oct 12, 2024
1 parent 1bba720 commit f9cbaf5
Showing 1 changed file with 63 additions and 37 deletions.
100 changes: 63 additions & 37 deletions libs/libc/machine/arch_atomic.c
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,33 @@

#include <stdbool.h>
#include <stdint.h>
#include <nuttx/spinlock.h>
#include <nuttx/irq.h>

/****************************************************************************
* Private Data
****************************************************************************/

#ifdef CONFIG_SMP
/* SMP: a bare irq-save only masks interrupts on the local CPU, so the
 * critical section (which also serializes against the other CPUs) is
 * required to protect the emulated atomic operations.
 */

static inline_function irqstate_t atomic_lock(void)
{
  irqstate_t flags = enter_critical_section();
  return flags;
}

static inline_function void atomic_unlock(irqstate_t flags)
{
  leave_critical_section(flags);
}
#else
/* Single-core (AMP): disabling local interrupts is sufficient, and it
 * avoids re-entering the spinlock code from these atomic helpers.
 */

static inline_function irqstate_t atomic_lock(void)
{
  irqstate_t flags = up_irq_save();
  return flags;
}

static inline_function void atomic_unlock(irqstate_t flags)
{
  up_irq_restore(flags);
}
#endif

/****************************************************************************
* Pre-processor Definitions
Expand All @@ -39,23 +65,23 @@
void weak_function __atomic_store_##n (FAR volatile void *ptr, \
type value, int memorder) \
{ \
irqstate_t irqstate = spin_lock_irqsave(NULL); \
irqstate_t irqstate = atomic_lock(); \
\
*(FAR type *)ptr = value; \
\
spin_unlock_irqrestore(NULL, irqstate); \
atomic_unlock(irqstate); \
}

#define LOAD(n, type) \
\
type weak_function __atomic_load_##n (FAR const volatile void *ptr, \
int memorder) \
{ \
irqstate_t irqstate = spin_lock_irqsave(NULL); \
irqstate_t irqstate = atomic_lock(); \
\
type ret = *(FAR type *)ptr; \
\
spin_unlock_irqrestore(NULL, irqstate); \
atomic_unlock(irqstate); \
return ret; \
}

Expand All @@ -64,13 +90,13 @@
type weak_function __atomic_exchange_##n (FAR volatile void *ptr, \
type value, int memorder) \
{ \
irqstate_t irqstate = spin_lock_irqsave(NULL); \
irqstate_t irqstate = atomic_lock(); \
FAR type *tmp = (FAR type *)ptr; \
\
type ret = *tmp; \
*tmp = value; \
\
spin_unlock_irqrestore(NULL, irqstate); \
atomic_unlock(irqstate); \
return ret; \
}

Expand All @@ -82,7 +108,7 @@
int success, int failure) \
{ \
bool ret = false; \
irqstate_t irqstate = spin_lock_irqsave(NULL); \
irqstate_t irqstate = atomic_lock(); \
FAR type *tmpmem = (FAR type *)mem; \
FAR type *tmpexp = (FAR type *)expect; \
\
Expand All @@ -96,7 +122,7 @@
*tmpexp = *tmpmem; \
} \
\
spin_unlock_irqrestore(NULL, irqstate); \
atomic_unlock(irqstate); \
return ret; \
}

Expand All @@ -105,13 +131,13 @@
type weak_function __atomic_flags_test_and_set##n (FAR volatile void *ptr, \
int memorder) \
{ \
irqstate_t irqstate = spin_lock_irqsave(NULL); \
irqstate_t irqstate = atomic_lock(); \
FAR type *tmp = (FAR type *)ptr; \
type ret = *tmp; \
\
*(FAR type *)ptr = 1; \
\
spin_unlock_irqrestore(NULL, irqstate); \
atomic_unlock(irqstate); \
return ret; \
}

Expand All @@ -120,13 +146,13 @@
type weak_function __atomic_fetch_add_##n (FAR volatile void *ptr, \
type value, int memorder) \
{ \
irqstate_t irqstate = spin_lock_irqsave(NULL); \
irqstate_t irqstate = atomic_lock(); \
FAR type *tmp = (FAR type *)ptr; \
type ret = *tmp; \
\
*tmp = *tmp + value; \
\
spin_unlock_irqrestore(NULL, irqstate); \
atomic_unlock(irqstate); \
return ret; \
}

Expand All @@ -135,13 +161,13 @@
type weak_function __atomic_fetch_sub_##n (FAR volatile void *ptr, \
type value, int memorder) \
{ \
irqstate_t irqstate = spin_lock_irqsave(NULL); \
irqstate_t irqstate = atomic_lock(); \
FAR type *tmp = (FAR type *)ptr; \
type ret = *tmp; \
\
*tmp = *tmp - value; \
\
spin_unlock_irqrestore(NULL, irqstate); \
atomic_unlock(irqstate); \
return ret; \
}

Expand All @@ -150,13 +176,13 @@
type weak_function __atomic_fetch_and_##n (FAR volatile void *ptr, \
type value, int memorder) \
{ \
irqstate_t irqstate = spin_lock_irqsave(NULL); \
irqstate_t irqstate = atomic_lock(); \
FAR type *tmp = (FAR type *)ptr; \
type ret = *tmp; \
\
*tmp = *tmp & value; \
\
spin_unlock_irqrestore(NULL, irqstate); \
atomic_unlock(irqstate); \
return ret; \
}

Expand All @@ -165,13 +191,13 @@
type weak_function __atomic_fetch_or_##n (FAR volatile void *ptr, \
type value, int memorder) \
{ \
irqstate_t irqstate = spin_lock_irqsave(NULL); \
irqstate_t irqstate = atomic_lock(); \
FAR type *tmp = (FAR type *)ptr; \
type ret = *tmp; \
\
*tmp = *tmp | value; \
\
spin_unlock_irqrestore(NULL, irqstate); \
atomic_unlock(irqstate); \
return ret; \
}

Expand All @@ -180,13 +206,13 @@
type weak_function __atomic_fetch_xor_##n (FAR volatile void *ptr, \
type value, int memorder) \
{ \
irqstate_t irqstate = spin_lock_irqsave(NULL); \
irqstate_t irqstate = atomic_lock(); \
FAR type *tmp = (FAR type *)ptr; \
type ret = *tmp; \
\
*tmp = *tmp ^ value; \
\
spin_unlock_irqrestore(NULL, irqstate); \
atomic_unlock(irqstate); \
return ret; \
}

Expand All @@ -195,12 +221,12 @@
type weak_function __sync_add_and_fetch_##n (FAR volatile void *ptr, \
type value) \
{ \
irqstate_t irqstate = spin_lock_irqsave(NULL); \
irqstate_t irqstate = atomic_lock(); \
FAR type *tmp = (FAR type *)ptr; \
\
*tmp = *tmp + value; \
\
spin_unlock_irqrestore(NULL, irqstate); \
atomic_unlock(irqstate); \
return *tmp; \
}

Expand All @@ -209,12 +235,12 @@
type weak_function __sync_sub_and_fetch_##n (FAR volatile void *ptr, \
type value) \
{ \
irqstate_t irqstate = spin_lock_irqsave(NULL); \
irqstate_t irqstate = atomic_lock(); \
FAR type *tmp = (FAR type *)ptr; \
\
*tmp = *tmp - value; \
\
spin_unlock_irqrestore(NULL, irqstate); \
atomic_unlock(irqstate); \
return *tmp; \
}

Expand All @@ -223,12 +249,12 @@
type weak_function __sync_or_and_fetch_##n (FAR volatile void *ptr, \
type value) \
{ \
irqstate_t irqstate = spin_lock_irqsave(NULL); \
irqstate_t irqstate = atomic_lock(); \
FAR type *tmp = (FAR type *)ptr; \
\
*tmp = *tmp | value; \
\
spin_unlock_irqrestore(NULL, irqstate); \
atomic_unlock(irqstate); \
return *tmp; \
}

Expand All @@ -237,12 +263,12 @@
type weak_function __sync_and_and_fetch_##n (FAR volatile void *ptr, \
type value) \
{ \
irqstate_t irqstate = spin_lock_irqsave(NULL); \
irqstate_t irqstate = atomic_lock(); \
FAR type *tmp = (FAR type *)ptr; \
\
*tmp = *tmp & value; \
\
spin_unlock_irqrestore(NULL, irqstate); \
atomic_unlock(irqstate); \
return *tmp; \
}

Expand All @@ -251,12 +277,12 @@
type weak_function __sync_xor_and_fetch_##n (FAR volatile void *ptr, \
type value) \
{ \
irqstate_t irqstate = spin_lock_irqsave(NULL); \
irqstate_t irqstate = atomic_lock(); \
FAR type *tmp = (FAR type *)ptr; \
\
*tmp = *tmp ^ value; \
\
spin_unlock_irqrestore(NULL, irqstate); \
atomic_unlock(irqstate); \
return *tmp; \
}

Expand All @@ -265,12 +291,12 @@
type weak_function __sync_nand_and_fetch_##n (FAR volatile void *ptr, \
type value) \
{ \
irqstate_t irqstate = spin_lock_irqsave(NULL); \
irqstate_t irqstate = atomic_lock(); \
FAR type *tmp = (FAR type *)ptr; \
\
*tmp = ~(*tmp & value); \
\
spin_unlock_irqrestore(NULL, irqstate); \
atomic_unlock(irqstate); \
return *tmp; \
}

Expand All @@ -281,7 +307,7 @@
type newvalue) \
{ \
bool ret = false; \
irqstate_t irqstate = spin_lock_irqsave(NULL); \
irqstate_t irqstate = atomic_lock(); \
FAR type *tmp = (FAR type *)ptr; \
\
if (*tmp == oldvalue) \
Expand All @@ -290,7 +316,7 @@
*tmp = newvalue; \
} \
\
spin_unlock_irqrestore(NULL, irqstate); \
atomic_unlock(irqstate); \
return ret; \
}

Expand All @@ -300,7 +326,7 @@
type oldvalue, \
type newvalue) \
{ \
irqstate_t irqstate = spin_lock_irqsave(NULL); \
irqstate_t irqstate = atomic_lock(); \
FAR type *tmp = (FAR type *)ptr; \
type ret = *tmp; \
\
Expand All @@ -309,7 +335,7 @@
*tmp = newvalue; \
} \
\
spin_unlock_irqrestore(NULL, irqstate); \
atomic_unlock(irqstate); \
return ret; \
}

Expand Down

0 comments on commit f9cbaf5

Please sign in to comment.