libc/atomic: decoupling atomic and spinlock to avoid recursion
1. use irq save in AMP mode
2. use mutex lock in SMP mode

Signed-off-by: chao an <[email protected]>
anchao committed Oct 12, 2024
1 parent 1bba720 commit 12d2ce1
Showing 1 changed file with 67 additions and 37 deletions.
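
These libc fallbacks are only reached when the toolchain lowers __atomic_*/__sync_* builtins to library calls rather than native instructions. A minimal sketch of such a call site (the counter and function are illustrative, not part of this change):

#include <stdint.h>

static volatile uint32_t g_counter;

uint32_t bump_counter(void)
{
  /* On a target without native atomics the compiler emits a call to the
   * weak __atomic_fetch_add_4() fallback defined in the file below.
   * Before this change that fallback took a spinlock, and a spinlock
   * implementation may itself rely on atomic builtins, which is the
   * recursion this commit avoids.
   */

  return __atomic_fetch_add(&g_counter, 1, __ATOMIC_SEQ_CST);
}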
104 changes: 67 additions & 37 deletions libs/libc/machine/arch_atomic.c
@@ -28,7 +28,37 @@
 
 #include <stdbool.h>
 #include <stdint.h>
-#include <nuttx/spinlock.h>
+#include <nuttx/irq.h>
+#include <nuttx/mutex.h>
+
+/****************************************************************************
+ * Private Data
+ ****************************************************************************/
+
+#ifdef CONFIG_SMP
+static mutex_t g_atomic_lock = NXMUTEX_INITIALIZER;
+
+static inline irqstate_t atomic_lock(void)
+{
+  return nxmutex_lock(&g_atomic_lock);
+}
+
+static inline void atomic_unlock(irqstate_t flags)
+{
+  UNUSED(flags);
+  nxmutex_unlock(&g_atomic_lock);
+}
+#else
+static inline irqstate_t atomic_lock(void)
+{
+  return up_irq_save();
+}
+
+static inline void atomic_unlock(irqstate_t flags)
+{
+  up_irq_restore(flags);
+}
+#endif
 
 /****************************************************************************
  * Pre-processor Definitions
@@ -39,23 +69,23 @@
   void weak_function __atomic_store_##n (FAR volatile void *ptr, \
                                          type value, int memorder) \
   { \
-    irqstate_t irqstate = spin_lock_irqsave(NULL); \
+    irqstate_t irqstate = atomic_lock(); \
     \
     *(FAR type *)ptr = value; \
     \
-    spin_unlock_irqrestore(NULL, irqstate); \
+    atomic_unlock(irqstate); \
   }
 
 #define LOAD(n, type) \
   \
   type weak_function __atomic_load_##n (FAR const volatile void *ptr, \
                                         int memorder) \
   { \
-    irqstate_t irqstate = spin_lock_irqsave(NULL); \
+    irqstate_t irqstate = atomic_lock(); \
     \
     type ret = *(FAR type *)ptr; \
     \
-    spin_unlock_irqrestore(NULL, irqstate); \
+    atomic_unlock(irqstate); \
     return ret; \
   }
 
@@ -64,13 +94,13 @@
   type weak_function __atomic_exchange_##n (FAR volatile void *ptr, \
                                             type value, int memorder) \
   { \
-    irqstate_t irqstate = spin_lock_irqsave(NULL); \
+    irqstate_t irqstate = atomic_lock(); \
     FAR type *tmp = (FAR type *)ptr; \
     \
     type ret = *tmp; \
     *tmp = value; \
     \
-    spin_unlock_irqrestore(NULL, irqstate); \
+    atomic_unlock(irqstate); \
     return ret; \
   }
 
@@ -82,7 +112,7 @@
                                                     int success, int failure) \
   { \
     bool ret = false; \
-    irqstate_t irqstate = spin_lock_irqsave(NULL); \
+    irqstate_t irqstate = atomic_lock(); \
     FAR type *tmpmem = (FAR type *)mem; \
     FAR type *tmpexp = (FAR type *)expect; \
     \
@@ -96,7 +126,7 @@
         *tmpexp = *tmpmem; \
       } \
     \
-    spin_unlock_irqrestore(NULL, irqstate); \
+    atomic_unlock(irqstate); \
     return ret; \
   }
 
@@ -105,13 +135,13 @@
   type weak_function __atomic_flags_test_and_set##n (FAR volatile void *ptr, \
                                                      int memorder) \
   { \
-    irqstate_t irqstate = spin_lock_irqsave(NULL); \
+    irqstate_t irqstate = atomic_lock(); \
     FAR type *tmp = (FAR type *)ptr; \
     type ret = *tmp; \
     \
     *(FAR type *)ptr = 1; \
     \
-    spin_unlock_irqrestore(NULL, irqstate); \
+    atomic_unlock(irqstate); \
     return ret; \
   }
 
@@ -120,13 +150,13 @@
   type weak_function __atomic_fetch_add_##n (FAR volatile void *ptr, \
                                              type value, int memorder) \
   { \
-    irqstate_t irqstate = spin_lock_irqsave(NULL); \
+    irqstate_t irqstate = atomic_lock(); \
     FAR type *tmp = (FAR type *)ptr; \
     type ret = *tmp; \
     \
     *tmp = *tmp + value; \
     \
-    spin_unlock_irqrestore(NULL, irqstate); \
+    atomic_unlock(irqstate); \
     return ret; \
   }
 
@@ -135,13 +165,13 @@
   type weak_function __atomic_fetch_sub_##n (FAR volatile void *ptr, \
                                              type value, int memorder) \
   { \
-    irqstate_t irqstate = spin_lock_irqsave(NULL); \
+    irqstate_t irqstate = atomic_lock(); \
     FAR type *tmp = (FAR type *)ptr; \
     type ret = *tmp; \
     \
     *tmp = *tmp - value; \
     \
-    spin_unlock_irqrestore(NULL, irqstate); \
+    atomic_unlock(irqstate); \
     return ret; \
   }
 
@@ -150,13 +180,13 @@
   type weak_function __atomic_fetch_and_##n (FAR volatile void *ptr, \
                                              type value, int memorder) \
   { \
-    irqstate_t irqstate = spin_lock_irqsave(NULL); \
+    irqstate_t irqstate = atomic_lock(); \
     FAR type *tmp = (FAR type *)ptr; \
     type ret = *tmp; \
     \
     *tmp = *tmp & value; \
     \
-    spin_unlock_irqrestore(NULL, irqstate); \
+    atomic_unlock(irqstate); \
     return ret; \
   }
 
@@ -165,13 +195,13 @@
   type weak_function __atomic_fetch_or_##n (FAR volatile void *ptr, \
                                             type value, int memorder) \
   { \
-    irqstate_t irqstate = spin_lock_irqsave(NULL); \
+    irqstate_t irqstate = atomic_lock(); \
     FAR type *tmp = (FAR type *)ptr; \
     type ret = *tmp; \
     \
     *tmp = *tmp | value; \
     \
-    spin_unlock_irqrestore(NULL, irqstate); \
+    atomic_unlock(irqstate); \
     return ret; \
   }
 
@@ -180,13 +210,13 @@
   type weak_function __atomic_fetch_xor_##n (FAR volatile void *ptr, \
                                              type value, int memorder) \
   { \
-    irqstate_t irqstate = spin_lock_irqsave(NULL); \
+    irqstate_t irqstate = atomic_lock(); \
     FAR type *tmp = (FAR type *)ptr; \
     type ret = *tmp; \
     \
     *tmp = *tmp ^ value; \
     \
-    spin_unlock_irqrestore(NULL, irqstate); \
+    atomic_unlock(irqstate); \
     return ret; \
   }
 
@@ -195,12 +225,12 @@
   type weak_function __sync_add_and_fetch_##n (FAR volatile void *ptr, \
                                                type value) \
   { \
-    irqstate_t irqstate = spin_lock_irqsave(NULL); \
+    irqstate_t irqstate = atomic_lock(); \
     FAR type *tmp = (FAR type *)ptr; \
     \
     *tmp = *tmp + value; \
     \
-    spin_unlock_irqrestore(NULL, irqstate); \
+    atomic_unlock(irqstate); \
     return *tmp; \
   }
 
@@ -209,12 +239,12 @@
   type weak_function __sync_sub_and_fetch_##n (FAR volatile void *ptr, \
                                                type value) \
   { \
-    irqstate_t irqstate = spin_lock_irqsave(NULL); \
+    irqstate_t irqstate = atomic_lock(); \
     FAR type *tmp = (FAR type *)ptr; \
     \
     *tmp = *tmp - value; \
     \
-    spin_unlock_irqrestore(NULL, irqstate); \
+    atomic_unlock(irqstate); \
     return *tmp; \
   }
 
@@ -223,12 +253,12 @@
   type weak_function __sync_or_and_fetch_##n (FAR volatile void *ptr, \
                                               type value) \
   { \
-    irqstate_t irqstate = spin_lock_irqsave(NULL); \
+    irqstate_t irqstate = atomic_lock(); \
     FAR type *tmp = (FAR type *)ptr; \
     \
     *tmp = *tmp | value; \
     \
-    spin_unlock_irqrestore(NULL, irqstate); \
+    atomic_unlock(irqstate); \
     return *tmp; \
   }
 
@@ -237,12 +267,12 @@
   type weak_function __sync_and_and_fetch_##n (FAR volatile void *ptr, \
                                                type value) \
   { \
-    irqstate_t irqstate = spin_lock_irqsave(NULL); \
+    irqstate_t irqstate = atomic_lock(); \
     FAR type *tmp = (FAR type *)ptr; \
     \
     *tmp = *tmp & value; \
     \
-    spin_unlock_irqrestore(NULL, irqstate); \
+    atomic_unlock(irqstate); \
     return *tmp; \
   }
 
@@ -251,12 +281,12 @@
   type weak_function __sync_xor_and_fetch_##n (FAR volatile void *ptr, \
                                                type value) \
   { \
-    irqstate_t irqstate = spin_lock_irqsave(NULL); \
+    irqstate_t irqstate = atomic_lock(); \
     FAR type *tmp = (FAR type *)ptr; \
     \
     *tmp = *tmp ^ value; \
     \
-    spin_unlock_irqrestore(NULL, irqstate); \
+    atomic_unlock(irqstate); \
     return *tmp; \
   }
 
@@ -265,12 +295,12 @@
   type weak_function __sync_nand_and_fetch_##n (FAR volatile void *ptr, \
                                                 type value) \
   { \
-    irqstate_t irqstate = spin_lock_irqsave(NULL); \
+    irqstate_t irqstate = atomic_lock(); \
     FAR type *tmp = (FAR type *)ptr; \
     \
     *tmp = ~(*tmp & value); \
     \
-    spin_unlock_irqrestore(NULL, irqstate); \
+    atomic_unlock(irqstate); \
     return *tmp; \
   }
 
@@ -281,7 +311,7 @@
                                                        type newvalue) \
   { \
     bool ret = false; \
-    irqstate_t irqstate = spin_lock_irqsave(NULL); \
+    irqstate_t irqstate = atomic_lock(); \
     FAR type *tmp = (FAR type *)ptr; \
     \
     if (*tmp == oldvalue) \
@@ -290,7 +320,7 @@
         *tmp = newvalue; \
       } \
     \
-    spin_unlock_irqrestore(NULL, irqstate); \
+    atomic_unlock(irqstate); \
     return ret; \
   }
 
@@ -300,7 +330,7 @@
                                                       type oldvalue, \
                                                       type newvalue) \
   { \
-    irqstate_t irqstate = spin_lock_irqsave(NULL); \
+    irqstate_t irqstate = atomic_lock(); \
     FAR type *tmp = (FAR type *)ptr; \
     type ret = *tmp; \
     \
@@ -309,7 +339,7 @@
         *tmp = newvalue; \
       } \
     \
-    spin_unlock_irqrestore(NULL, irqstate); \
+    atomic_unlock(irqstate); \
     return ret; \
   }
 
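For reference, after this change one expansion of the FETCH_ADD() macro, taking FETCH_ADD(4, uint32_t) as an assumed instantiation, generates roughly the following weak fallback:

uint32_t weak_function __atomic_fetch_add_4 (FAR volatile void *ptr,
                                             uint32_t value, int memorder)
{
  irqstate_t irqstate = atomic_lock();      /* irq save (AMP) or mutex (SMP) */
  FAR uint32_t *tmp = (FAR uint32_t *)ptr;
  uint32_t ret = *tmp;

  *tmp = *tmp + value;                      /* read-modify-write under the lock */

  atomic_unlock(irqstate);
  return ret;                               /* old value, per fetch-add semantics */
}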
