Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

libc/atomic: decoupling atomic and spinlock to avoid recursion #14198

Merged
merged 1 commit into from
Oct 13, 2024
Merged
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
100 changes: 63 additions & 37 deletions libs/libc/machine/arch_atomic.c
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,33 @@

#include <stdbool.h>
#include <stdint.h>
#include <nuttx/spinlock.h>
#include <nuttx/irq.h>

/****************************************************************************
* Private Data
****************************************************************************/

#ifdef CONFIG_SMP
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Hardware must support atomic operations in SMP, so we don't need this code block.

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

But some architectures may not fully support all atomic operations; e.g., -march=armv7e-m does not support atomic8 (64-bit) operations. See OpenAMP/libmetal#301.

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

OK

/* Serialize an emulated atomic operation in SMP mode.  Masking local
 * interrupts is not sufficient when other CPUs can access the same
 * memory, so the global critical section is taken instead.  Returns
 * the interrupt state to pass back to atomic_unlock().
 */

static inline_function irqstate_t atomic_lock(void)
{
  irqstate_t flags;

  flags = enter_critical_section();
  return flags;
}

/* End the SMP critical section started by atomic_lock(), restoring
 * the interrupt state captured there.
 */

static inline_function void atomic_unlock(irqstate_t flags)
{
  leave_critical_section(flags);
}
#else
/* Serialize an emulated atomic operation on a single-CPU build.
 * Disabling local interrupts is sufficient here — no other CPU can
 * race us — and it avoids recursing into the spinlock code, which
 * itself may rely on these atomic emulation routines.  Returns the
 * saved interrupt state for atomic_unlock().
 */

static inline_function irqstate_t atomic_lock(void)
{
  irqstate_t flags;

  flags = up_irq_save();
  return flags;
}

/* Re-enable local interrupts, restoring the state that atomic_lock()
 * saved with up_irq_save().
 */

static inline_function void atomic_unlock(irqstate_t flags)
{
  up_irq_restore(flags);
}
#endif

/****************************************************************************
* Pre-processor Definitions
Expand All @@ -39,23 +65,23 @@
void weak_function __atomic_store_##n (FAR volatile void *ptr, \
type value, int memorder) \
{ \
irqstate_t irqstate = spin_lock_irqsave(NULL); \
irqstate_t irqstate = atomic_lock(); \
\
*(FAR type *)ptr = value; \
\
spin_unlock_irqrestore(NULL, irqstate); \
atomic_unlock(irqstate); \
}

#define LOAD(n, type) \
\
type weak_function __atomic_load_##n (FAR const volatile void *ptr, \
int memorder) \
{ \
irqstate_t irqstate = spin_lock_irqsave(NULL); \
irqstate_t irqstate = atomic_lock(); \
\
type ret = *(FAR type *)ptr; \
\
spin_unlock_irqrestore(NULL, irqstate); \
atomic_unlock(irqstate); \
return ret; \
}

Expand All @@ -64,13 +90,13 @@
type weak_function __atomic_exchange_##n (FAR volatile void *ptr, \
type value, int memorder) \
{ \
irqstate_t irqstate = spin_lock_irqsave(NULL); \
irqstate_t irqstate = atomic_lock(); \
FAR type *tmp = (FAR type *)ptr; \
\
type ret = *tmp; \
*tmp = value; \
\
spin_unlock_irqrestore(NULL, irqstate); \
atomic_unlock(irqstate); \
return ret; \
}

Expand All @@ -82,7 +108,7 @@
int success, int failure) \
{ \
bool ret = false; \
irqstate_t irqstate = spin_lock_irqsave(NULL); \
irqstate_t irqstate = atomic_lock(); \
FAR type *tmpmem = (FAR type *)mem; \
FAR type *tmpexp = (FAR type *)expect; \
\
Expand All @@ -96,7 +122,7 @@
*tmpexp = *tmpmem; \
} \
\
spin_unlock_irqrestore(NULL, irqstate); \
atomic_unlock(irqstate); \
return ret; \
}

Expand All @@ -105,13 +131,13 @@
type weak_function __atomic_flags_test_and_set##n (FAR volatile void *ptr, \
int memorder) \
{ \
irqstate_t irqstate = spin_lock_irqsave(NULL); \
irqstate_t irqstate = atomic_lock(); \
FAR type *tmp = (FAR type *)ptr; \
type ret = *tmp; \
\
*(FAR type *)ptr = 1; \
\
spin_unlock_irqrestore(NULL, irqstate); \
atomic_unlock(irqstate); \
return ret; \
}

Expand All @@ -120,13 +146,13 @@
type weak_function __atomic_fetch_add_##n (FAR volatile void *ptr, \
type value, int memorder) \
{ \
irqstate_t irqstate = spin_lock_irqsave(NULL); \
irqstate_t irqstate = atomic_lock(); \
FAR type *tmp = (FAR type *)ptr; \
type ret = *tmp; \
\
*tmp = *tmp + value; \
\
spin_unlock_irqrestore(NULL, irqstate); \
atomic_unlock(irqstate); \
return ret; \
}

Expand All @@ -135,13 +161,13 @@
type weak_function __atomic_fetch_sub_##n (FAR volatile void *ptr, \
type value, int memorder) \
{ \
irqstate_t irqstate = spin_lock_irqsave(NULL); \
irqstate_t irqstate = atomic_lock(); \
FAR type *tmp = (FAR type *)ptr; \
type ret = *tmp; \
\
*tmp = *tmp - value; \
\
spin_unlock_irqrestore(NULL, irqstate); \
atomic_unlock(irqstate); \
return ret; \
}

Expand All @@ -150,13 +176,13 @@
type weak_function __atomic_fetch_and_##n (FAR volatile void *ptr, \
type value, int memorder) \
{ \
irqstate_t irqstate = spin_lock_irqsave(NULL); \
irqstate_t irqstate = atomic_lock(); \
FAR type *tmp = (FAR type *)ptr; \
type ret = *tmp; \
\
*tmp = *tmp & value; \
\
spin_unlock_irqrestore(NULL, irqstate); \
atomic_unlock(irqstate); \
return ret; \
}

Expand All @@ -165,13 +191,13 @@
type weak_function __atomic_fetch_or_##n (FAR volatile void *ptr, \
type value, int memorder) \
{ \
irqstate_t irqstate = spin_lock_irqsave(NULL); \
irqstate_t irqstate = atomic_lock(); \
FAR type *tmp = (FAR type *)ptr; \
type ret = *tmp; \
\
*tmp = *tmp | value; \
\
spin_unlock_irqrestore(NULL, irqstate); \
atomic_unlock(irqstate); \
return ret; \
}

Expand All @@ -180,13 +206,13 @@
type weak_function __atomic_fetch_xor_##n (FAR volatile void *ptr, \
type value, int memorder) \
{ \
irqstate_t irqstate = spin_lock_irqsave(NULL); \
irqstate_t irqstate = atomic_lock(); \
FAR type *tmp = (FAR type *)ptr; \
type ret = *tmp; \
\
*tmp = *tmp ^ value; \
\
spin_unlock_irqrestore(NULL, irqstate); \
atomic_unlock(irqstate); \
return ret; \
}

Expand All @@ -195,12 +221,12 @@
type weak_function __sync_add_and_fetch_##n (FAR volatile void *ptr, \
type value) \
{ \
irqstate_t irqstate = spin_lock_irqsave(NULL); \
irqstate_t irqstate = atomic_lock(); \
FAR type *tmp = (FAR type *)ptr; \
\
*tmp = *tmp + value; \
\
spin_unlock_irqrestore(NULL, irqstate); \
atomic_unlock(irqstate); \
return *tmp; \
}

Expand All @@ -209,12 +235,12 @@
type weak_function __sync_sub_and_fetch_##n (FAR volatile void *ptr, \
type value) \
{ \
irqstate_t irqstate = spin_lock_irqsave(NULL); \
irqstate_t irqstate = atomic_lock(); \
FAR type *tmp = (FAR type *)ptr; \
\
*tmp = *tmp - value; \
\
spin_unlock_irqrestore(NULL, irqstate); \
atomic_unlock(irqstate); \
return *tmp; \
}

Expand All @@ -223,12 +249,12 @@
type weak_function __sync_or_and_fetch_##n (FAR volatile void *ptr, \
type value) \
{ \
irqstate_t irqstate = spin_lock_irqsave(NULL); \
irqstate_t irqstate = atomic_lock(); \
FAR type *tmp = (FAR type *)ptr; \
\
*tmp = *tmp | value; \
\
spin_unlock_irqrestore(NULL, irqstate); \
atomic_unlock(irqstate); \
return *tmp; \
}

Expand All @@ -237,12 +263,12 @@
type weak_function __sync_and_and_fetch_##n (FAR volatile void *ptr, \
type value) \
{ \
irqstate_t irqstate = spin_lock_irqsave(NULL); \
irqstate_t irqstate = atomic_lock(); \
FAR type *tmp = (FAR type *)ptr; \
\
*tmp = *tmp & value; \
\
spin_unlock_irqrestore(NULL, irqstate); \
atomic_unlock(irqstate); \
return *tmp; \
}

Expand All @@ -251,12 +277,12 @@
type weak_function __sync_xor_and_fetch_##n (FAR volatile void *ptr, \
type value) \
{ \
irqstate_t irqstate = spin_lock_irqsave(NULL); \
irqstate_t irqstate = atomic_lock(); \
FAR type *tmp = (FAR type *)ptr; \
\
*tmp = *tmp ^ value; \
\
spin_unlock_irqrestore(NULL, irqstate); \
atomic_unlock(irqstate); \
return *tmp; \
}

Expand All @@ -265,12 +291,12 @@
type weak_function __sync_nand_and_fetch_##n (FAR volatile void *ptr, \
type value) \
{ \
irqstate_t irqstate = spin_lock_irqsave(NULL); \
irqstate_t irqstate = atomic_lock(); \
FAR type *tmp = (FAR type *)ptr; \
\
*tmp = ~(*tmp & value); \
\
spin_unlock_irqrestore(NULL, irqstate); \
atomic_unlock(irqstate); \
return *tmp; \
}

Expand All @@ -281,7 +307,7 @@
type newvalue) \
{ \
bool ret = false; \
irqstate_t irqstate = spin_lock_irqsave(NULL); \
irqstate_t irqstate = atomic_lock(); \
FAR type *tmp = (FAR type *)ptr; \
\
if (*tmp == oldvalue) \
Expand All @@ -290,7 +316,7 @@
*tmp = newvalue; \
} \
\
spin_unlock_irqrestore(NULL, irqstate); \
atomic_unlock(irqstate); \
return ret; \
}

Expand All @@ -300,7 +326,7 @@
type oldvalue, \
type newvalue) \
{ \
irqstate_t irqstate = spin_lock_irqsave(NULL); \
irqstate_t irqstate = atomic_lock(); \
FAR type *tmp = (FAR type *)ptr; \
type ret = *tmp; \
\
Expand All @@ -309,7 +335,7 @@
*tmp = newvalue; \
} \
\
spin_unlock_irqrestore(NULL, irqstate); \
atomic_unlock(irqstate); \
return ret; \
}

Expand Down
Loading