feat: add assembly atomic interface

Add a 32-bit assembly atomic interface.

BREAKING CHANGE:
Assembly implementations are added for:
    ArchAtomicRead
    ArchAtomicSet
    ArchAtomicAdd
    ArchAtomicSub
    ArchAtomicInc
    ArchAtomicIncRet
    ArchAtomicDec
    ArchAtomicDecRet

https://gitee.com/openharmony/kernel_liteos_m/issues/I4O1UC
Signed-off-by: wang-shulin93 <15173259956@163.com>
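
A minimal usage sketch of the interface listed above (illustrative only, not part of this commit; DevOpen/DevClose are made-up names, and Atomic is the volatile INT32 typedef seen in this diff):

#include "los_atomic.h"

STATIC Atomic g_openCount = 0;   /* shared open counter, Atomic == volatile INT32 */

VOID DevOpen(VOID)
{
    /* ArchAtomicIncRet returns the new value, so exactly one caller sees 1 */
    if (ArchAtomicIncRet(&g_openCount) == 1) {
        /* first opener: perform one-time initialization */
    }
}

VOID DevClose(VOID)
{
    /* ArchAtomicDecRet returns the new value, so exactly one caller sees 0 */
    if (ArchAtomicDecRet(&g_openCount) == 0) {
        /* last closer: release the resource */
    }
}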
ou-yangkan 2021-12-31 13:58:12 +08:00
parent 909a18ec8f
commit 1513f50a47
31 changed files with 4288 additions and 602 deletions

View File

@ -40,75 +40,191 @@ extern "C" {
#endif /* __cplusplus */
#endif /* __cplusplus */
/**
* @ingroup los_arch_atomic
* @brief Atomic exchange for 32-bit variable.
*
* @par Description:
* This API is used to implement the atomic exchange for 32-bit variable
* and return the previous value of the atomic variable.
* @attention
* <ul>The pointer v must not be NULL.</ul>
*
* @param v [IN] The variable pointer.
* @param val [IN] The exchange value.
*
* @retval #INT32 The previous value of the atomic variable
* @par Dependency:
* <ul><li>los_arch_atomic.h: the header file that contains the API declaration.</li></ul>
* @see
*/
STATIC INLINE INT32 ArchAtomicXchg32bits(volatile INT32 *v, INT32 val)
STATIC INLINE INT32 ArchAtomicRead(const Atomic *v)
{
return -1;
UINT32 intSave;
intSave = LOS_IntLock();
LOS_IntRestore(intSave);
return *v;
}
/**
* @ingroup los_arch_atomic
* @brief Atomic auto-decrement.
*
* @par Description:
* This API is used to implement the atomic auto-decrement and return the result of auto-decrement.
* @attention
* <ul>
* <li>The pointer v must not be NULL.</li>
* <li>The value that v points to must not be INT_MIN, to avoid overflow after subtracting 1.</li>
* </ul>
*
* @param v [IN] The pointer to the variable to be decremented.
*
* @retval #INT32 The return value of variable auto-decrement.
* @par Dependency:
* <ul><li>los_arch_atomic.h: the header file that contains the API declaration.</li></ul>
* @see
*/
STATIC INLINE INT32 ArchAtomicDecRet(volatile INT32 *v)
STATIC INLINE VOID ArchAtomicSet(Atomic *v, INT32 setVal)
{
return -1;
UINT32 intSave;
intSave = LOS_IntLock();
*v = setVal;
LOS_IntRestore(intSave);
}
/**
* @ingroup los_arch_atomic
* @brief Atomic exchange for 32-bit variable with compare.
*
* @par Description:
* This API is used to implement the atomic exchange for 32-bit variable, if the value of variable is equal to oldVal.
* @attention
* <ul>The pointer v must not be NULL.</ul>
*
* @param v [IN] The variable pointer.
* @param val [IN] The new value.
* @param oldVal [IN] The old value.
*
* @retval TRUE The previous value of the atomic variable is not equal to oldVal.
* @retval FALSE The previous value of the atomic variable is equal to oldVal.
* @par Dependency:
* <ul><li>los_arch_atomic.h: the header file that contains the API declaration.</li></ul>
* @see
*/
STATIC INLINE BOOL ArchAtomicCmpXchg32bits(volatile INT32 *v, INT32 val, INT32 oldVal)
STATIC INLINE INT32 ArchAtomicAdd(Atomic *v, INT32 addVal)
{
return FALSE;
INT32 val;
UINT32 intSave;
intSave = LOS_IntLock();
*v += addVal;
val = *v;
LOS_IntRestore(intSave);
return val;
}
STATIC INLINE INT32 ArchAtomicSub(Atomic *v, INT32 subVal)
{
INT32 val;
UINT32 intSave;
intSave = LOS_IntLock();
*v -= subVal;
val = *v;
LOS_IntRestore(intSave);
return val;
}
STATIC INLINE VOID ArchAtomicInc(Atomic *v)
{
(VOID)ArchAtomicAdd(v, 1);
}
STATIC INLINE VOID ArchAtomicDec(Atomic *v)
{
(VOID)ArchAtomicSub(v, 1);
}
STATIC INLINE INT32 ArchAtomicIncRet(Atomic *v)
{
return ArchAtomicAdd(v, 1);
}
STATIC INLINE INT32 ArchAtomicDecRet(Atomic *v)
{
return ArchAtomicSub(v, 1);
}
STATIC INLINE INT32 ArchAtomicXchg32bits(Atomic *v, INT32 val)
{
INT32 prevVal;
UINT32 intSave;
intSave = LOS_IntLock();
prevVal = *v;
*v = val;
LOS_IntRestore(intSave);
return prevVal;
}
STATIC INLINE BOOL ArchAtomicCmpXchg32bits(Atomic *v, INT32 val, INT32 oldVal)
{
INT32 prevVal;
UINT32 intSave;
intSave = LOS_IntLock();
prevVal = *v;
if (prevVal == oldVal) {
*v = val;
}
LOS_IntRestore(intSave);
return prevVal != oldVal;
}
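Note the return convention of ArchAtomicCmpXchg32bits documented above: it returns TRUE when the previous value did not match oldVal (so no store was performed) and FALSE on a successful swap. A lock-free update loop therefore retries while it returns TRUE. A sketch of that pattern (illustrative; AtomicStoreMax is not part of this commit):

STATIC INLINE VOID AtomicStoreMax(Atomic *v, INT32 newVal)
{
    INT32 old;
    do {
        old = ArchAtomicRead(v);
        if (old >= newVal) {
            return;   /* current value already large enough, nothing to do */
        }
        /* TRUE means *v changed between the read and the swap; retry */
    } while (ArchAtomicCmpXchg32bits(v, newVal, old));
}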
STATIC INLINE INT64 ArchAtomic64Read(const Atomic64 *v)
{
INT64 val;
UINT32 intSave;
intSave = LOS_IntLock();
val = *v;
LOS_IntRestore(intSave);
return val;
}
STATIC INLINE VOID ArchAtomic64Set(Atomic64 *v, INT64 setVal)
{
UINT32 intSave;
intSave = LOS_IntLock();
*v = setVal;
LOS_IntRestore(intSave);
}
STATIC INLINE INT64 ArchAtomic64Add(Atomic64 *v, INT64 addVal)
{
INT64 val;
UINT32 intSave;
intSave = LOS_IntLock();
*v += addVal;
val = *v;
LOS_IntRestore(intSave);
return val;
}
STATIC INLINE INT64 ArchAtomic64Sub(Atomic64 *v, INT64 subVal)
{
INT64 val;
UINT32 intSave;
intSave = LOS_IntLock();
*v -= subVal;
val = *v;
LOS_IntRestore(intSave);
return val;
}
STATIC INLINE VOID ArchAtomic64Inc(Atomic64 *v)
{
(VOID)ArchAtomic64Add(v, 1);
}
STATIC INLINE INT64 ArchAtomic64IncRet(Atomic64 *v)
{
return ArchAtomic64Add(v, 1);
}
STATIC INLINE VOID ArchAtomic64Dec(Atomic64 *v)
{
(VOID)ArchAtomic64Sub(v, 1);
}
STATIC INLINE INT64 ArchAtomic64DecRet(Atomic64 *v)
{
return ArchAtomic64Sub(v, 1);
}
STATIC INLINE INT64 ArchAtomicXchg64bits(Atomic64 *v, INT64 val)
{
INT64 prevVal;
UINT32 intSave;
intSave = LOS_IntLock();
prevVal = *v;
*v = val;
LOS_IntRestore(intSave);
return prevVal;
}
STATIC INLINE BOOL ArchAtomicCmpXchg64bits(Atomic64 *v, INT64 val, INT64 oldVal)
{
INT64 prevVal;
UINT32 intSave;
intSave = LOS_IntLock();
prevVal = *v;
if (prevVal == oldVal) {
*v = val;
}
LOS_IntRestore(intSave);
return prevVal != oldVal;
}
#ifdef __cplusplus

View File

@ -40,12 +40,100 @@ extern "C" {
#endif /* __cplusplus */
#endif /* __cplusplus */
STATIC INLINE INT32 ArchAtomicRead(const Atomic *v)
{
INT32 val;
UINT32 intSave;
intSave = LOS_IntLock();
__asm__ __volatile__("ldrex %0, [%1]\n"
: "=&r"(val)
: "r"(v)
: "cc");
LOS_IntRestore(intSave);
return val;
}
STATIC INLINE VOID ArchAtomicSet(Atomic *v, INT32 setVal)
{
UINT32 status;
UINT32 intSave;
intSave = LOS_IntLock();
__asm__ __volatile__("1:ldrex %0, [%2]\n"
" strex %0, %3, [%2]\n"
" teq %0, #0\n"
" beq 1b"
: "=&r"(status), "+m"(*v)
: "r"(v), "r"(setVal)
: "cc");
LOS_IntRestore(intSave);
}
STATIC INLINE INT32 ArchAtomicAdd(Atomic *v, INT32 addVal)
{
INT32 val;
UINT32 status;
do {
__asm__ __volatile__("ldrex %1, [%2]\n"
"add %1, %1, %3\n"
"strex %0, %1, [%2]"
: "=&r"(status), "=&r"(val)
: "r"(v), "r"(addVal)
: "cc");
} while (__builtin_expect(status != 0, 0));
return val;
}
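For reference, the ldrex/strex pair above is the ARM load-exclusive/store-exclusive pattern: strex writes 0 to status only if the exclusive reservation taken by ldrex is still intact, so the do/while loop repeats until the read-modify-write completes without interference. On toolchains that provide the GCC-style __atomic builtins, an equivalent add-and-return-new-value could be written as follows (a hedged alternative for comparison, not what this commit uses):

STATIC INLINE INT32 AtomicAddBuiltin(Atomic *v, INT32 addVal)
{
    /* relaxed ordering matches the bare ldrex/strex loop above,
       which issues no memory barrier */
    return __atomic_add_fetch(v, addVal, __ATOMIC_RELAXED);
}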
STATIC INLINE INT32 ArchAtomicSub(Atomic *v, INT32 subVal)
{
INT32 val;
UINT32 status;
do {
__asm__ __volatile__("ldrex %1, [%2]\n"
"sub %1, %1, %3\n"
"strex %0, %1, [%2]"
: "=&r"(status), "=&r"(val)
: "r"(v), "r"(subVal)
: "cc");
} while (__builtin_expect(status != 0, 0));
return val;
}
STATIC INLINE VOID ArchAtomicInc(Atomic *v)
{
(VOID)ArchAtomicAdd(v, 1);
}
STATIC INLINE VOID ArchAtomicDec(Atomic *v)
{
(VOID)ArchAtomicSub(v, 1);
}
STATIC INLINE INT32 ArchAtomicIncRet(Atomic *v)
{
return ArchAtomicAdd(v, 1);
}
STATIC INLINE INT32 ArchAtomicDecRet(Atomic *v)
{
return ArchAtomicSub(v, 1);
}
/**
* @ingroup los_arch_atomic
* @brief Atomic exchange for 32-bit variable.
*
* @par Description:
* This API is used to implement the atomic exchange for 32-bit variable and return the previous value of the atomic variable.
* This API is used to implement the atomic exchange for 32-bit variable
* and return the previous value of the atomic variable.
* @attention
* <ul>The pointer v must not be NULL.</ul>
*
@ -73,42 +161,6 @@ STATIC INLINE INT32 ArchAtomicXchg32bits(volatile INT32 *v, INT32 val)
return prevVal;
}
/**
* @ingroup los_arch_atomic
* @brief Atomic auto-decrement.
*
* @par Description:
* This API is used to implement the atomic auto-decrement and return the result of auto-decrement.
* @attention
* <ul>
* <li>The pointer v must not be NULL.</li>
* <li>The value that v points to must not be INT_MIN, to avoid overflow after subtracting 1.</li>
* </ul>
*
* @param v [IN] The pointer to the variable to be decremented.
*
* @retval #INT32 The return value of variable auto-decrement.
* @par Dependency:
* <ul><li>los_arch_atomic.h: the header file that contains the API declaration.</li></ul>
* @see
*/
STATIC INLINE INT32 ArchAtomicDecRet(volatile INT32 *v)
{
INT32 val = 0;
UINT32 status = 0;
do {
__asm__ __volatile__("ldrex %0, [%3]\n"
"sub %0, %0, #1\n"
"strex %1, %0, [%3]"
: "=&r"(val), "=&r"(status), "+m"(*v)
: "r"(v)
: "cc");
} while (__builtin_expect(status != 0, 0));
return val;
}
/**
* @ingroup los_arch_atomic
* @brief Atomic exchange for 32-bit variable with compare.
@ -148,6 +200,101 @@ STATIC INLINE BOOL ArchAtomicCmpXchg32bits(volatile INT32 *v, INT32 val, INT32 o
return prevVal != oldVal;
}
STATIC INLINE INT64 ArchAtomic64Read(const Atomic64 *v)
{
INT64 val;
UINT32 intSave;
intSave = LOS_IntLock();
val = *v;
LOS_IntRestore(intSave);
return val;
}
STATIC INLINE VOID ArchAtomic64Set(Atomic64 *v, INT64 setVal)
{
UINT32 intSave;
intSave = LOS_IntLock();
*v = setVal;
LOS_IntRestore(intSave);
}
STATIC INLINE INT64 ArchAtomic64Add(Atomic64 *v, INT64 addVal)
{
INT64 val;
UINT32 intSave;
intSave = LOS_IntLock();
*v += addVal;
val = *v;
LOS_IntRestore(intSave);
return val;
}
STATIC INLINE INT64 ArchAtomic64Sub(Atomic64 *v, INT64 subVal)
{
INT64 val;
UINT32 intSave;
intSave = LOS_IntLock();
*v -= subVal;
val = *v;
LOS_IntRestore(intSave);
return val;
}
STATIC INLINE VOID ArchAtomic64Inc(Atomic64 *v)
{
(VOID)ArchAtomic64Add(v, 1);
}
STATIC INLINE INT64 ArchAtomic64IncRet(Atomic64 *v)
{
return ArchAtomic64Add(v, 1);
}
STATIC INLINE VOID ArchAtomic64Dec(Atomic64 *v)
{
(VOID)ArchAtomic64Sub(v, 1);
}
STATIC INLINE INT64 ArchAtomic64DecRet(Atomic64 *v)
{
return ArchAtomic64Sub(v, 1);
}
STATIC INLINE INT64 ArchAtomicXchg64bits(Atomic64 *v, INT64 val)
{
INT64 prevVal;
UINT32 intSave;
intSave = LOS_IntLock();
prevVal = *v;
*v = val;
LOS_IntRestore(intSave);
return prevVal;
}
STATIC INLINE BOOL ArchAtomicCmpXchg64bits(Atomic64 *v, INT64 val, INT64 oldVal)
{
INT64 prevVal;
UINT32 intSave;
intSave = LOS_IntLock();
prevVal = *v;
if (prevVal == oldVal) {
*v = val;
}
LOS_IntRestore(intSave);
return prevVal != oldVal;
}
#ifdef __cplusplus
#if __cplusplus
}

View File

@ -40,12 +40,100 @@ extern "C" {
#endif /* __cplusplus */
#endif /* __cplusplus */
STATIC INLINE INT32 ArchAtomicRead(const Atomic *v)
{
INT32 val;
UINT32 intSave;
intSave = LOS_IntLock();
__asm__ __volatile__("ldrex %0, [%1]\n"
: "=&r"(val)
: "r"(v)
: "cc");
LOS_IntRestore(intSave);
return val;
}
STATIC INLINE VOID ArchAtomicSet(Atomic *v, INT32 setVal)
{
UINT32 status;
UINT32 intSave;
intSave = LOS_IntLock();
__asm__ __volatile__("1:ldrex %0, [%2]\n"
" strex %0, %3, [%2]\n"
" teq %0, #0\n"
" beq 1b"
: "=&r"(status), "+m"(*v)
: "r"(v), "r"(setVal)
: "cc");
LOS_IntRestore(intSave);
}
STATIC INLINE INT32 ArchAtomicAdd(Atomic *v, INT32 addVal)
{
INT32 val;
UINT32 status;
do {
__asm__ __volatile__("ldrex %1, [%2]\n"
"add %1, %1, %3\n"
"strex %0, %1, [%2]"
: "=&r"(status), "=&r"(val)
: "r"(v), "r"(addVal)
: "cc");
} while (__builtin_expect(status != 0, 0));
return val;
}
STATIC INLINE INT32 ArchAtomicSub(Atomic *v, INT32 subVal)
{
INT32 val;
UINT32 status;
do {
__asm__ __volatile__("ldrex %1, [%2]\n"
"sub %1, %1, %3\n"
"strex %0, %1, [%2]"
: "=&r"(status), "=&r"(val)
: "r"(v), "r"(subVal)
: "cc");
} while (__builtin_expect(status != 0, 0));
return val;
}
STATIC INLINE VOID ArchAtomicInc(Atomic *v)
{
(VOID)ArchAtomicAdd(v, 1);
}
STATIC INLINE VOID ArchAtomicDec(Atomic *v)
{
(VOID)ArchAtomicSub(v, 1);
}
STATIC INLINE INT32 ArchAtomicIncRet(Atomic *v)
{
return ArchAtomicAdd(v, 1);
}
STATIC INLINE INT32 ArchAtomicDecRet(Atomic *v)
{
return ArchAtomicSub(v, 1);
}
/**
* @ingroup los_arch_atomic
* @brief Atomic exchange for 32-bit variable.
*
* @par Description:
* This API is used to implement the atomic exchange for 32-bit variable and return the previous value of the atomic variable.
* This API is used to implement the atomic exchange for 32-bit variable
* and return the previous value of the atomic variable.
* @attention
* <ul>The pointer v must not be NULL.</ul>
*
@ -73,42 +161,6 @@ STATIC INLINE INT32 ArchAtomicXchg32bits(volatile INT32 *v, INT32 val)
return prevVal;
}
/**
* @ingroup los_arch_atomic
* @brief Atomic auto-decrement.
*
* @par Description:
* This API is used to implement the atomic auto-decrement and return the result of auto-decrement.
* @attention
* <ul>
* <li>The pointer v must not be NULL.</li>
* <li>The value that v points to must not be INT_MIN, to avoid overflow after subtracting 1.</li>
* </ul>
*
* @param v [IN] The pointer to the variable to be decremented.
*
* @retval #INT32 The return value of variable auto-decrement.
* @par Dependency:
* <ul><li>los_arch_atomic.h: the header file that contains the API declaration.</li></ul>
* @see
*/
STATIC INLINE INT32 ArchAtomicDecRet(volatile INT32 *v)
{
INT32 val = 0;
UINT32 status = 0;
do {
__asm__ __volatile__("ldrex %0, [%3]\n"
"sub %0, %0, #1\n"
"strex %1, %0, [%3]"
: "=&r"(val), "=&r"(status), "+m"(*v)
: "r"(v)
: "cc");
} while (__builtin_expect(status != 0, 0));
return val;
}
/**
* @ingroup los_arch_atomic
* @brief Atomic exchange for 32-bit variable with compare.
@ -148,6 +200,101 @@ STATIC INLINE BOOL ArchAtomicCmpXchg32bits(volatile INT32 *v, INT32 val, INT32 o
return prevVal != oldVal;
}
STATIC INLINE INT64 ArchAtomic64Read(const Atomic64 *v)
{
INT64 val;
UINT32 intSave;
intSave = LOS_IntLock();
val = *v;
LOS_IntRestore(intSave);
return val;
}
STATIC INLINE VOID ArchAtomic64Set(Atomic64 *v, INT64 setVal)
{
UINT32 intSave;
intSave = LOS_IntLock();
*v = setVal;
LOS_IntRestore(intSave);
}
STATIC INLINE INT64 ArchAtomic64Add(Atomic64 *v, INT64 addVal)
{
INT64 val;
UINT32 intSave;
intSave = LOS_IntLock();
*v += addVal;
val = *v;
LOS_IntRestore(intSave);
return val;
}
STATIC INLINE INT64 ArchAtomic64Sub(Atomic64 *v, INT64 subVal)
{
INT64 val;
UINT32 intSave;
intSave = LOS_IntLock();
*v -= subVal;
val = *v;
LOS_IntRestore(intSave);
return val;
}
STATIC INLINE VOID ArchAtomic64Inc(Atomic64 *v)
{
(VOID)ArchAtomic64Add(v, 1);
}
STATIC INLINE INT64 ArchAtomic64IncRet(Atomic64 *v)
{
return ArchAtomic64Add(v, 1);
}
STATIC INLINE VOID ArchAtomic64Dec(Atomic64 *v)
{
(VOID)ArchAtomic64Sub(v, 1);
}
STATIC INLINE INT64 ArchAtomic64DecRet(Atomic64 *v)
{
return ArchAtomic64Sub(v, 1);
}
STATIC INLINE INT64 ArchAtomicXchg64bits(Atomic64 *v, INT64 val)
{
INT64 prevVal;
UINT32 intSave;
intSave = LOS_IntLock();
prevVal = *v;
*v = val;
LOS_IntRestore(intSave);
return prevVal;
}
STATIC INLINE BOOL ArchAtomicCmpXchg64bits(Atomic64 *v, INT64 val, INT64 oldVal)
{
INT64 prevVal;
UINT32 intSave;
intSave = LOS_IntLock();
prevVal = *v;
if (prevVal == oldVal) {
*v = val;
}
LOS_IntRestore(intSave);
return prevVal != oldVal;
}
#ifdef __cplusplus
#if __cplusplus
}

View File

@ -40,12 +40,100 @@ extern "C" {
#endif /* __cplusplus */
#endif /* __cplusplus */
STATIC INLINE INT32 ArchAtomicRead(const Atomic *v)
{
INT32 val;
UINT32 intSave;
intSave = LOS_IntLock();
__asm__ __volatile__("ldrex %0, [%1]\n"
: "=&r"(val)
: "r"(v)
: "cc");
LOS_IntRestore(intSave);
return val;
}
STATIC INLINE VOID ArchAtomicSet(Atomic *v, INT32 setVal)
{
UINT32 status;
UINT32 intSave;
intSave = LOS_IntLock();
__asm__ __volatile__("1:ldrex %0, [%2]\n"
" strex %0, %3, [%2]\n"
" teq %0, #0\n"
" beq 1b"
: "=&r"(status), "+m"(*v)
: "r"(v), "r"(setVal)
: "cc");
LOS_IntRestore(intSave);
}
STATIC INLINE INT32 ArchAtomicAdd(Atomic *v, INT32 addVal)
{
INT32 val;
UINT32 status;
do {
__asm__ __volatile__("ldrex %1, [%2]\n"
"add %1, %1, %3\n"
"strex %0, %1, [%2]"
: "=&r"(status), "=&r"(val)
: "r"(v), "r"(addVal)
: "cc");
} while (__builtin_expect(status != 0, 0));
return val;
}
STATIC INLINE INT32 ArchAtomicSub(Atomic *v, INT32 subVal)
{
INT32 val;
UINT32 status;
do {
__asm__ __volatile__("ldrex %1, [%2]\n"
"sub %1, %1, %3\n"
"strex %0, %1, [%2]"
: "=&r"(status), "=&r"(val)
: "r"(v), "r"(subVal)
: "cc");
} while (__builtin_expect(status != 0, 0));
return val;
}
STATIC INLINE VOID ArchAtomicInc(Atomic *v)
{
(VOID)ArchAtomicAdd(v, 1);
}
STATIC INLINE VOID ArchAtomicDec(Atomic *v)
{
(VOID)ArchAtomicSub(v, 1);
}
STATIC INLINE INT32 ArchAtomicIncRet(Atomic *v)
{
return ArchAtomicAdd(v, 1);
}
STATIC INLINE INT32 ArchAtomicDecRet(Atomic *v)
{
return ArchAtomicSub(v, 1);
}
/**
* @ingroup los_arch_atomic
* @brief Atomic exchange for 32-bit variable.
*
* @par Description:
* This API is used to implement the atomic exchange for 32-bit variable and return the previous value of the atomic variable.
* This API is used to implement the atomic exchange for 32-bit variable
* and return the previous value of the atomic variable.
* @attention
* <ul>The pointer v must not be NULL.</ul>
*
@ -73,42 +161,6 @@ STATIC INLINE INT32 ArchAtomicXchg32bits(volatile INT32 *v, INT32 val)
return prevVal;
}
/**
* @ingroup los_arch_atomic
* @brief Atomic auto-decrement.
*
* @par Description:
* This API is used to implement the atomic auto-decrement and return the result of auto-decrement.
* @attention
* <ul>
* <li>The pointer v must not be NULL.</li>
* <li>The value that v points to must not be INT_MIN, to avoid overflow after subtracting 1.</li>
* </ul>
*
* @param v [IN] The pointer to the variable to be decremented.
*
* @retval #INT32 The return value of variable auto-decrement.
* @par Dependency:
* <ul><li>los_arch_atomic.h: the header file that contains the API declaration.</li></ul>
* @see
*/
STATIC INLINE INT32 ArchAtomicDecRet(volatile INT32 *v)
{
INT32 val = 0;
UINT32 status = 0;
do {
__asm__ __volatile__("ldrex %0, [%3]\n"
"sub %0, %0, #1\n"
"strex %1, %0, [%3]"
: "=&r"(val), "=&r"(status), "+m"(*v)
: "r"(v)
: "cc");
} while (__builtin_expect(status != 0, 0));
return val;
}
/**
* @ingroup los_arch_atomic
* @brief Atomic exchange for 32-bit variable with compare.
@ -148,6 +200,101 @@ STATIC INLINE BOOL ArchAtomicCmpXchg32bits(volatile INT32 *v, INT32 val, INT32 o
return prevVal != oldVal;
}
STATIC INLINE INT64 ArchAtomic64Read(const Atomic64 *v)
{
INT64 val;
UINT32 intSave;
intSave = LOS_IntLock();
val = *v;
LOS_IntRestore(intSave);
return val;
}
STATIC INLINE VOID ArchAtomic64Set(Atomic64 *v, INT64 setVal)
{
UINT32 intSave;
intSave = LOS_IntLock();
*v = setVal;
LOS_IntRestore(intSave);
}
STATIC INLINE INT64 ArchAtomic64Add(Atomic64 *v, INT64 addVal)
{
INT64 val;
UINT32 intSave;
intSave = LOS_IntLock();
*v += addVal;
val = *v;
LOS_IntRestore(intSave);
return val;
}
STATIC INLINE INT64 ArchAtomic64Sub(Atomic64 *v, INT64 subVal)
{
INT64 val;
UINT32 intSave;
intSave = LOS_IntLock();
*v -= subVal;
val = *v;
LOS_IntRestore(intSave);
return val;
}
STATIC INLINE VOID ArchAtomic64Inc(Atomic64 *v)
{
(VOID)ArchAtomic64Add(v, 1);
}
STATIC INLINE INT64 ArchAtomic64IncRet(Atomic64 *v)
{
return ArchAtomic64Add(v, 1);
}
STATIC INLINE VOID ArchAtomic64Dec(Atomic64 *v)
{
(VOID)ArchAtomic64Sub(v, 1);
}
STATIC INLINE INT64 ArchAtomic64DecRet(Atomic64 *v)
{
return ArchAtomic64Sub(v, 1);
}
STATIC INLINE INT64 ArchAtomicXchg64bits(Atomic64 *v, INT64 val)
{
INT64 prevVal;
UINT32 intSave;
intSave = LOS_IntLock();
prevVal = *v;
*v = val;
LOS_IntRestore(intSave);
return prevVal;
}
STATIC INLINE BOOL ArchAtomicCmpXchg64bits(Atomic64 *v, INT64 val, INT64 oldVal)
{
INT64 prevVal;
UINT32 intSave;
intSave = LOS_IntLock();
prevVal = *v;
if (prevVal == oldVal) {
*v = val;
}
LOS_IntRestore(intSave);
return prevVal != oldVal;
}
#ifdef __cplusplus
#if __cplusplus
}

View File

@ -29,8 +29,8 @@
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef LOS_ATOMIC_H
#define LOS_ATOMIC_H
#ifndef _LOS_ARCH_ATOMIC_H
#define _LOS_ARCH_ATOMIC_H
#include "los_compiler.h"
@ -40,12 +40,100 @@ extern "C" {
#endif /* __cplusplus */
#endif /* __cplusplus */
STATIC INLINE INT32 ArchAtomicRead(const Atomic *v)
{
INT32 val;
UINT32 intSave;
intSave = LOS_IntLock();
__asm__ __volatile__("ldrex %0, [%1]\n"
: "=&r"(val)
: "r"(v)
: "cc");
LOS_IntRestore(intSave);
return val;
}
STATIC INLINE VOID ArchAtomicSet(Atomic *v, INT32 setVal)
{
UINT32 status;
UINT32 intSave;
intSave = LOS_IntLock();
__asm__ __volatile__("1:ldrex %0, [%2]\n"
" strex %0, %3, [%2]\n"
" teq %0, #0\n"
" beq 1b"
: "=&r"(status), "+m"(*v)
: "r"(v), "r"(setVal)
: "cc");
LOS_IntRestore(intSave);
}
STATIC INLINE INT32 ArchAtomicAdd(Atomic *v, INT32 addVal)
{
INT32 val;
UINT32 status;
do {
__asm__ __volatile__("ldrex %1, [%2]\n"
"add %1, %1, %3\n"
"strex %0, %1, [%2]"
: "=&r"(status), "=&r"(val)
: "r"(v), "r"(addVal)
: "cc");
} while (__builtin_expect(status != 0, 0));
return val;
}
STATIC INLINE INT32 ArchAtomicSub(Atomic *v, INT32 subVal)
{
INT32 val;
UINT32 status;
do {
__asm__ __volatile__("ldrex %1, [%2]\n"
"sub %1, %1, %3\n"
"strex %0, %1, [%2]"
: "=&r"(status), "=&r"(val)
: "r"(v), "r"(subVal)
: "cc");
} while (__builtin_expect(status != 0, 0));
return val;
}
STATIC INLINE VOID ArchAtomicInc(Atomic *v)
{
(VOID)ArchAtomicAdd(v, 1);
}
STATIC INLINE VOID ArchAtomicDec(Atomic *v)
{
(VOID)ArchAtomicSub(v, 1);
}
STATIC INLINE INT32 ArchAtomicIncRet(Atomic *v)
{
return ArchAtomicAdd(v, 1);
}
STATIC INLINE INT32 ArchAtomicDecRet(Atomic *v)
{
return ArchAtomicSub(v, 1);
}
/**
* @ingroup los_atomic
* @ingroup los_arch_atomic
* @brief Atomic exchange for 32-bit variable.
*
* @par Description:
* This API is used to implement the atomic exchange for 32-bit variable and return the previous value of the atomic variable.
* This API is used to implement the atomic exchange for 32-bit variable
* and return the previous value of the atomic variable.
* @attention
* <ul>The pointer v must not be NULL.</ul>
*
@ -54,7 +142,7 @@ extern "C" {
*
* @retval #INT32 The previous value of the atomic variable
* @par Dependency:
* <ul><li>los_atomic.h: the header file that contains the API declaration.</li></ul>
* <ul><li>los_arch_atomic.h: the header file that contains the API declaration.</li></ul>
* @see
*/
STATIC INLINE INT32 ArchAtomicXchg32bits(volatile INT32 *v, INT32 val)
@ -74,43 +162,7 @@ STATIC INLINE INT32 ArchAtomicXchg32bits(volatile INT32 *v, INT32 val)
}
/**
* @ingroup los_atomic
* @brief Atomic auto-decrement.
*
* @par Description:
* This API is used to implement the atomic auto-decrement and return the result of auto-decrement.
* @attention
* <ul>
* <li>The pointer v must not be NULL.</li>
* <li>The value that v points to must not be INT_MIN, to avoid overflow after subtracting 1.</li>
* </ul>
*
* @param v [IN] The pointer to the variable to be decremented.
*
* @retval #INT32 The return value of variable auto-decrement.
* @par Dependency:
* <ul><li>los_atomic.h: the header file that contains the API declaration.</li></ul>
* @see
*/
STATIC INLINE INT32 ArchAtomicDecRet(volatile INT32 *v)
{
INT32 val = 0;
UINT32 status = 0;
do {
__asm__ __volatile__("ldrex %0, [%3]\n"
"sub %0, %0, #1\n"
"strex %1, %0, [%3]"
: "=&r"(val), "=&r"(status), "+m"(*v)
: "r"(v)
: "cc");
} while (__builtin_expect(status != 0, 0));
return val;
}
/**
* @ingroup los_atomic
* @ingroup los_arch_atomic
* @brief Atomic exchange for 32-bit variable with compare.
*
* @par Description:
@ -125,7 +177,7 @@ STATIC INLINE INT32 ArchAtomicDecRet(volatile INT32 *v)
* @retval TRUE The previous value of the atomic variable is not equal to oldVal.
* @retval FALSE The previous value of the atomic variable is equal to oldVal.
* @par Dependency:
* <ul><li>los_atomic.h: the header file that contains the API declaration.</li></ul>
* <ul><li>los_arch_atomic.h: the header file that contains the API declaration.</li></ul>
* @see
*/
STATIC INLINE BOOL ArchAtomicCmpXchg32bits(volatile INT32 *v, INT32 val, INT32 oldVal)
@ -148,11 +200,105 @@ STATIC INLINE BOOL ArchAtomicCmpXchg32bits(volatile INT32 *v, INT32 val, INT32 o
return prevVal != oldVal;
}
STATIC INLINE INT64 ArchAtomic64Read(const Atomic64 *v)
{
INT64 val;
UINT32 intSave;
intSave = LOS_IntLock();
val = *v;
LOS_IntRestore(intSave);
return val;
}
STATIC INLINE VOID ArchAtomic64Set(Atomic64 *v, INT64 setVal)
{
UINT32 intSave;
intSave = LOS_IntLock();
*v = setVal;
LOS_IntRestore(intSave);
}
STATIC INLINE INT64 ArchAtomic64Add(Atomic64 *v, INT64 addVal)
{
INT64 val;
UINT32 intSave;
intSave = LOS_IntLock();
*v += addVal;
val = *v;
LOS_IntRestore(intSave);
return val;
}
STATIC INLINE INT64 ArchAtomic64Sub(Atomic64 *v, INT64 subVal)
{
INT64 val;
UINT32 intSave;
intSave = LOS_IntLock();
*v -= subVal;
val = *v;
LOS_IntRestore(intSave);
return val;
}
STATIC INLINE VOID ArchAtomic64Inc(Atomic64 *v)
{
(VOID)ArchAtomic64Add(v, 1);
}
STATIC INLINE INT64 ArchAtomic64IncRet(Atomic64 *v)
{
return ArchAtomic64Add(v, 1);
}
STATIC INLINE VOID ArchAtomic64Dec(Atomic64 *v)
{
(VOID)ArchAtomic64Sub(v, 1);
}
STATIC INLINE INT64 ArchAtomic64DecRet(Atomic64 *v)
{
return ArchAtomic64Sub(v, 1);
}
STATIC INLINE INT64 ArchAtomicXchg64bits(Atomic64 *v, INT64 val)
{
INT64 prevVal;
UINT32 intSave;
intSave = LOS_IntLock();
prevVal = *v;
*v = val;
LOS_IntRestore(intSave);
return prevVal;
}
STATIC INLINE BOOL ArchAtomicCmpXchg64bits(Atomic64 *v, INT64 val, INT64 oldVal)
{
INT64 prevVal;
UINT32 intSave;
intSave = LOS_IntLock();
prevVal = *v;
if (prevVal == oldVal) {
*v = val;
}
LOS_IntRestore(intSave);
return prevVal != oldVal;
}
#ifdef __cplusplus
#if __cplusplus
}
#endif /* __cplusplus */
#endif /* __cplusplus */
#endif /* LOS_ATOMIC_H */
#endif /* _LOS_ARCH_ATOMIC_H */

View File

@ -29,8 +29,8 @@
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef LOS_ATOMIC_H
#define LOS_ATOMIC_H
#ifndef _LOS_ARCH_ATOMIC_H
#define _LOS_ARCH_ATOMIC_H
#include "los_compiler.h"
@ -40,12 +40,100 @@ extern "C" {
#endif /* __cplusplus */
#endif /* __cplusplus */
STATIC INLINE INT32 ArchAtomicRead(const Atomic *v)
{
INT32 val;
UINT32 intSave;
intSave = LOS_IntLock();
__asm__ __volatile__("ldrex %0, [%1]\n"
: "=&r"(val)
: "r"(v)
: "cc");
LOS_IntRestore(intSave);
return val;
}
STATIC INLINE VOID ArchAtomicSet(Atomic *v, INT32 setVal)
{
UINT32 status;
UINT32 intSave;
intSave = LOS_IntLock();
__asm__ __volatile__("1:ldrex %0, [%2]\n"
" strex %0, %3, [%2]\n"
" teq %0, #0\n"
" beq 1b"
: "=&r"(status), "+m"(*v)
: "r"(v), "r"(setVal)
: "cc");
LOS_IntRestore(intSave);
}
STATIC INLINE INT32 ArchAtomicAdd(Atomic *v, INT32 addVal)
{
INT32 val;
UINT32 status;
do {
__asm__ __volatile__("ldrex %1, [%2]\n"
"add %1, %1, %3\n"
"strex %0, %1, [%2]"
: "=&r"(status), "=&r"(val)
: "r"(v), "r"(addVal)
: "cc");
} while (__builtin_expect(status != 0, 0));
return val;
}
STATIC INLINE INT32 ArchAtomicSub(Atomic *v, INT32 subVal)
{
INT32 val;
UINT32 status;
do {
__asm__ __volatile__("ldrex %1, [%2]\n"
"sub %1, %1, %3\n"
"strex %0, %1, [%2]"
: "=&r"(status), "=&r"(val)
: "r"(v), "r"(subVal)
: "cc");
} while (__builtin_expect(status != 0, 0));
return val;
}
STATIC INLINE VOID ArchAtomicInc(Atomic *v)
{
(VOID)ArchAtomicAdd(v, 1);
}
STATIC INLINE VOID ArchAtomicDec(Atomic *v)
{
(VOID)ArchAtomicSub(v, 1);
}
STATIC INLINE INT32 ArchAtomicIncRet(Atomic *v)
{
return ArchAtomicAdd(v, 1);
}
STATIC INLINE INT32 ArchAtomicDecRet(Atomic *v)
{
return ArchAtomicSub(v, 1);
}
/**
* @ingroup los_atomic
* @ingroup los_arch_atomic
* @brief Atomic exchange for 32-bit variable.
*
* @par Description:
* This API is used to implement the atomic exchange for 32-bit variable and return the previous value of the atomic variable.
* This API is used to implement the atomic exchange for 32-bit variable
* and return the previous value of the atomic variable.
* @attention
* <ul>The pointer v must not be NULL.</ul>
*
@ -54,7 +142,7 @@ extern "C" {
*
* @retval #INT32 The previous value of the atomic variable
* @par Dependency:
* <ul><li>los_atomic.h: the header file that contains the API declaration.</li></ul>
* <ul><li>los_arch_atomic.h: the header file that contains the API declaration.</li></ul>
* @see
*/
STATIC INLINE INT32 ArchAtomicXchg32bits(volatile INT32 *v, INT32 val)
@ -74,43 +162,7 @@ STATIC INLINE INT32 ArchAtomicXchg32bits(volatile INT32 *v, INT32 val)
}
/**
* @ingroup los_atomic
* @brief Atomic auto-decrement.
*
* @par Description:
* This API is used to implement the atomic auto-decrement and return the result of auto-decrement.
* @attention
* <ul>
* <li>The pointer v must not be NULL.</li>
* <li>The value that v points to must not be INT_MIN, to avoid overflow after subtracting 1.</li>
* </ul>
*
* @param v [IN] The pointer to the variable to be decremented.
*
* @retval #INT32 The return value of variable auto-decrement.
* @par Dependency:
* <ul><li>los_atomic.h: the header file that contains the API declaration.</li></ul>
* @see
*/
STATIC INLINE INT32 ArchAtomicDecRet(volatile INT32 *v)
{
INT32 val = 0;
UINT32 status = 0;
do {
__asm__ __volatile__("ldrex %0, [%3]\n"
"sub %0, %0, #1\n"
"strex %1, %0, [%3]"
: "=&r"(val), "=&r"(status), "+m"(*v)
: "r"(v)
: "cc");
} while (__builtin_expect(status != 0, 0));
return val;
}
/**
* @ingroup los_atomic
* @ingroup los_arch_atomic
* @brief Atomic exchange for 32-bit variable with compare.
*
* @par Description:
@ -125,7 +177,7 @@ STATIC INLINE INT32 ArchAtomicDecRet(volatile INT32 *v)
* @retval TRUE The previous value of the atomic variable is not equal to oldVal.
* @retval FALSE The previous value of the atomic variable is equal to oldVal.
* @par Dependency:
* <ul><li>los_atomic.h: the header file that contains the API declaration.</li></ul>
* <ul><li>los_arch_atomic.h: the header file that contains the API declaration.</li></ul>
* @see
*/
STATIC INLINE BOOL ArchAtomicCmpXchg32bits(volatile INT32 *v, INT32 val, INT32 oldVal)
@ -148,11 +200,105 @@ STATIC INLINE BOOL ArchAtomicCmpXchg32bits(volatile INT32 *v, INT32 val, INT32 o
return prevVal != oldVal;
}
STATIC INLINE INT64 ArchAtomic64Read(const Atomic64 *v)
{
INT64 val;
UINT32 intSave;
intSave = LOS_IntLock();
val = *v;
LOS_IntRestore(intSave);
return val;
}
STATIC INLINE VOID ArchAtomic64Set(Atomic64 *v, INT64 setVal)
{
UINT32 intSave;
intSave = LOS_IntLock();
*v = setVal;
LOS_IntRestore(intSave);
}
STATIC INLINE INT64 ArchAtomic64Add(Atomic64 *v, INT64 addVal)
{
INT64 val;
UINT32 intSave;
intSave = LOS_IntLock();
*v += addVal;
val = *v;
LOS_IntRestore(intSave);
return val;
}
STATIC INLINE INT64 ArchAtomic64Sub(Atomic64 *v, INT64 subVal)
{
INT64 val;
UINT32 intSave;
intSave = LOS_IntLock();
*v -= subVal;
val = *v;
LOS_IntRestore(intSave);
return val;
}
STATIC INLINE VOID ArchAtomic64Inc(Atomic64 *v)
{
(VOID)ArchAtomic64Add(v, 1);
}
STATIC INLINE INT64 ArchAtomic64IncRet(Atomic64 *v)
{
return ArchAtomic64Add(v, 1);
}
STATIC INLINE VOID ArchAtomic64Dec(Atomic64 *v)
{
(VOID)ArchAtomic64Sub(v, 1);
}
STATIC INLINE INT64 ArchAtomic64DecRet(Atomic64 *v)
{
return ArchAtomic64Sub(v, 1);
}
STATIC INLINE INT64 ArchAtomicXchg64bits(Atomic64 *v, INT64 val)
{
INT64 prevVal;
UINT32 intSave;
intSave = LOS_IntLock();
prevVal = *v;
*v = val;
LOS_IntRestore(intSave);
return prevVal;
}
STATIC INLINE BOOL ArchAtomicCmpXchg64bits(Atomic64 *v, INT64 val, INT64 oldVal)
{
INT64 prevVal;
UINT32 intSave;
intSave = LOS_IntLock();
prevVal = *v;
if (prevVal == oldVal) {
*v = val;
}
LOS_IntRestore(intSave);
return prevVal != oldVal;
}
#ifdef __cplusplus
#if __cplusplus
}
#endif /* __cplusplus */
#endif /* __cplusplus */
#endif /* LOS_ATOMIC_H */
#endif /* _LOS_ARCH_ATOMIC_H */

View File

@ -40,12 +40,100 @@ extern "C" {
#endif /* __cplusplus */
#endif /* __cplusplus */
STATIC INLINE INT32 ArchAtomicRead(const Atomic *v)
{
INT32 val;
UINT32 intSave;
intSave = LOS_IntLock();
__asm__ __volatile__("ldrex %0, [%1]\n"
: "=&r"(val)
: "r"(v)
: "cc");
LOS_IntRestore(intSave);
return val;
}
STATIC INLINE VOID ArchAtomicSet(Atomic *v, INT32 setVal)
{
UINT32 status;
UINT32 intSave;
intSave = LOS_IntLock();
__asm__ __volatile__("1:ldrex %0, [%2]\n"
" strex %0, %3, [%2]\n"
" teq %0, #0\n"
" beq 1b"
: "=&r"(status), "+m"(*v)
: "r"(v), "r"(setVal)
: "cc");
LOS_IntRestore(intSave);
}
STATIC INLINE INT32 ArchAtomicAdd(Atomic *v, INT32 addVal)
{
INT32 val;
UINT32 status;
do {
__asm__ __volatile__("ldrex %1, [%2]\n"
"add %1, %1, %3\n"
"strex %0, %1, [%2]"
: "=&r"(status), "=&r"(val)
: "r"(v), "r"(addVal)
: "cc");
} while (__builtin_expect(status != 0, 0));
return val;
}
STATIC INLINE INT32 ArchAtomicSub(Atomic *v, INT32 subVal)
{
INT32 val;
UINT32 status;
do {
__asm__ __volatile__("ldrex %1, [%2]\n"
"sub %1, %1, %3\n"
"strex %0, %1, [%2]"
: "=&r"(status), "=&r"(val)
: "r"(v), "r"(subVal)
: "cc");
} while (__builtin_expect(status != 0, 0));
return val;
}
STATIC INLINE VOID ArchAtomicInc(Atomic *v)
{
(VOID)ArchAtomicAdd(v, 1);
}
STATIC INLINE VOID ArchAtomicDec(Atomic *v)
{
(VOID)ArchAtomicSub(v, 1);
}
STATIC INLINE INT32 ArchAtomicIncRet(Atomic *v)
{
return ArchAtomicAdd(v, 1);
}
STATIC INLINE INT32 ArchAtomicDecRet(Atomic *v)
{
return ArchAtomicSub(v, 1);
}
/**
* @ingroup los_arch_atomic
* @brief Atomic exchange for 32-bit variable.
*
* @par Description:
* This API is used to implement the atomic exchange for 32-bit variable and return the previous value of the atomic variable.
* This API is used to implement the atomic exchange for 32-bit variable
* and return the previous value of the atomic variable.
* @attention
* <ul>The pointer v must not be NULL.</ul>
*
@ -73,42 +161,6 @@ STATIC INLINE INT32 ArchAtomicXchg32bits(volatile INT32 *v, INT32 val)
return prevVal;
}
/**
* @ingroup los_arch_atomic
* @brief Atomic auto-decrement.
*
* @par Description:
* This API is used to implement the atomic auto-decrement and return the result of auto-decrement.
* @attention
* <ul>
* <li>The pointer v must not be NULL.</li>
* <li>The value that v points to must not be INT_MIN, to avoid overflow after subtracting 1.</li>
* </ul>
*
* @param v [IN] The pointer to the variable to be decremented.
*
* @retval #INT32 The return value of variable auto-decrement.
* @par Dependency:
* <ul><li>los_arch_atomic.h: the header file that contains the API declaration.</li></ul>
* @see
*/
STATIC INLINE INT32 ArchAtomicDecRet(volatile INT32 *v)
{
INT32 val = 0;
UINT32 status = 0;
do {
__asm__ __volatile__("ldrex %0, [%3]\n"
"sub %0, %0, #1\n"
"strex %1, %0, [%3]"
: "=&r"(val), "=&r"(status), "+m"(*v)
: "r"(v)
: "cc");
} while (__builtin_expect(status != 0, 0));
return val;
}
/**
* @ingroup los_arch_atomic
* @brief Atomic exchange for 32-bit variable with compare.
@ -148,6 +200,101 @@ STATIC INLINE BOOL ArchAtomicCmpXchg32bits(volatile INT32 *v, INT32 val, INT32 o
return prevVal != oldVal;
}
STATIC INLINE INT64 ArchAtomic64Read(const Atomic64 *v)
{
INT64 val;
UINT32 intSave;
intSave = LOS_IntLock();
val = *v;
LOS_IntRestore(intSave);
return val;
}
STATIC INLINE VOID ArchAtomic64Set(Atomic64 *v, INT64 setVal)
{
UINT32 intSave;
intSave = LOS_IntLock();
*v = setVal;
LOS_IntRestore(intSave);
}
STATIC INLINE INT64 ArchAtomic64Add(Atomic64 *v, INT64 addVal)
{
INT64 val;
UINT32 intSave;
intSave = LOS_IntLock();
*v += addVal;
val = *v;
LOS_IntRestore(intSave);
return val;
}
STATIC INLINE INT64 ArchAtomic64Sub(Atomic64 *v, INT64 subVal)
{
INT64 val;
UINT32 intSave;
intSave = LOS_IntLock();
*v -= subVal;
val = *v;
LOS_IntRestore(intSave);
return val;
}
STATIC INLINE VOID ArchAtomic64Inc(Atomic64 *v)
{
(VOID)ArchAtomic64Add(v, 1);
}
STATIC INLINE INT64 ArchAtomic64IncRet(Atomic64 *v)
{
return ArchAtomic64Add(v, 1);
}
STATIC INLINE VOID ArchAtomic64Dec(Atomic64 *v)
{
(VOID)ArchAtomic64Sub(v, 1);
}
STATIC INLINE INT64 ArchAtomic64DecRet(Atomic64 *v)
{
return ArchAtomic64Sub(v, 1);
}
STATIC INLINE INT64 ArchAtomicXchg64bits(Atomic64 *v, INT64 val)
{
INT64 prevVal;
UINT32 intSave;
intSave = LOS_IntLock();
prevVal = *v;
*v = val;
LOS_IntRestore(intSave);
return prevVal;
}
STATIC INLINE BOOL ArchAtomicCmpXchg64bits(Atomic64 *v, INT64 val, INT64 oldVal)
{
INT64 prevVal;
UINT32 intSave;
intSave = LOS_IntLock();
prevVal = *v;
if (prevVal == oldVal) {
*v = val;
}
LOS_IntRestore(intSave);
return prevVal != oldVal;
}
#ifdef __cplusplus
#if __cplusplus
}

View File

@ -40,12 +40,100 @@ extern "C" {
#endif /* __cplusplus */
#endif /* __cplusplus */
STATIC INLINE INT32 ArchAtomicRead(const Atomic *v)
{
INT32 val;
UINT32 intSave;
intSave = LOS_IntLock();
__asm__ __volatile__("ldrex %0, [%1]\n"
: "=&r"(val)
: "r"(v)
: "cc");
LOS_IntRestore(intSave);
return val;
}
STATIC INLINE VOID ArchAtomicSet(Atomic *v, INT32 setVal)
{
UINT32 status;
UINT32 intSave;
intSave = LOS_IntLock();
__asm__ __volatile__("1:ldrex %0, [%2]\n"
" strex %0, %3, [%2]\n"
" teq %0, #0\n"
" beq 1b"
: "=&r"(status), "+m"(*v)
: "r"(v), "r"(setVal)
: "cc");
LOS_IntRestore(intSave);
}
STATIC INLINE INT32 ArchAtomicAdd(Atomic *v, INT32 addVal)
{
INT32 val;
UINT32 status;
do {
__asm__ __volatile__("ldrex %1, [%2]\n"
"add %1, %1, %3\n"
"strex %0, %1, [%2]"
: "=&r"(status), "=&r"(val)
: "r"(v), "r"(addVal)
: "cc");
} while (__builtin_expect(status != 0, 0));
return val;
}
STATIC INLINE INT32 ArchAtomicSub(Atomic *v, INT32 subVal)
{
INT32 val;
UINT32 status;
do {
__asm__ __volatile__("ldrex %1, [%2]\n"
"sub %1, %1, %3\n"
"strex %0, %1, [%2]"
: "=&r"(status), "=&r"(val)
: "r"(v), "r"(subVal)
: "cc");
} while (__builtin_expect(status != 0, 0));
return val;
}
STATIC INLINE VOID ArchAtomicInc(Atomic *v)
{
(VOID)ArchAtomicAdd(v, 1);
}
STATIC INLINE VOID ArchAtomicDec(Atomic *v)
{
(VOID)ArchAtomicSub(v, 1);
}
STATIC INLINE INT32 ArchAtomicIncRet(Atomic *v)
{
return ArchAtomicAdd(v, 1);
}
STATIC INLINE INT32 ArchAtomicDecRet(Atomic *v)
{
return ArchAtomicSub(v, 1);
}
/**
* @ingroup los_arch_atomic
* @brief Atomic exchange for 32-bit variable.
*
* @par Description:
* This API is used to implement the atomic exchange for 32-bit variable and return the previous value of the atomic variable.
* This API is used to implement the atomic exchange for 32-bit variable
* and return the previous value of the atomic variable.
* @attention
* <ul>The pointer v must not be NULL.</ul>
*
@ -73,42 +161,6 @@ STATIC INLINE INT32 ArchAtomicXchg32bits(volatile INT32 *v, INT32 val)
return prevVal;
}
/**
* @ingroup los_arch_atomic
* @brief Atomic auto-decrement.
*
* @par Description:
* This API is used to implement the atomic auto-decrement and return the result of auto-decrement.
* @attention
* <ul>
* <li>The pointer v must not be NULL.</li>
* <li>The value that v points to must not be INT_MIN, to avoid overflow after subtracting 1.</li>
* </ul>
*
* @param v [IN] The pointer to the variable to be decremented.
*
* @retval #INT32 The return value of variable auto-decrement.
* @par Dependency:
* <ul><li>los_arch_atomic.h: the header file that contains the API declaration.</li></ul>
* @see
*/
STATIC INLINE INT32 ArchAtomicDecRet(volatile INT32 *v)
{
INT32 val = 0;
UINT32 status = 0;
do {
__asm__ __volatile__("ldrex %0, [%3]\n"
"sub %0, %0, #1\n"
"strex %1, %0, [%3]"
: "=&r"(val), "=&r"(status), "+m"(*v)
: "r"(v)
: "cc");
} while (__builtin_expect(status != 0, 0));
return val;
}
/**
* @ingroup los_arch_atomic
* @brief Atomic exchange for 32-bit variable with compare.
@ -148,6 +200,101 @@ STATIC INLINE BOOL ArchAtomicCmpXchg32bits(volatile INT32 *v, INT32 val, INT32 o
return prevVal != oldVal;
}
STATIC INLINE INT64 ArchAtomic64Read(const Atomic64 *v)
{
INT64 val;
UINT32 intSave;
intSave = LOS_IntLock();
val = *v;
LOS_IntRestore(intSave);
return val;
}
STATIC INLINE VOID ArchAtomic64Set(Atomic64 *v, INT64 setVal)
{
UINT32 intSave;
intSave = LOS_IntLock();
*v = setVal;
LOS_IntRestore(intSave);
}
STATIC INLINE INT64 ArchAtomic64Add(Atomic64 *v, INT64 addVal)
{
INT64 val;
UINT32 intSave;
intSave = LOS_IntLock();
*v += addVal;
val = *v;
LOS_IntRestore(intSave);
return val;
}
STATIC INLINE INT64 ArchAtomic64Sub(Atomic64 *v, INT64 subVal)
{
INT64 val;
UINT32 intSave;
intSave = LOS_IntLock();
*v -= subVal;
val = *v;
LOS_IntRestore(intSave);
return val;
}
STATIC INLINE VOID ArchAtomic64Inc(Atomic64 *v)
{
(VOID)ArchAtomic64Add(v, 1);
}
STATIC INLINE INT64 ArchAtomic64IncRet(Atomic64 *v)
{
return ArchAtomic64Add(v, 1);
}
STATIC INLINE VOID ArchAtomic64Dec(Atomic64 *v)
{
(VOID)ArchAtomic64Sub(v, 1);
}
STATIC INLINE INT64 ArchAtomic64DecRet(Atomic64 *v)
{
return ArchAtomic64Sub(v, 1);
}
STATIC INLINE INT64 ArchAtomicXchg64bits(Atomic64 *v, INT64 val)
{
INT64 prevVal;
UINT32 intSave;
intSave = LOS_IntLock();
prevVal = *v;
*v = val;
LOS_IntRestore(intSave);
return prevVal;
}
STATIC INLINE BOOL ArchAtomicCmpXchg64bits(Atomic64 *v, INT64 val, INT64 oldVal)
{
INT64 prevVal;
UINT32 intSave;
intSave = LOS_IntLock();
prevVal = *v;
if (prevVal == oldVal) {
*v = val;
}
LOS_IntRestore(intSave);
return prevVal != oldVal;
}
#ifdef __cplusplus
#if __cplusplus
}
@ -155,4 +302,3 @@ STATIC INLINE BOOL ArchAtomicCmpXchg32bits(volatile INT32 *v, INT32 val, INT32 o
#endif /* __cplusplus */
#endif /* _LOS_ARCH_ATOMIC_H */

View File

@ -40,12 +40,100 @@ extern "C" {
#endif /* __cplusplus */
#endif /* __cplusplus */
STATIC INLINE INT32 ArchAtomicRead(const Atomic *v)
{
INT32 val;
UINT32 intSave;
intSave = LOS_IntLock();
__asm__ __volatile__("ldrex %0, [%1]\n"
: "=&r"(val)
: "r"(v)
: "cc");
LOS_IntRestore(intSave);
return val;
}
STATIC INLINE VOID ArchAtomicSet(Atomic *v, INT32 setVal)
{
UINT32 status;
UINT32 intSave;
intSave = LOS_IntLock();
__asm__ __volatile__("1:ldrex %0, [%2]\n"
" strex %0, %3, [%2]\n"
" teq %0, #0\n"
" beq 1b"
: "=&r"(status), "+m"(*v)
: "r"(v), "r"(setVal)
: "cc");
LOS_IntRestore(intSave);
}
STATIC INLINE INT32 ArchAtomicAdd(Atomic *v, INT32 addVal)
{
INT32 val;
UINT32 status;
do {
__asm__ __volatile__("ldrex %1, [%2]\n"
"add %1, %1, %3\n"
"strex %0, %1, [%2]"
: "=&r"(status), "=&r"(val)
: "r"(v), "r"(addVal)
: "cc");
} while (__builtin_expect(status != 0, 0));
return val;
}
STATIC INLINE INT32 ArchAtomicSub(Atomic *v, INT32 subVal)
{
INT32 val;
UINT32 status;
do {
__asm__ __volatile__("ldrex %1, [%2]\n"
"sub %1, %1, %3\n"
"strex %0, %1, [%2]"
: "=&r"(status), "=&r"(val)
: "r"(v), "r"(subVal)
: "cc");
} while (__builtin_expect(status != 0, 0));
return val;
}
STATIC INLINE VOID ArchAtomicInc(Atomic *v)
{
(VOID)ArchAtomicAdd(v, 1);
}
STATIC INLINE VOID ArchAtomicDec(Atomic *v)
{
(VOID)ArchAtomicSub(v, 1);
}
STATIC INLINE INT32 ArchAtomicIncRet(Atomic *v)
{
return ArchAtomicAdd(v, 1);
}
STATIC INLINE INT32 ArchAtomicDecRet(Atomic *v)
{
return ArchAtomicSub(v, 1);
}
/**
* @ingroup los_arch_atomic
* @brief Atomic exchange for 32-bit variable.
*
* @par Description:
* This API is used to implement the atomic exchange for 32-bit variable and return the previous value of the atomic variable.
* This API is used to implement the atomic exchange for 32-bit variable
* and return the previous value of the atomic variable.
* @attention
* <ul>The pointer v must not be NULL.</ul>
*
@ -73,42 +161,6 @@ STATIC INLINE INT32 ArchAtomicXchg32bits(volatile INT32 *v, INT32 val)
return prevVal;
}
/**
* @ingroup los_arch_atomic
* @brief Atomic auto-decrement.
*
* @par Description:
* This API is used to implement the atomic auto-decrement and return the result of auto-decrement.
* @attention
* <ul>
* <li>The pointer v must not be NULL.</li>
* <li>The value that v points to must not be INT_MIN, to avoid overflow after subtracting 1.</li>
* </ul>
*
* @param v [IN] The pointer to the variable to be decremented.
*
* @retval #INT32 The return value of variable auto-decrement.
* @par Dependency:
* <ul><li>los_arch_atomic.h: the header file that contains the API declaration.</li></ul>
* @see
*/
STATIC INLINE INT32 ArchAtomicDecRet(volatile INT32 *v)
{
INT32 val = 0;
UINT32 status = 0;
do {
__asm__ __volatile__("ldrex %0, [%3]\n"
"sub %0, %0, #1\n"
"strex %1, %0, [%3]"
: "=&r"(val), "=&r"(status), "+m"(*v)
: "r"(v)
: "cc");
} while (__builtin_expect(status != 0, 0));
return val;
}
/**
* @ingroup los_arch_atomic
* @brief Atomic exchange for 32-bit variable with compare.
@ -148,6 +200,101 @@ STATIC INLINE BOOL ArchAtomicCmpXchg32bits(volatile INT32 *v, INT32 val, INT32 o
return prevVal != oldVal;
}
STATIC INLINE INT64 ArchAtomic64Read(const Atomic64 *v)
{
INT64 val;
UINT32 intSave;
intSave = LOS_IntLock();
val = *v;
LOS_IntRestore(intSave);
return val;
}
STATIC INLINE VOID ArchAtomic64Set(Atomic64 *v, INT64 setVal)
{
UINT32 intSave;
intSave = LOS_IntLock();
*v = setVal;
LOS_IntRestore(intSave);
}
STATIC INLINE INT64 ArchAtomic64Add(Atomic64 *v, INT64 addVal)
{
INT64 val;
UINT32 intSave;
intSave = LOS_IntLock();
*v += addVal;
val = *v;
LOS_IntRestore(intSave);
return val;
}
STATIC INLINE INT64 ArchAtomic64Sub(Atomic64 *v, INT64 subVal)
{
INT64 val;
UINT32 intSave;
intSave = LOS_IntLock();
*v -= subVal;
val = *v;
LOS_IntRestore(intSave);
return val;
}
STATIC INLINE VOID ArchAtomic64Inc(Atomic64 *v)
{
(VOID)ArchAtomic64Add(v, 1);
}
STATIC INLINE INT64 ArchAtomic64IncRet(Atomic64 *v)
{
return ArchAtomic64Add(v, 1);
}
STATIC INLINE VOID ArchAtomic64Dec(Atomic64 *v)
{
(VOID)ArchAtomic64Sub(v, 1);
}
STATIC INLINE INT64 ArchAtomic64DecRet(Atomic64 *v)
{
return ArchAtomic64Sub(v, 1);
}
STATIC INLINE INT64 ArchAtomicXchg64bits(Atomic64 *v, INT64 val)
{
INT64 prevVal;
UINT32 intSave;
intSave = LOS_IntLock();
prevVal = *v;
*v = val;
LOS_IntRestore(intSave);
return prevVal;
}
STATIC INLINE BOOL ArchAtomicCmpXchg64bits(Atomic64 *v, INT64 val, INT64 oldVal)
{
INT64 prevVal;
UINT32 intSave;
intSave = LOS_IntLock();
prevVal = *v;
if (prevVal == oldVal) {
*v = val;
}
LOS_IntRestore(intSave);
return prevVal != oldVal;
}
#ifdef __cplusplus
#if __cplusplus
}
@ -155,4 +302,3 @@ STATIC INLINE BOOL ArchAtomicCmpXchg32bits(volatile INT32 *v, INT32 val, INT32 o
#endif /* __cplusplus */
#endif /* _LOS_ARCH_ATOMIC_H */

View File

@ -40,12 +40,100 @@ extern "C" {
#endif /* __cplusplus */
#endif /* __cplusplus */
STATIC INLINE INT32 ArchAtomicRead(const Atomic *v)
{
INT32 val;
UINT32 intSave;
intSave = LOS_IntLock();
__asm__ __volatile__("ldrex %0, [%1]\n"
: "=&r"(val)
: "r"(v)
: "cc");
LOS_IntRestore(intSave);
return val;
}
STATIC INLINE VOID ArchAtomicSet(Atomic *v, INT32 setVal)
{
UINT32 status;
UINT32 intSave;
intSave = LOS_IntLock();
__asm__ __volatile__("1:ldrex %0, [%2]\n"
" strex %0, %3, [%2]\n"
" teq %0, #0\n"
" beq 1b"
: "=&r"(status), "+m"(*v)
: "r"(v), "r"(setVal)
: "cc");
LOS_IntRestore(intSave);
}
STATIC INLINE INT32 ArchAtomicAdd(Atomic *v, INT32 addVal)
{
INT32 val;
UINT32 status;
do {
__asm__ __volatile__("ldrex %1, [%2]\n"
"add %1, %1, %3\n"
"strex %0, %1, [%2]"
: "=&r"(status), "=&r"(val)
: "r"(v), "r"(addVal)
: "cc");
} while (__builtin_expect(status != 0, 0));
return val;
}
STATIC INLINE INT32 ArchAtomicSub(Atomic *v, INT32 subVal)
{
INT32 val;
UINT32 status;
do {
__asm__ __volatile__("ldrex %1, [%2]\n"
"sub %1, %1, %3\n"
"strex %0, %1, [%2]"
: "=&r"(status), "=&r"(val)
: "r"(v), "r"(subVal)
: "cc");
} while (__builtin_expect(status != 0, 0));
return val;
}
STATIC INLINE VOID ArchAtomicInc(Atomic *v)
{
(VOID)ArchAtomicAdd(v, 1);
}
STATIC INLINE VOID ArchAtomicDec(Atomic *v)
{
(VOID)ArchAtomicSub(v, 1);
}
STATIC INLINE INT32 ArchAtomicIncRet(Atomic *v)
{
return ArchAtomicAdd(v, 1);
}
STATIC INLINE INT32 ArchAtomicDecRet(Atomic *v)
{
return ArchAtomicSub(v, 1);
}
/**
* @ingroup los_arch_atomic
* @brief Atomic exchange for 32-bit variable.
*
* @par Description:
* This API is used to implement the atomic exchange for 32-bit variable and return the previous value of the atomic variable.
* This API is used to implement the atomic exchange for 32-bit variable
* and return the previous value of the atomic variable.
* @attention
* <ul>The pointer v must not be NULL.</ul>
*
@ -73,42 +161,6 @@ STATIC INLINE INT32 ArchAtomicXchg32bits(volatile INT32 *v, INT32 val)
return prevVal;
}
/**
* @ingroup los_arch_atomic
* @brief Atomic auto-decrement.
*
* @par Description:
* This API is used to implement the atomic auto-decrement and return the result of auto-decrement.
* @attention
* <ul>
* <li>The pointer v must not be NULL.</li>
* <li>The value that v points to must not be INT_MIN, to avoid overflow after subtracting 1.</li>
* </ul>
*
* @param v [IN] The pointer to the variable to be decremented.
*
* @retval #INT32 The return value of variable auto-decrement.
* @par Dependency:
* <ul><li>los_arch_atomic.h: the header file that contains the API declaration.</li></ul>
* @see
*/
STATIC INLINE INT32 ArchAtomicDecRet(volatile INT32 *v)
{
INT32 val = 0;
UINT32 status = 0;
do {
__asm__ __volatile__("ldrex %0, [%3]\n"
"sub %0, %0, #1\n"
"strex %1, %0, [%3]"
: "=&r"(val), "=&r"(status), "+m"(*v)
: "r"(v)
: "cc");
} while (__builtin_expect(status != 0, 0));
return val;
}
/**
* @ingroup los_arch_atomic
* @brief Atomic exchange for 32-bit variable with compare.
@ -148,6 +200,101 @@ STATIC INLINE BOOL ArchAtomicCmpXchg32bits(volatile INT32 *v, INT32 val, INT32 o
return prevVal != oldVal;
}
STATIC INLINE INT64 ArchAtomic64Read(const Atomic64 *v)
{
INT64 val;
UINT32 intSave;
intSave = LOS_IntLock();
val = *v;
LOS_IntRestore(intSave);
return val;
}
STATIC INLINE VOID ArchAtomic64Set(Atomic64 *v, INT64 setVal)
{
UINT32 intSave;
intSave = LOS_IntLock();
*v = setVal;
LOS_IntRestore(intSave);
}
STATIC INLINE INT64 ArchAtomic64Add(Atomic64 *v, INT64 addVal)
{
INT64 val;
UINT32 intSave;
intSave = LOS_IntLock();
*v += addVal;
val = *v;
LOS_IntRestore(intSave);
return val;
}
STATIC INLINE INT64 ArchAtomic64Sub(Atomic64 *v, INT64 subVal)
{
INT64 val;
UINT32 intSave;
intSave = LOS_IntLock();
*v -= subVal;
val = *v;
LOS_IntRestore(intSave);
return val;
}
STATIC INLINE VOID ArchAtomic64Inc(Atomic64 *v)
{
(VOID)ArchAtomic64Add(v, 1);
}
STATIC INLINE INT64 ArchAtomic64IncRet(Atomic64 *v)
{
return ArchAtomic64Add(v, 1);
}
STATIC INLINE VOID ArchAtomic64Dec(Atomic64 *v)
{
(VOID)ArchAtomic64Sub(v, 1);
}
STATIC INLINE INT64 ArchAtomic64DecRet(Atomic64 *v)
{
return ArchAtomic64Sub(v, 1);
}
STATIC INLINE INT64 ArchAtomicXchg64bits(Atomic64 *v, INT64 val)
{
INT64 prevVal;
UINT32 intSave;
intSave = LOS_IntLock();
prevVal = *v;
*v = val;
LOS_IntRestore(intSave);
return prevVal;
}
STATIC INLINE BOOL ArchAtomicCmpXchg64bits(Atomic64 *v, INT64 val, INT64 oldVal)
{
INT64 prevVal;
UINT32 intSave;
intSave = LOS_IntLock();
prevVal = *v;
if (prevVal == oldVal) {
*v = val;
}
LOS_IntRestore(intSave);
return prevVal != oldVal;
}
#ifdef __cplusplus
#if __cplusplus
}
@ -155,4 +302,3 @@ STATIC INLINE BOOL ArchAtomicCmpXchg32bits(volatile INT32 *v, INT32 val, INT32 o
#endif /* __cplusplus */
#endif /* _LOS_ARCH_ATOMIC_H */

View File

@ -0,0 +1,300 @@
/*
* Copyright (c) 2013-2019 Huawei Technologies Co., Ltd. All rights reserved.
* Copyright (c) 2020-2021 Huawei Device Co., Ltd. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this list of
* conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice, this list
* of conditions and the following disclaimer in the documentation and/or other materials
* provided with the distribution.
*
* 3. Neither the name of the copyright holder nor the names of its contributors may be used
* to endorse or promote products derived from this software without specific prior written
* permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef _LOS_ARCH_ATOMIC_H
#define _LOS_ARCH_ATOMIC_H
#include "los_compiler.h"
#ifdef __cplusplus
#if __cplusplus
extern "C" {
#endif /* __cplusplus */
#endif /* __cplusplus */
STATIC INLINE INT32 ArchAtomicRead(const Atomic *v)
{
INT32 val;
UINT32 intSave;
intSave = LOS_IntLock();
__asm__ __volatile__("ldw %0, (%1)\n"
: "=&r"(val)
: "r"(v)
: "cc");
LOS_IntRestore(intSave);
return val;
}
STATIC INLINE VOID ArchAtomicSet(Atomic *v, INT32 setVal)
{
UINT32 intSave;
intSave = LOS_IntLock();
__asm__ __volatile__("stw %0, (%1, 0)"
: "=&r"(setVal)
: "r"(v)
: "cc");
LOS_IntRestore(intSave);
}
STATIC INLINE INT32 ArchAtomicAdd(Atomic *v, INT32 addVal)
{
INT32 val;
UINT32 intSave;
intSave = LOS_IntLock();
__asm__ __volatile__("ldw %0, (%1)\n"
"add %0, %0, %2\n"
"stw %0, (%1, 0)"
: "=&r"(val)
: "r"(v), "r"(addVal)
: "cc");
LOS_IntRestore(intSave);
return val;
}
STATIC INLINE INT32 ArchAtomicSub(Atomic *v, INT32 subVal)
{
INT32 val;
UINT32 intSave;
intSave = LOS_IntLock();
__asm__ __volatile__("ldw %0, (%1)\n"
"sub %0, %2\n"
"stw %0, (%1, 0)"
: "=&r"(val)
: "r"(v), "r"(subVal)
: "cc");
LOS_IntRestore(intSave);
return val;
}
STATIC INLINE VOID ArchAtomicInc(Atomic *v)
{
(VOID)ArchAtomicAdd(v, 1);
}
STATIC INLINE VOID ArchAtomicDec(Atomic *v)
{
(VOID)ArchAtomicSub(v, 1);
}
STATIC INLINE INT32 ArchAtomicIncRet(Atomic *v)
{
return ArchAtomicAdd(v, 1);
}
STATIC INLINE INT32 ArchAtomicDecRet(Atomic *v)
{
return ArchAtomicSub(v, 1);
}
/**
* @ingroup los_arch_atomic
* @brief Atomic exchange for 32-bit variable.
*
* @par Description:
* This API is used to implement the atomic exchange for 32-bit variable
* and return the previous value of the atomic variable.
* @attention
* <ul>The pointer v must not be NULL.</ul>
*
* @param v [IN] The variable pointer.
* @param val [IN] The exchange value.
*
* @retval #INT32 The previous value of the atomic variable
* @par Dependency:
* <ul><li>los_arch_atomic.h: the header file that contains the API declaration.</li></ul>
* @see
*/
STATIC INLINE INT32 ArchAtomicXchg32bits(volatile INT32 *v, INT32 val)
{
INT32 prevVal = 0;
UINT32 intSave;
intSave = LOS_IntLock();
__asm__ __volatile__("ldw %0, (%1)\n"
"stw %2, (%1)"
: "=&r"(prevVal)
: "r"(v), "r"(val)
: "cc");
LOS_IntRestore(intSave);
return prevVal;
}
/**
* @ingroup los_arch_atomic
* @brief Atomic exchange for 32-bit variable with compare.
*
* @par Description:
* This API is used to implement the atomic exchange for 32-bit variable, if the value of variable is equal to oldVal.
* @attention
* <ul>The pointer v must not be NULL.</ul>
*
* @param v [IN] The variable pointer.
* @param val [IN] The new value.
* @param oldVal [IN] The old value.
*
* @retval TRUE The previous value of the atomic variable is not equal to oldVal.
* @retval FALSE The previous value of the atomic variable is equal to oldVal.
* @par Dependency:
* <ul><li>los_arch_atomic.h: the header file that contains the API declaration.</li></ul>
* @see
*/
STATIC INLINE BOOL ArchAtomicCmpXchg32bits(volatile INT32 *v, INT32 val, INT32 oldVal)
{
INT32 prevVal = 0;
UINT32 intSave;
intSave = LOS_IntLock();
__asm__ __volatile__("1: ldw %0, (%1)\n"
" cmpne %0, %2\n"
" bt 2f\n"
" stw %3, (%1)\n"
"2:"
: "=&r"(prevVal)
: "r"(v), "r"(oldVal), "r"(val)
: "cc");
LOS_IntRestore(intSave);
return prevVal != oldVal;
}
STATIC INLINE INT64 ArchAtomic64Read(const Atomic64 *v)
{
INT64 val;
UINT32 intSave;
intSave = LOS_IntLock();
val = *v;
LOS_IntRestore(intSave);
return val;
}
STATIC INLINE VOID ArchAtomic64Set(Atomic64 *v, INT64 setVal)
{
UINT32 intSave;
intSave = LOS_IntLock();
*v = setVal;
LOS_IntRestore(intSave);
}
STATIC INLINE INT64 ArchAtomic64Add(Atomic64 *v, INT64 addVal)
{
INT64 val;
UINT32 intSave;
intSave = LOS_IntLock();
*v += addVal;
val = *v;
LOS_IntRestore(intSave);
return val;
}
STATIC INLINE INT64 ArchAtomic64Sub(Atomic64 *v, INT64 subVal)
{
INT64 val;
UINT32 intSave;
intSave = LOS_IntLock();
*v -= subVal;
val = *v;
LOS_IntRestore(intSave);
return val;
}
STATIC INLINE VOID ArchAtomic64Inc(Atomic64 *v)
{
(VOID)ArchAtomic64Add(v, 1);
}
STATIC INLINE INT64 ArchAtomic64IncRet(Atomic64 *v)
{
return ArchAtomic64Add(v, 1);
}
STATIC INLINE VOID ArchAtomic64Dec(Atomic64 *v)
{
(VOID)ArchAtomic64Sub(v, 1);
}
STATIC INLINE INT64 ArchAtomic64DecRet(Atomic64 *v)
{
return ArchAtomic64Sub(v, 1);
}
STATIC INLINE INT64 ArchAtomicXchg64bits(Atomic64 *v, INT64 val)
{
INT64 prevVal;
UINT32 intSave;
intSave = LOS_IntLock();
prevVal = *v;
*v = val;
LOS_IntRestore(intSave);
return prevVal;
}
STATIC INLINE BOOL ArchAtomicCmpXchg64bits(Atomic64 *v, INT64 val, INT64 oldVal)
{
INT64 prevVal;
UINT32 intSave;
intSave = LOS_IntLock();
prevVal = *v;
if (prevVal == oldVal) {
*v = val;
}
LOS_IntRestore(intSave);
return prevVal != oldVal;
}
#ifdef __cplusplus
#if __cplusplus
}
#endif /* __cplusplus */
#endif /* __cplusplus */
#endif /* _LOS_ARCH_ATOMIC_H */

View File

@ -33,6 +33,7 @@
#define _LOS_ATOMIC_H
#include "los_compiler.h"
#include "los_arch_atomic.h"
#ifdef __cplusplus
#if __cplusplus
@ -40,190 +41,6 @@ extern "C" {
#endif /* __cplusplus */
#endif /* __cplusplus */
typedef volatile INT32 Atomic;
typedef volatile INT64 Atomic64;
STATIC INLINE INT32 ArchAtomicRead(const Atomic *v)
{
return *v;
}
STATIC INLINE VOID ArchAtomicSet(Atomic *v, INT32 setVal)
{
*v = setVal;
}
STATIC INLINE INT32 ArchAtomicAdd(Atomic *v, INT32 addVal)
{
INT32 val;
UINT32 intSave;
intSave = LOS_IntLock();
*v += addVal;
val = *v;
LOS_IntRestore(intSave);
return val;
}
STATIC INLINE INT32 ArchAtomicSub(Atomic *v, INT32 subVal)
{
INT32 val;
UINT32 intSave;
intSave = LOS_IntLock();
*v -= subVal;
val = *v;
LOS_IntRestore(intSave);
return val;
}
STATIC INLINE VOID ArchAtomicInc(Atomic *v)
{
(VOID)ArchAtomicAdd(v, 1);
}
STATIC INLINE VOID ArchAtomicDec(Atomic *v)
{
(VOID)ArchAtomicSub(v, 1);
}
STATIC INLINE INT32 ArchAtomicIncRet(Atomic *v)
{
return ArchAtomicAdd(v, 1);
}
#ifndef ARCH_ARM
STATIC INLINE INT32 ArchAtomicDecRet(Atomic *v)
{
return ArchAtomicSub(v, 1);
}
STATIC INLINE INT32 ArchAtomicXchg32bits(Atomic *v, INT32 val)
{
INT32 prevVal;
UINT32 intSave;
intSave = LOS_IntLock();
prevVal = *v;
*v = val;
LOS_IntRestore(intSave);
return prevVal;
}
STATIC INLINE BOOL ArchAtomicCmpXchg32bits(Atomic *v, INT32 val, INT32 oldVal)
{
INT32 prevVal;
UINT32 intSave;
intSave = LOS_IntLock();
prevVal = *v;
if (prevVal == oldVal) {
*v = val;
}
LOS_IntRestore(intSave);
return prevVal != oldVal;
}
#endif
STATIC INLINE INT64 ArchAtomic64Read(const Atomic64 *v)
{
INT64 val;
UINT32 intSave;
intSave = LOS_IntLock();
val = *v;
LOS_IntRestore(intSave);
return val;
}
STATIC INLINE VOID ArchAtomic64Set(Atomic64 *v, INT64 setVal)
{
UINT32 intSave;
intSave = LOS_IntLock();
*v = setVal;
LOS_IntRestore(intSave);
}
STATIC INLINE INT64 ArchAtomic64Add(Atomic64 *v, INT64 addVal)
{
INT64 val;
UINT32 intSave;
intSave = LOS_IntLock();
*v += addVal;
val = *v;
LOS_IntRestore(intSave);
return val;
}
STATIC INLINE INT64 ArchAtomic64Sub(Atomic64 *v, INT64 subVal)
{
INT64 val;
UINT32 intSave;
intSave = LOS_IntLock();
*v -= subVal;
val = *v;
LOS_IntRestore(intSave);
return val;
}
STATIC INLINE VOID ArchAtomic64Inc(Atomic64 *v)
{
(VOID)ArchAtomic64Add(v, 1);
}
STATIC INLINE INT64 ArchAtomic64IncRet(Atomic64 *v)
{
return ArchAtomic64Add(v, 1);
}
STATIC INLINE VOID ArchAtomic64Dec(Atomic64 *v)
{
(VOID)ArchAtomic64Sub(v, 1);
}
STATIC INLINE INT64 ArchAtomic64DecRet(Atomic64 *v)
{
return ArchAtomic64Sub(v, 1);
}
STATIC INLINE INT64 ArchAtomicXchg64bits(Atomic64 *v, INT64 val)
{
INT64 prevVal;
UINT32 intSave;
intSave = LOS_IntLock();
prevVal = *v;
*v = val;
LOS_IntRestore(intSave);
return prevVal;
}
STATIC INLINE BOOL ArchAtomicCmpXchg64bits(Atomic64 *v, INT64 val, INT64 oldVal)
{
INT64 prevVal;
UINT32 intSave;
intSave = LOS_IntLock();
prevVal = *v;
if (prevVal == oldVal) {
*v = val;
}
LOS_IntRestore(intSave);
return prevVal != oldVal;
}
#define LOS_AtomicRead ArchAtomicRead
#define LOS_AtomicSet ArchAtomicSet
#define LOS_AtomicAdd ArchAtomicAdd
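
With the implementations moved into the per-architecture headers, existing callers are unchanged: the LOS_Atomic* macros above simply forward to the Arch* inline functions pulled in through los_arch_atomic.h. A minimal sketch (the names are hypothetical, not from this commit):

#include "los_atomic.h"

STATIC Atomic g_demoOpenCount = 0;   /* hypothetical module-local counter */

VOID DemoOpen(VOID)
{
    (VOID)LOS_AtomicAdd(&g_demoOpenCount, 1);   /* expands to ArchAtomicAdd on every port */
}

INT32 DemoOpenCount(VOID)
{
    return LOS_AtomicRead(&g_demoOpenCount);    /* expands to ArchAtomicRead */
}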

View File

@ -0,0 +1,303 @@
/*
* Copyright (c) 2013-2019 Huawei Technologies Co., Ltd. All rights reserved.
* Copyright (c) 2020-2021 Huawei Device Co., Ltd. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this list of
* conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice, this list
* of conditions and the following disclaimer in the documentation and/or other materials
* provided with the distribution.
*
* 3. Neither the name of the copyright holder nor the names of its contributors may be used
* to endorse or promote products derived from this software without specific prior written
* permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef _LOS_ARCH_ATOMIC_H
#define _LOS_ARCH_ATOMIC_H
#include "los_compiler.h"
#ifdef __cplusplus
#if __cplusplus
extern "C" {
#endif /* __cplusplus */
#endif /* __cplusplus */
STATIC INLINE INT32 ArchAtomicRead(const Atomic *v)
{
INT32 val;
UINT32 intSave;
intSave = LOS_IntLock();
__asm__ __volatile__("lr.w %0, (%1)\n"
"fence rw, rw\n"
: "=&r"(val)
: "r"(v)
: "memory");
LOS_IntRestore(intSave);
return val;
}
STATIC INLINE VOID ArchAtomicSet(Atomic *v, INT32 setVal)
{
UINT32 prevVal;
UINT32 intSave;
intSave = LOS_IntLock();
__asm__ __volatile__("amoswap.w %0, %2, (%1)\n"
: "=r"(prevVal)
: "r"(v), "r"(setVal)
: "memory");
LOS_IntRestore(intSave);
}
STATIC INLINE INT32 ArchAtomicAdd(Atomic *v, INT32 addVal)
{
INT32 val;
UINT32 intSave;
intSave = LOS_IntLock();
__asm__ __volatile__("amoadd.w %0, %2, (%1)\n"
"lw %0, (%1)\n"
"fence rw, rw\n"
: "=&r"(val)
: "r"(v), "r"(addVal)
: "memory");
LOS_IntRestore(intSave);
return val;
}
STATIC INLINE INT32 ArchAtomicSub(Atomic *v, INT32 subVal)
{
INT32 val;
UINT32 intSave;
intSave = LOS_IntLock();
__asm__ __volatile__("amoadd.w %0, %2, (%1)\n"
"lw %0, (%1)\n"
"fence rw, rw\n"
: "=&r"(val)
: "r"(v), "r"(-subVal)
: "memory");
LOS_IntRestore(intSave);
return val;
}
STATIC INLINE VOID ArchAtomicInc(Atomic *v)
{
(VOID)ArchAtomicAdd(v, 1);
}
STATIC INLINE VOID ArchAtomicDec(Atomic *v)
{
(VOID)ArchAtomicSub(v, 1);
}
STATIC INLINE INT32 ArchAtomicIncRet(Atomic *v)
{
return ArchAtomicAdd(v, 1);
}
STATIC INLINE INT32 ArchAtomicDecRet(Atomic *v)
{
return ArchAtomicSub(v, 1);
}
/**
* @ingroup los_arch_atomic
 * @brief Atomic exchange for a 32-bit variable.
 *
 * @par Description:
 * This API is used to implement the atomic exchange for a 32-bit variable
 * and to return the previous value of the atomic variable.
* @attention
* <ul>The pointer v must not be NULL.</ul>
*
* @param v [IN] The variable pointer.
* @param val [IN] The exchange value.
*
* @retval #INT32 The previous value of the atomic variable
* @par Dependency:
* <ul><li>los_arch_atomic.h: the header file that contains the API declaration.</li></ul>
* @see
*/
STATIC INLINE INT32 ArchAtomicXchg32bits(volatile INT32 *v, INT32 val)
{
INT32 prevVal = 0;
UINT32 intSave;
intSave = LOS_IntLock();
__asm__ __volatile__("lw %0, 0(%1)\n"
"amoswap.w %0, %2, (%1)\n"
: "=&r"(prevVal)
: "r"(v), "r"(val)
: "memory");
LOS_IntRestore(intSave);
return prevVal;
}
/**
* @ingroup los_arch_atomic
 * @brief Atomic exchange for a 32-bit variable with compare.
 *
 * @par Description:
 * This API is used to implement the atomic exchange for a 32-bit variable; the exchange is performed only if the current value of the variable equals oldVal.
* @attention
* <ul>The pointer v must not be NULL.</ul>
*
* @param v [IN] The variable pointer.
* @param val [IN] The new value.
* @param oldVal [IN] The old value.
*
* @retval TRUE The previous value of the atomic variable is not equal to oldVal.
* @retval FALSE The previous value of the atomic variable is equal to oldVal.
* @par Dependency:
* <ul><li>los_arch_atomic.h: the header file that contains the API declaration.</li></ul>
* @see
*/
STATIC INLINE BOOL ArchAtomicCmpXchg32bits(volatile INT32 *v, INT32 val, INT32 oldVal)
{
INT32 prevVal = 0;
UINT32 intSave;
intSave = LOS_IntLock();
__asm__ __volatile__("1: lw %0, 0(%1)\n"
" bne %0, %2, 2f\n"
" amoswap.w %0, %3, (%1)\n"
"2:"
: "=&r"(prevVal)
: "r"(v), "r"(oldVal), "r"(val)
: "memory");
LOS_IntRestore(intSave);
return prevVal != oldVal;
}
STATIC INLINE INT64 ArchAtomic64Read(const Atomic64 *v)
{
INT64 val;
UINT32 intSave;
intSave = LOS_IntLock();
val = *v;
LOS_IntRestore(intSave);
return val;
}
STATIC INLINE VOID ArchAtomic64Set(Atomic64 *v, INT64 setVal)
{
UINT32 intSave;
intSave = LOS_IntLock();
*v = setVal;
LOS_IntRestore(intSave);
}
STATIC INLINE INT64 ArchAtomic64Add(Atomic64 *v, INT64 addVal)
{
INT64 val;
UINT32 intSave;
intSave = LOS_IntLock();
*v += addVal;
val = *v;
LOS_IntRestore(intSave);
return val;
}
STATIC INLINE INT64 ArchAtomic64Sub(Atomic64 *v, INT64 subVal)
{
INT64 val;
UINT32 intSave;
intSave = LOS_IntLock();
*v -= subVal;
val = *v;
LOS_IntRestore(intSave);
return val;
}
STATIC INLINE VOID ArchAtomic64Inc(Atomic64 *v)
{
(VOID)ArchAtomic64Add(v, 1);
}
STATIC INLINE INT64 ArchAtomic64IncRet(Atomic64 *v)
{
return ArchAtomic64Add(v, 1);
}
STATIC INLINE VOID ArchAtomic64Dec(Atomic64 *v)
{
(VOID)ArchAtomic64Sub(v, 1);
}
STATIC INLINE INT64 ArchAtomic64DecRet(Atomic64 *v)
{
return ArchAtomic64Sub(v, 1);
}
STATIC INLINE INT64 ArchAtomicXchg64bits(Atomic64 *v, INT64 val)
{
INT64 prevVal;
UINT32 intSave;
intSave = LOS_IntLock();
prevVal = *v;
*v = val;
LOS_IntRestore(intSave);
return prevVal;
}
STATIC INLINE BOOL ArchAtomicCmpXchg64bits(Atomic64 *v, INT64 val, INT64 oldVal)
{
INT64 prevVal;
UINT32 intSave;
intSave = LOS_IntLock();
prevVal = *v;
if (prevVal == oldVal) {
*v = val;
}
LOS_IntRestore(intSave);
return prevVal != oldVal;
}
#ifdef __cplusplus
#if __cplusplus
}
#endif /* __cplusplus */
#endif /* __cplusplus */
#endif /* _LOS_ARCH_ATOMIC_H */
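
A hedged sketch (not in the commit) of the usual consumer of the return-new-value contract implemented above: ArchAtomicSub and ArchAtomicDecRet hand back the post-operation value, so dropping a reference and testing for zero is a single atomic step. DemoBufPut and g_demoRefCnt are illustrative names only.

#include "los_atomic.h"

STATIC Atomic g_demoRefCnt = 1;   /* hypothetical reference count, creator holds one reference */

VOID DemoBufPut(VOID *buf)
{
    /* On this port the decrement is an amoadd.w with a negated operand; the new count is returned. */
    if (LOS_AtomicDecRet(&g_demoRefCnt) == 0) {
        (VOID)buf;   /* last reference gone: release the buffer here */
    }
}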

View File

@ -0,0 +1,303 @@
/*
* Copyright (c) 2013-2019 Huawei Technologies Co., Ltd. All rights reserved.
* Copyright (c) 2020-2021 Huawei Device Co., Ltd. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this list of
* conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice, this list
* of conditions and the following disclaimer in the documentation and/or other materials
* provided with the distribution.
*
* 3. Neither the name of the copyright holder nor the names of its contributors may be used
* to endorse or promote products derived from this software without specific prior written
* permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef _LOS_ARCH_ATOMIC_H
#define _LOS_ARCH_ATOMIC_H
#include "los_compiler.h"
#ifdef __cplusplus
#if __cplusplus
extern "C" {
#endif /* __cplusplus */
#endif /* __cplusplus */
STATIC INLINE INT32 ArchAtomicRead(const Atomic *v)
{
INT32 val;
UINT32 intSave;
intSave = LOS_IntLock();
__asm__ __volatile__("lr.w %0, (%1)\n"
"fence rw, rw\n"
: "=&r"(val)
: "r"(v)
: "memory");
LOS_IntRestore(intSave);
return val;
}
STATIC INLINE VOID ArchAtomicSet(Atomic *v, INT32 setVal)
{
UINT32 prevVal;
UINT32 intSave;
intSave = LOS_IntLock();
__asm__ __volatile__("amoswap.w %0, %2, (%1)\n"
: "=r"(prevVal)
: "r"(v), "r"(setVal)
: "memory");
LOS_IntRestore(intSave);
}
STATIC INLINE INT32 ArchAtomicAdd(Atomic *v, INT32 addVal)
{
INT32 val;
UINT32 intSave;
intSave = LOS_IntLock();
__asm__ __volatile__("amoadd.w %0, %2, (%1)\n"
"lw %0, (%1)\n"
"fence rw, rw\n"
: "=&r"(val)
: "r"(v), "r"(addVal)
: "memory");
LOS_IntRestore(intSave);
return val;
}
STATIC INLINE INT32 ArchAtomicSub(Atomic *v, INT32 subVal)
{
INT32 val;
UINT32 intSave;
intSave = LOS_IntLock();
__asm__ __volatile__("amoadd.w %0, %2, (%1)\n"
"lw %0, (%1)\n"
"fence rw, rw\n"
: "=&r"(val)
: "r"(v), "r"(-subVal)
: "memory");
LOS_IntRestore(intSave);
return val;
}
STATIC INLINE VOID ArchAtomicInc(Atomic *v)
{
(VOID)ArchAtomicAdd(v, 1);
}
STATIC INLINE VOID ArchAtomicDec(Atomic *v)
{
(VOID)ArchAtomicSub(v, 1);
}
STATIC INLINE INT32 ArchAtomicIncRet(Atomic *v)
{
return ArchAtomicAdd(v, 1);
}
STATIC INLINE INT32 ArchAtomicDecRet(Atomic *v)
{
return ArchAtomicSub(v, 1);
}
/**
* @ingroup los_arch_atomic
 * @brief Atomic exchange for a 32-bit variable.
 *
 * @par Description:
 * This API is used to implement the atomic exchange for a 32-bit variable
 * and to return the previous value of the atomic variable.
* @attention
* <ul>The pointer v must not be NULL.</ul>
*
* @param v [IN] The variable pointer.
* @param val [IN] The exchange value.
*
* @retval #INT32 The previous value of the atomic variable
* @par Dependency:
* <ul><li>los_arch_atomic.h: the header file that contains the API declaration.</li></ul>
* @see
*/
STATIC INLINE INT32 ArchAtomicXchg32bits(volatile INT32 *v, INT32 val)
{
INT32 prevVal = 0;
UINT32 intSave;
intSave = LOS_IntLock();
__asm__ __volatile__("lw %0, 0(%1)\n"
"amoswap.w %0, %2, (%1)\n"
: "=&r"(prevVal)
: "r"(v), "r"(val)
: "memory");
LOS_IntRestore(intSave);
return prevVal;
}
/**
* @ingroup los_arch_atomic
 * @brief Atomic exchange for a 32-bit variable with compare.
 *
 * @par Description:
 * This API is used to implement the atomic exchange for a 32-bit variable; the exchange is performed only if the current value of the variable equals oldVal.
* @attention
* <ul>The pointer v must not be NULL.</ul>
*
* @param v [IN] The variable pointer.
* @param val [IN] The new value.
* @param oldVal [IN] The old value.
*
* @retval TRUE The previous value of the atomic variable is not equal to oldVal.
* @retval FALSE The previous value of the atomic variable is equal to oldVal.
* @par Dependency:
* <ul><li>los_arch_atomic.h: the header file that contains the API declaration.</li></ul>
* @see
*/
STATIC INLINE BOOL ArchAtomicCmpXchg32bits(volatile INT32 *v, INT32 val, INT32 oldVal)
{
INT32 prevVal = 0;
UINT32 intSave;
intSave = LOS_IntLock();
__asm__ __volatile__("1: lw %0, 0(%1)\n"
" bne %0, %2, 2f\n"
" amoswap.w %0, %3, (%1)\n"
"2:"
: "=&r"(prevVal)
: "r"(v), "r"(oldVal), "r"(val)
: "memory");
LOS_IntRestore(intSave);
return prevVal != oldVal;
}
STATIC INLINE INT64 ArchAtomic64Read(const Atomic64 *v)
{
INT64 val;
UINT32 intSave;
intSave = LOS_IntLock();
val = *v;
LOS_IntRestore(intSave);
return val;
}
STATIC INLINE VOID ArchAtomic64Set(Atomic64 *v, INT64 setVal)
{
UINT32 intSave;
intSave = LOS_IntLock();
*v = setVal;
LOS_IntRestore(intSave);
}
STATIC INLINE INT64 ArchAtomic64Add(Atomic64 *v, INT64 addVal)
{
INT64 val;
UINT32 intSave;
intSave = LOS_IntLock();
*v += addVal;
val = *v;
LOS_IntRestore(intSave);
return val;
}
STATIC INLINE INT64 ArchAtomic64Sub(Atomic64 *v, INT64 subVal)
{
INT64 val;
UINT32 intSave;
intSave = LOS_IntLock();
*v -= subVal;
val = *v;
LOS_IntRestore(intSave);
return val;
}
STATIC INLINE VOID ArchAtomic64Inc(Atomic64 *v)
{
(VOID)ArchAtomic64Add(v, 1);
}
STATIC INLINE INT64 ArchAtomic64IncRet(Atomic64 *v)
{
return ArchAtomic64Add(v, 1);
}
STATIC INLINE VOID ArchAtomic64Dec(Atomic64 *v)
{
(VOID)ArchAtomic64Sub(v, 1);
}
STATIC INLINE INT64 ArchAtomic64DecRet(Atomic64 *v)
{
return ArchAtomic64Sub(v, 1);
}
STATIC INLINE INT64 ArchAtomicXchg64bits(Atomic64 *v, INT64 val)
{
INT64 prevVal;
UINT32 intSave;
intSave = LOS_IntLock();
prevVal = *v;
*v = val;
LOS_IntRestore(intSave);
return prevVal;
}
STATIC INLINE BOOL ArchAtomicCmpXchg64bits(Atomic64 *v, INT64 val, INT64 oldVal)
{
INT64 prevVal;
UINT32 intSave;
intSave = LOS_IntLock();
prevVal = *v;
if (prevVal == oldVal) {
*v = val;
}
LOS_IntRestore(intSave);
return prevVal != oldVal;
}
#ifdef __cplusplus
#if __cplusplus
}
#endif /* __cplusplus */
#endif /* __cplusplus */
#endif /* _LOS_ARCH_ATOMIC_H */
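
A minimal sketch (not part of the commit) of the swap-returns-previous-value contract of ArchAtomicXchg32bits, used here as a simple try-take flag; on a single-core LiteOS-M target the interrupt lock inside the primitive already makes the sequence atomic. The names are illustrative.

#include "los_atomic.h"

STATIC Atomic g_demoBusy = 0;   /* hypothetical flag: 0 = free, 1 = taken */

BOOL DemoTryTake(VOID)
{
    /* The previous value tells the caller whether it was the one that set the flag. */
    return (LOS_AtomicXchg32bits(&g_demoBusy, 1) == 0);
}

VOID DemoRelease(VOID)
{
    LOS_AtomicSet(&g_demoBusy, 0);
}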

View File

@ -0,0 +1,308 @@
/*
* Copyright (c) 2013-2019 Huawei Technologies Co., Ltd. All rights reserved.
* Copyright (c) 2020-2021 Huawei Device Co., Ltd. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this list of
* conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice, this list
* of conditions and the following disclaimer in the documentation and/or other materials
* provided with the distribution.
*
* 3. Neither the name of the copyright holder nor the names of its contributors may be used
* to endorse or promote products derived from this software without specific prior written
* permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef _LOS_ARCH_ATOMIC_H
#define _LOS_ARCH_ATOMIC_H
#include "los_compiler.h"
#ifdef __cplusplus
#if __cplusplus
extern "C" {
#endif /* __cplusplus */
#endif /* __cplusplus */
STATIC INLINE INT32 ArchAtomicRead(const Atomic *v)
{
INT32 val;
UINT32 intSave;
intSave = LOS_IntLock();
__asm__ __volatile__("l32ai %0, %1, 0\n"
: "=&a"(val)
: "a"(v)
: "memory");
LOS_IntRestore(intSave);
return val;
}
STATIC INLINE VOID ArchAtomicSet(Atomic *v, INT32 setVal)
{
INT32 val;
UINT32 intSave;
intSave = LOS_IntLock();
__asm__ __volatile__("l32ai %0, %2, 0\n"
"wsr %0, SCOMPARE1\n"
"s32c1i %3, %1"
: "=&a"(val), "+m"(*v)
: "a"(v), "a"(setVal)
: "memory");
LOS_IntRestore(intSave);
}
STATIC INLINE INT32 ArchAtomicAdd(Atomic *v, INT32 addVal)
{
INT32 val;
UINT32 intSave;
intSave = LOS_IntLock();
__asm__ __volatile__("l32ai %0, %2, 0\n"
"wsr %0, SCOMPARE1\n"
"add %0, %0, %3\n"
"s32c1i %0, %1\n"
: "=&a"(val), "+m"(*v)
: "a"(v), "a"(addVal)
: "memory");
LOS_IntRestore(intSave);
return *v;
}
STATIC INLINE INT32 ArchAtomicSub(Atomic *v, INT32 subVal)
{
INT32 val;
UINT32 intSave;
intSave = LOS_IntLock();
__asm__ __volatile__("l32ai %0, %2, 0\n"
"wsr %0, SCOMPARE1\n"
"sub %0, %0, %3\n"
"s32c1i %0, %1\n"
: "=&a"(val), "+m"(*v)
: "a"(v), "a"(subVal)
: "memory");
LOS_IntRestore(intSave);
return *v;
}
STATIC INLINE VOID ArchAtomicInc(Atomic *v)
{
(VOID)ArchAtomicAdd(v, 1);
}
STATIC INLINE VOID ArchAtomicDec(Atomic *v)
{
(VOID)ArchAtomicSub(v, 1);
}
STATIC INLINE INT32 ArchAtomicIncRet(Atomic *v)
{
return ArchAtomicAdd(v, 1);
}
STATIC INLINE INT32 ArchAtomicDecRet(Atomic *v)
{
return ArchAtomicSub(v, 1);
}
/**
* @ingroup los_arch_atomic
 * @brief Atomic exchange for a 32-bit variable.
 *
 * @par Description:
 * This API is used to implement the atomic exchange for a 32-bit variable
 * and to return the previous value of the atomic variable.
* @attention
* <ul>The pointer v must not be NULL.</ul>
*
* @param v [IN] The variable pointer.
* @param val [IN] The exchange value.
*
* @retval #INT32 The previous value of the atomic variable
* @par Dependency:
* <ul><li>los_arch_atomic.h: the header file that contains the API declaration.</li></ul>
* @see
*/
STATIC INLINE INT32 ArchAtomicXchg32bits(volatile INT32 *v, INT32 val)
{
INT32 prevVal = 0;
UINT32 intSave;
intSave = LOS_IntLock();
__asm__ __volatile__("l32ai %0, %2, 0\n"
"wsr %0, SCOMPARE1\n"
"s32c1i %3, %1\n"
: "=&a"(prevVal), "+m"(*v)
: "a"(v), "a"(val)
: "memory");
LOS_IntRestore(intSave);
return prevVal;
}
/**
* @ingroup los_arch_atomic
 * @brief Atomic exchange for a 32-bit variable with compare.
 *
 * @par Description:
 * This API is used to implement the atomic exchange for a 32-bit variable; the exchange is performed only if the current value of the variable equals oldVal.
* @attention
* <ul>The pointer v must not be NULL.</ul>
*
* @param v [IN] The variable pointer.
* @param val [IN] The new value.
* @param oldVal [IN] The old value.
*
* @retval TRUE The previous value of the atomic variable is not equal to oldVal.
* @retval FALSE The previous value of the atomic variable is equal to oldVal.
* @par Dependency:
* <ul><li>los_arch_atomic.h: the header file that contains the API declaration.</li></ul>
* @see
*/
STATIC INLINE BOOL ArchAtomicCmpXchg32bits(volatile INT32 *v, INT32 val, INT32 oldVal)
{
INT32 prevVal = 0;
UINT32 intSave;
intSave = LOS_IntLock();
__asm__ __volatile__("l32ai %0, %2, 0\n"
"wsr %0, SCOMPARE1\n"
"bne %0, %3, 2f\n"
"s32c1i %4, %1\n"
"2:\n"
: "=&a"(prevVal), "+m"(*v)
: "a"(v), "a"(oldVal), "a"(val)
: "cc");
LOS_IntRestore(intSave);
return prevVal != oldVal;
}
STATIC INLINE INT64 ArchAtomic64Read(const Atomic64 *v)
{
INT64 val;
UINT32 intSave;
intSave = LOS_IntLock();
val = *v;
LOS_IntRestore(intSave);
return val;
}
STATIC INLINE VOID ArchAtomic64Set(Atomic64 *v, INT64 setVal)
{
UINT32 intSave;
intSave = LOS_IntLock();
*v = setVal;
LOS_IntRestore(intSave);
}
STATIC INLINE INT64 ArchAtomic64Add(Atomic64 *v, INT64 addVal)
{
INT64 val;
UINT32 intSave;
intSave = LOS_IntLock();
*v += addVal;
val = *v;
LOS_IntRestore(intSave);
return val;
}
STATIC INLINE INT64 ArchAtomic64Sub(Atomic64 *v, INT64 subVal)
{
INT64 val;
UINT32 intSave;
intSave = LOS_IntLock();
*v -= subVal;
val = *v;
LOS_IntRestore(intSave);
return val;
}
STATIC INLINE VOID ArchAtomic64Inc(Atomic64 *v)
{
(VOID)ArchAtomic64Add(v, 1);
}
STATIC INLINE INT64 ArchAtomic64IncRet(Atomic64 *v)
{
return ArchAtomic64Add(v, 1);
}
STATIC INLINE VOID ArchAtomic64Dec(Atomic64 *v)
{
(VOID)ArchAtomic64Sub(v, 1);
}
STATIC INLINE INT64 ArchAtomic64DecRet(Atomic64 *v)
{
return ArchAtomic64Sub(v, 1);
}
STATIC INLINE INT64 ArchAtomicXchg64bits(Atomic64 *v, INT64 val)
{
INT64 prevVal;
UINT32 intSave;
intSave = LOS_IntLock();
prevVal = *v;
*v = val;
LOS_IntRestore(intSave);
return prevVal;
}
STATIC INLINE BOOL ArchAtomicCmpXchg64bits(Atomic64 *v, INT64 val, INT64 oldVal)
{
INT64 prevVal;
UINT32 intSave;
intSave = LOS_IntLock();
prevVal = *v;
if (prevVal == oldVal) {
*v = val;
}
LOS_IntRestore(intSave);
return prevVal != oldVal;
}
#ifdef __cplusplus
#if __cplusplus
}
#endif /* __cplusplus */
#endif /* __cplusplus */
#endif /* _LOS_ARCH_ATOMIC_H */
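
For readers unfamiliar with the Xtensa S32C1I instruction, the l32ai / wsr SCOMPARE1 / s32c1i sequences above behave roughly like the following C, shown only to clarify the instruction pattern (the real code additionally holds the interrupt lock around it, and the helper name is illustrative):

/* Conceptual equivalent of one compare-conditional-store round trip. */
STATIC INT32 DemoCompareStore(volatile INT32 *addr, INT32 expected, INT32 newVal)
{
    INT32 observed = *addr;       /* l32ai: load the current word                         */
                                  /* wsr: park the expected word in SCOMPARE1             */
    if (observed == expected) {   /* s32c1i stores newVal only if memory == SCOMPARE1 ... */
        *addr = newVal;
    }
    return observed;              /* ... and always yields the word that was in memory    */
}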

View File

@ -56,6 +56,7 @@ kernel_module("test_init") {
group("testsuites") {
deps = [
":test_init",
"sample/kernel/atomic:test_atomic",
"sample/kernel/event:test_event",
"sample/kernel/hwi:test_hwi",
"sample/kernel/mem:test_mem",

View File

@ -71,6 +71,7 @@ extern "C" {
#ifndef LOS_KERNEL_TEST_FULL
#define LOS_KERNEL_TEST_FULL 0
#endif
#define LOS_KERNEL_ATOMIC_TEST 1
#define LOS_KERNEL_CORE_TASK_TEST 1
#define LOS_KERNEL_IPC_MUX_TEST 1
#define LOS_KERNEL_IPC_SEM_TEST 1
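
Presumably (following the pattern of the existing LOS_KERNEL_*_TEST switches, whose consumers are not shown in this diff), the new switch gates the registration of the atomic suite in the test entry code, roughly:

#if (LOS_KERNEL_ATOMIC_TEST == 1)
    ItSuiteLosAtomic();   /* registers ItLosAtomic001 ... ItLosAtomic009 */
#endif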

View File

@ -0,0 +1,45 @@
# Copyright (c) 2013-2019 Huawei Technologies Co., Ltd. All rights reserved.
# Copyright (c) 2020-2021 Huawei Device Co., Ltd. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this list of
# conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice, this list
# of conditions and the following disclaimer in the documentation and/or other materials
# provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its contributors may be used
# to endorse or promote products derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
static_library("test_atomic") {
sources = [
"it_los_atomic.c",
"it_los_atomic_001.c",
"it_los_atomic_002.c",
"it_los_atomic_003.c",
"it_los_atomic_004.c",
"it_los_atomic_005.c",
"it_los_atomic_006.c",
"it_los_atomic_007.c",
"it_los_atomic_008.c",
"it_los_atomic_009.c",
]
include_dirs = [ "." ]
configs += [ "//kernel/liteos_m/testsuites:include" ]
}

View File

@ -0,0 +1,61 @@
/*
* Copyright (c) 2013-2019 Huawei Technologies Co., Ltd. All rights reserved.
* Copyright (c) 2020-2021 Huawei Device Co., Ltd. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this list of
* conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice, this list
* of conditions and the following disclaimer in the documentation and/or other materials
* provided with the distribution.
*
* 3. Neither the name of the copyright holder nor the names of its contributors may be used
* to endorse or promote products derived from this software without specific prior written
* permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "it_los_atomic.h"
#ifdef __cplusplus
#if __cplusplus
extern "C" {
#endif /* __cplusplus */
#endif /* __cplusplus */
Atomic g_testAtomicID03 = 0;
Atomic64 g_testAtomicID05 = 0;
VOID ItSuiteLosAtomic(VOID)
{
ItLosAtomic001();
ItLosAtomic002();
ItLosAtomic003();
ItLosAtomic004();
ItLosAtomic005();
ItLosAtomic006();
ItLosAtomic007();
ItLosAtomic008();
ItLosAtomic009();
}
#ifdef __cplusplus
#if __cplusplus
}
#endif /* __cplusplus */
#endif /* __cplusplus */

View File

@ -0,0 +1,66 @@
/*
* Copyright (c) 2013-2019 Huawei Technologies Co., Ltd. All rights reserved.
* Copyright (c) 2020-2021 Huawei Device Co., Ltd. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this list of
* conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice, this list
* of conditions and the following disclaimer in the documentation and/or other materials
* provided with the distribution.
*
* 3. Neither the name of the copyright holder nor the names of its contributors may be used
* to endorse or promote products derived from this software without specific prior written
* permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef _IT_LOS_ATOMIC_H
#define _IT_LOS_ATOMIC_H
#include "osTest.h"
#include "los_atomic.h"
#ifdef __cplusplus
#if __cplusplus
extern "C" {
#endif /* __cplusplus */
#endif /* __cplusplus */
#define ATOMIC_MUTI_TASK_NUM 3
extern Atomic g_testAtomicID03;
extern Atomic64 g_testAtomicID05;
extern VOID ItLosAtomic001(VOID);
extern VOID ItLosAtomic002(VOID);
extern VOID ItLosAtomic003(VOID);
extern VOID ItLosAtomic004(VOID);
extern VOID ItLosAtomic005(VOID);
extern VOID ItLosAtomic006(VOID);
extern VOID ItLosAtomic007(VOID);
extern VOID ItLosAtomic008(VOID);
extern VOID ItLosAtomic009(VOID);
#ifdef __cplusplus
#if __cplusplus
}
#endif /* __cplusplus */
#endif /* __cplusplus */
#endif /* _IT_LOS_ATOMIC_H */

View File

@ -0,0 +1,92 @@
/*
* Copyright (c) 2013-2019 Huawei Technologies Co., Ltd. All rights reserved.
* Copyright (c) 2020-2021 Huawei Device Co., Ltd. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this list of
* conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice, this list
* of conditions and the following disclaimer in the documentation and/or other materials
* provided with the distribution.
*
* 3. Neither the name of the copyright holder nor the names of its contributors may be used
* to endorse or promote products derived from this software without specific prior written
* permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "it_los_atomic.h"
#ifdef __cplusplus
#if __cplusplus
extern "C" {
#endif /* __cplusplus */
#endif /* __cplusplus */
static UINT32 TestCase(VOID)
{
volatile INT32 value = 0;
UINTPTR ret;
ret = LOS_AtomicDecRet(&value);
ICUNIT_ASSERT_EQUAL(value, ret, value);
ICUNIT_ASSERT_EQUAL(value, -1, value);
ret = LOS_AtomicDecRet(&value);
ICUNIT_ASSERT_EQUAL(value, ret, value);
ICUNIT_ASSERT_EQUAL(value, -0x02, value);
value = 0x7fffffff;
ret = LOS_AtomicDecRet(&value);
ICUNIT_ASSERT_EQUAL(value, ret, value);
ICUNIT_ASSERT_EQUAL(value, 0x7ffffffe, value);
return LOS_OK;
}
/**
* @ingroup TEST_ATO
* @par TestCase_Number
* ItLosAtomic001
* @par TestCase_TestCase_Type
* Function test
* @brief Test interface LOS_AtomicDecRet
* @par TestCase_Pretreatment_Condition
* NA.
* @par TestCase_Test_Steps
* step1: Invoke the LOS_AtomicDecRet interface.
* @par TestCase_Expected_Result
* 1.LOS_AtomicDecRet return expected result.
* @par TestCase_Level
* Level 0
* @par TestCase_Automated
* true
* @par TestCase_Remark
* null
*/
VOID ItLosAtomic001(VOID)
{
TEST_ADD_CASE("ItLosAtomic001", TestCase, TEST_LOS, TEST_ATO, TEST_LEVEL0, TEST_FUNCTION);
}
#ifdef __cplusplus
#if __cplusplus
}
#endif /* __cplusplus */
#endif /* __cplusplus */

View File

@ -0,0 +1,100 @@
/*
* Copyright (c) 2013-2019 Huawei Technologies Co., Ltd. All rights reserved.
* Copyright (c) 2020-2021 Huawei Device Co., Ltd. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this list of
* conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice, this list
* of conditions and the following disclaimer in the documentation and/or other materials
* provided with the distribution.
*
* 3. Neither the name of the copyright holder nor the names of its contributors may be used
* to endorse or promote products derived from this software without specific prior written
* permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "it_los_atomic.h"
#ifdef __cplusplus
#if __cplusplus
extern "C" {
#endif /* __cplusplus */
#endif /* __cplusplus */
static UINT32 TestCase(VOID)
{
volatile INT32 value = 0;
UINT32 ret;
UINT32 newVal;
newVal = 0xff;
ret = LOS_AtomicXchg32bits(&value, newVal);
ICUNIT_ASSERT_EQUAL(ret, 0, ret);
ICUNIT_ASSERT_EQUAL(value, 0xff, value);
newVal = 0xffff;
ret = LOS_AtomicXchg32bits(&value, newVal);
ICUNIT_ASSERT_EQUAL(ret, 0xff, ret);
ICUNIT_ASSERT_EQUAL(value, 0xffff, value);
newVal = 0xffffff;
ret = LOS_AtomicXchg32bits(&value, newVal);
ICUNIT_ASSERT_EQUAL(ret, 0xffff, ret);
ICUNIT_ASSERT_EQUAL(value, 0xffffff, value);
newVal = 0xffffffff;
ret = LOS_AtomicXchg32bits(&value, newVal);
ICUNIT_ASSERT_EQUAL(ret, 0xffffff, ret);
ICUNIT_ASSERT_EQUAL(value, 0xffffffff, value);
return LOS_OK;
}
/**
* @ingroup TEST_ATO
* @par TestCase_Number
* ItLosAtomic002
* @par TestCase_TestCase_Type
* Function test
* @brief Test interface LOS_AtomicXchg32bits
* @par TestCase_Pretreatment_Condition
* NA.
* @par TestCase_Test_Steps
* step1: Invoke the LOS_AtomicXchg32bits interface.
* @par TestCase_Expected_Result
* 1.LOS_AtomicXchg32bits return expected result.
* @par TestCase_Level
* Level 0
* @par TestCase_Automated
* true
* @par TestCase_Remark
* null
*/
VOID ItLosAtomic002(VOID)
{
TEST_ADD_CASE("ItLosAtomic002", TestCase, TEST_LOS, TEST_ATO, TEST_LEVEL0, TEST_FUNCTION);
}
#ifdef __cplusplus
#if __cplusplus
}
#endif /* __cplusplus */
#endif /* __cplusplus */

View File

@ -0,0 +1,114 @@
/*
* Copyright (c) 2013-2019 Huawei Technologies Co., Ltd. All rights reserved.
* Copyright (c) 2020-2021 Huawei Device Co., Ltd. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this list of
* conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice, this list
* of conditions and the following disclaimer in the documentation and/or other materials
* provided with the distribution.
*
* 3. Neither the name of the copyright holder nor the names of its contributors may be used
* to endorse or promote products derived from this software without specific prior written
* permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "it_los_atomic.h"
#ifdef __cplusplus
#if __cplusplus
extern "C" {
#endif /* __cplusplus */
#endif /* __cplusplus */
static UINT32 TestCase(VOID)
{
volatile INT32 value = 0;
BOOL ret;
UINT32 newVal;
UINT32 oldVal;
newVal = 0xff;
oldVal = 1;
ret = LOS_AtomicCmpXchg32bits(&value, newVal, oldVal);
ICUNIT_ASSERT_EQUAL(ret, 1, ret);
ICUNIT_ASSERT_EQUAL(value, 0, value);
oldVal = 0;
ret = LOS_AtomicCmpXchg32bits(&value, newVal, oldVal);
ICUNIT_ASSERT_EQUAL(ret, 0, ret);
ICUNIT_ASSERT_EQUAL(value, 0xff, value);
newVal = 0xffff;
oldVal = 1;
ret = LOS_AtomicCmpXchg32bits(&value, newVal, oldVal);
ICUNIT_ASSERT_EQUAL(ret, 1, ret);
ICUNIT_ASSERT_EQUAL(value, 0xff, value);
oldVal = 0xff;
ret = LOS_AtomicCmpXchg32bits(&value, newVal, oldVal);
ICUNIT_ASSERT_EQUAL(ret, 0, ret);
ICUNIT_ASSERT_EQUAL(value, 0xffff, value);
newVal = 0xffffffff;
oldVal = 1;
ret = LOS_AtomicCmpXchg32bits(&value, newVal, oldVal);
ICUNIT_ASSERT_EQUAL(ret, 1, ret);
ICUNIT_ASSERT_EQUAL(value, 0xffff, value);
oldVal = 0xffff;
ret = LOS_AtomicCmpXchg32bits(&value, newVal, oldVal);
ICUNIT_ASSERT_EQUAL(ret, 0, ret);
ICUNIT_ASSERT_EQUAL(value, 0xffffffff, value);
return LOS_OK;
}
/**
* @ingroup TEST_ATO
* @par TestCase_Number
* ItLosAtomic003
* @par TestCase_TestCase_Type
* Function test
* @brief Test interface LOS_AtomicCmpXchg32bits
* @par TestCase_Pretreatment_Condition
* NA.
* @par TestCase_Test_Steps
* step1: Invoke the LOS_AtomicCmpXchg32bits interface.
* @par TestCase_Expected_Result
* 1.LOS_AtomicCmpXchg32bits return expected result.
* @par TestCase_Level
* Level 0
* @par TestCase_Automated
* true
* @par TestCase_Remark
* null
*/
VOID ItLosAtomic003(VOID)
{
TEST_ADD_CASE("ItLosAtomic003", TestCase, TEST_LOS, TEST_ATO, TEST_LEVEL0, TEST_FUNCTION);
}
#ifdef __cplusplus
#if __cplusplus
}
#endif /* __cplusplus */
#endif /* __cplusplus */
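
Beyond the fixed old/new pairs exercised by this case, the usual consumer of LOS_AtomicCmpXchg32bits is a read-modify-CAS retry loop; a hedged sketch (the helper name and bit range are illustrative, not from this commit):

#include "los_atomic.h"

/* Atomically OR a single bit (0..30 here, to stay clear of the sign bit) into a shared mask. */
STATIC VOID DemoAtomicSetBit(Atomic *mask, UINT32 bit)
{
    INT32 old;

    do {
        old = LOS_AtomicRead(mask);
        /* TRUE (swap skipped) means *mask changed between the read and the CAS: retry. */
    } while (LOS_AtomicCmpXchg32bits(mask, old | (INT32)(1U << bit), old));
}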

View File

@ -0,0 +1,112 @@
/*
* Copyright (c) 2013-2019 Huawei Technologies Co., Ltd. All rights reserved.
* Copyright (c) 2020-2021 Huawei Device Co., Ltd. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this list of
* conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice, this list
* of conditions and the following disclaimer in the documentation and/or other materials
* provided with the distribution.
*
* 3. Neither the name of the copyright holder nor the names of its contributors may be used
* to endorse or promote products derived from this software without specific prior written
* permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "it_los_atomic.h"
#ifdef __cplusplus
#if __cplusplus
extern "C" {
#endif /* __cplusplus */
#endif /* __cplusplus */
static VOID TaskF01(VOID)
{
INT32 i;
UINTPTR ret;
UINTPTR count;
for (i = 0; i < 100; ++i) { // run 100 times.
count = g_testAtomicID03;
ret = LOS_AtomicIncRet(&g_testAtomicID03);
ICUNIT_ASSERT_EQUAL_VOID(ret, g_testAtomicID03, ret);
ICUNIT_ASSERT_EQUAL_VOID((count + 1), g_testAtomicID03, (count + 1));
}
ICUNIT_ASSERT_EQUAL_VOID(g_testAtomicID03, i, g_testAtomicID03);
ICUNIT_ASSERT_EQUAL_VOID(ret, g_testAtomicID03, ret);
LOS_AtomicAdd(&g_testCount, 1);
}
static UINT32 TestCase(VOID)
{
UINT32 ret;
g_testAtomicID03 = 0;
g_testCount = 0;
TSK_INIT_PARAM_S stTask1 = {0};
stTask1.pfnTaskEntry = (TSK_ENTRY_FUNC)TaskF01;
stTask1.pcName = "Atomic_004";
stTask1.uwStackSize = LOSCFG_BASE_CORE_TSK_DEFAULT_STACK_SIZE;
stTask1.usTaskPrio = TASK_PRIO_TEST - 2; // TASK_PRIO_TEST - 2 has higher priority than TASK_PRIO_TEST
stTask1.uwResved = LOS_TASK_STATUS_DETACHED;
ret = LOS_TaskCreate(&g_testTaskID01, &stTask1);
ICUNIT_ASSERT_EQUAL(ret, LOS_OK, ret);
LOS_TaskDelay(5); // delay 5 ticks.
ICUNIT_ASSERT_EQUAL(g_testCount, 1, g_testCount);
return LOS_OK;
}
/**
* @ingroup TEST_ATO
* @par TestCase_Number
* ItLosAtomic004
* @par TestCase_TestCase_Type
* Function test
* @brief Test interface LOS_AtomicIncRet 100 times.
* @par TestCase_Pretreatment_Condition
* NA.
* @par TestCase_Test_Steps
* step1: Invoke the LOS_AtomicIncRet interface 100 times in a task.
* @par TestCase_Expected_Result
* 1.LOS_AtomicIncRet return expected result.
* @par TestCase_Level
* Level 0
* @par TestCase_Automated
* true
* @par TestCase_Remark
* null
*/
VOID ItLosAtomic004(VOID)
{
TEST_ADD_CASE("ItLosAtomic004", TestCase, TEST_LOS, TEST_ATO, TEST_LEVEL0, TEST_FUNCTION);
}
#ifdef __cplusplus
#if __cplusplus
}
#endif /* __cplusplus */
#endif /* __cplusplus */
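
The property this case stresses (every task observes a distinct post-increment value) is what makes LOS_AtomicIncRet handy as an ID generator; a minimal sketch with illustrative names:

#include "los_atomic.h"

STATIC Atomic g_demoNextId = 0;   /* hypothetical shared ID source */

INT32 DemoAllocId(VOID)
{
    /* IncRet returns the value after the increment, so concurrent callers never get the same ID. */
    return LOS_AtomicIncRet(&g_demoNextId);
}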

View File

@ -0,0 +1,110 @@
/*
* Copyright (c) 2013-2019 Huawei Technologies Co., Ltd. All rights reserved.
* Copyright (c) 2020-2021 Huawei Device Co., Ltd. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this list of
* conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice, this list
* of conditions and the following disclaimer in the documentation and/or other materials
* provided with the distribution.
*
* 3. Neither the name of the copyright holder nor the names of its contributors may be used
* to endorse or promote products derived from this software without specific prior written
* permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "it_los_atomic.h"
#ifdef __cplusplus
#if __cplusplus
extern "C" {
#endif /* __cplusplus */
#endif /* __cplusplus */
static UINT32 TestCase(VOID)
{
volatile INT64 value = 0;
UINT64 ret;
UINT64 uwNewVal;
uwNewVal = 0xff;
ret = LOS_AtomicXchg64bits(&value, uwNewVal);
ICUNIT_ASSERT_EQUAL(ret, 0, ret);
ICUNIT_ASSERT_EQUAL(value, 0xff, value);
uwNewVal = 0xffff;
ret = LOS_AtomicXchg64bits(&value, uwNewVal);
ICUNIT_ASSERT_EQUAL(ret, 0xff, ret);
ICUNIT_ASSERT_EQUAL(value, 0xffff, value);
uwNewVal = 0xffffff;
ret = LOS_AtomicXchg64bits(&value, uwNewVal);
ICUNIT_ASSERT_EQUAL(ret, 0xffff, ret);
ICUNIT_ASSERT_EQUAL(value, 0xffffff, value);
uwNewVal = 0xffffffff;
ret = LOS_AtomicXchg64bits(&value, uwNewVal);
ICUNIT_ASSERT_EQUAL(ret, 0xffffff, ret);
ICUNIT_ASSERT_EQUAL(value, 0xffffffff, value);
uwNewVal = 0xffffffffffff;
ret = LOS_AtomicXchg64bits(&value, uwNewVal);
ICUNIT_ASSERT_EQUAL(ret, 0xffffffff, ret);
ICUNIT_ASSERT_EQUAL(value, 0xffffffffffff, value);
uwNewVal = 0xffffffffffffffff;
ret = LOS_AtomicXchg64bits(&value, uwNewVal);
ICUNIT_ASSERT_EQUAL(ret, 0xffffffffffff, ret);
ICUNIT_ASSERT_EQUAL(value, 0xffffffffffffffff, value);
return LOS_OK;
}
/**
* @ingroup TEST_ATO
* @par TestCase_Number
* ItLosAtomic005
* @par TestCase_TestCase_Type
* Function test
* @brief Test interface LOS_AtomicXchg64bits
* @par TestCase_Pretreatment_Condition
* NA.
* @par TestCase_Test_Steps
* step1: Invoke the LOS_AtomicXchg64bits interface.
* @par TestCase_Expected_Result
* 1.LOS_AtomicXchg64bits return expected result.
* @par TestCase_Level
* Level 0
* @par TestCase_Automated
* true
* @par TestCase_Remark
* null
*/
VOID ItLosAtomic005(VOID)
{
TEST_ADD_CASE("ItLosAtomic005", TestCase, TEST_LOS, TEST_ATO, TEST_LEVEL0, TEST_FUNCTION);
}
#ifdef __cplusplus
#if __cplusplus
}
#endif /* __cplusplus */
#endif /* __cplusplus */

View File

@ -0,0 +1,134 @@
/*
* Copyright (c) 2013-2019 Huawei Technologies Co., Ltd. All rights reserved.
* Copyright (c) 2020-2021 Huawei Device Co., Ltd. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this list of
* conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice, this list
* of conditions and the following disclaimer in the documentation and/or other materials
* provided with the distribution.
*
* 3. Neither the name of the copyright holder nor the names of its contributors may be used
* to endorse or promote products derived from this software without specific prior written
* permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "it_los_atomic.h"
#ifdef __cplusplus
#if __cplusplus
extern "C" {
#endif /* __cplusplus */
#endif /* __cplusplus */
static UINT32 TestCase(VOID)
{
volatile INT64 value = 0;
BOOL ret;
UINT64 newVal = 0xff;
UINT64 oldVal = 1;
ret = LOS_AtomicCmpXchg64bits(&value, newVal, oldVal);
ICUNIT_ASSERT_EQUAL(ret, 1, ret);
ICUNIT_ASSERT_EQUAL(value, 0, value);
oldVal = 0;
ret = LOS_AtomicCmpXchg64bits(&value, newVal, oldVal);
ICUNIT_ASSERT_EQUAL(ret, 0, ret);
ICUNIT_ASSERT_EQUAL(value, 0xff, value);
newVal = 0xffff;
oldVal = 1;
ret = LOS_AtomicCmpXchg64bits(&value, newVal, oldVal);
ICUNIT_ASSERT_EQUAL(ret, 1, ret);
ICUNIT_ASSERT_EQUAL(value, 0xff, value);
oldVal = 0xff;
ret = LOS_AtomicCmpXchg64bits(&value, newVal, oldVal);
ICUNIT_ASSERT_EQUAL(ret, 0, ret);
ICUNIT_ASSERT_EQUAL(value, 0xffff, value);
newVal = 0xffffffff;
oldVal = 1;
ret = LOS_AtomicCmpXchg64bits(&value, newVal, oldVal);
ICUNIT_ASSERT_EQUAL(ret, 1, ret);
ICUNIT_ASSERT_EQUAL(value, 0xffff, value);
oldVal = 0xffff;
ret = LOS_AtomicCmpXchg64bits(&value, newVal, oldVal);
ICUNIT_ASSERT_EQUAL(ret, 0, ret);
ICUNIT_ASSERT_EQUAL(value, 0xffffffff, value);
newVal = 0xffffffffffff;
oldVal = 1;
ret = LOS_AtomicCmpXchg64bits(&value, newVal, oldVal);
ICUNIT_ASSERT_EQUAL(ret, 1, ret);
ICUNIT_ASSERT_EQUAL(value, 0xffffffff, value);
oldVal = 0xffffffff;
ret = LOS_AtomicCmpXchg64bits(&value, newVal, oldVal);
ICUNIT_ASSERT_EQUAL(ret, 0, ret);
ICUNIT_ASSERT_EQUAL(value, 0xffffffffffff, value);
newVal = 0xffffffffffffffff;
oldVal = 1;
ret = LOS_AtomicCmpXchg64bits(&value, newVal, oldVal);
ICUNIT_ASSERT_EQUAL(ret, 1, ret);
ICUNIT_ASSERT_EQUAL(value, 0xffffffffffff, value);
oldVal = 0xffffffffffff;
ret = LOS_AtomicCmpXchg64bits(&value, newVal, oldVal);
ICUNIT_ASSERT_EQUAL(ret, 0, ret);
ICUNIT_ASSERT_EQUAL(value, 0xffffffffffffffff, value);
return LOS_OK;
}
/**
* @ingroup TEST_ATO
* @par TestCase_Number
* ItLosAtomic006
* @par TestCase_TestCase_Type
* Function test
* @brief Test interface LOS_AtomicCmpXchg64bits
* @par TestCase_Pretreatment_Condition
* NA.
* @par TestCase_Test_Steps
* step1: Invoke the LOS_AtomicCmpXchg64bits interface.
* @par TestCase_Expected_Result
* 1.LOS_AtomicCmpXchg64bits return expected result.
* @par TestCase_Level
* Level 0
* @par TestCase_Automated
* true
* @par TestCase_Remark
* null
*/
VOID ItLosAtomic006(VOID)
{
TEST_ADD_CASE("ItLosAtomic006", TestCase, TEST_LOS, TEST_ATO, TEST_LEVEL0, TEST_FUNCTION);
}
#ifdef __cplusplus
#if __cplusplus
}
#endif /* __cplusplus */
#endif /* __cplusplus */
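
A hedged sketch of a 64-bit retry loop built on the same primitive this case verifies, for example tracking a high-water mark; it assumes the 64-bit read wrapper LOS_Atomic64Read is mapped like the 32-bit ones in the part of los_atomic.h not shown above, and the names are illustrative:

#include "los_atomic.h"

STATIC Atomic64 g_demoMaxLatency = 0;   /* hypothetical 64-bit high-water mark */

VOID DemoRecordLatency(INT64 sample)
{
    INT64 cur;

    do {
        cur = LOS_Atomic64Read(&g_demoMaxLatency);
        if (sample <= cur) {
            return;   /* nothing to raise */
        }
        /* TRUE means another task updated the value first: re-read and retry. */
    } while (LOS_AtomicCmpXchg64bits(&g_demoMaxLatency, sample, cur));
}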

View File

@ -0,0 +1,118 @@
/*
* Copyright (c) 2013-2019 Huawei Technologies Co., Ltd. All rights reserved.
* Copyright (c) 2020-2021 Huawei Device Co., Ltd. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this list of
* conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice, this list
* of conditions and the following disclaimer in the documentation and/or other materials
* provided with the distribution.
*
* 3. Neither the name of the copyright holder nor the names of its contributors may be used
* to endorse or promote products derived from this software without specific prior written
* permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "it_los_atomic.h"
#ifdef __cplusplus
#if __cplusplus
extern "C" {
#endif /* __cplusplus */
#endif /* __cplusplus */
static VOID TaskF01(VOID)
{
INT64 i;
for (i = 0x7fffffffffff0000; i < 0x7fffffffffff000f; ++i) {
LOS_AtomicXchg64bits(&g_testAtomicID05, i);
}
++g_testCount;
}
static UINT32 TestCase(VOID)
{
UINT32 ret, i;
UINT32 taskId[ATOMIC_MUTI_TASK_NUM];
TSK_INIT_PARAM_S task[ATOMIC_MUTI_TASK_NUM] = {0, };
CHAR taskName[ATOMIC_MUTI_TASK_NUM][20] = {"", }; // max taskName size is 20.
CHAR buf[10] = ""; // max buf size is 10.
g_testCount = 0;
g_testAtomicID05 = 0;
for (i = 0; i < ATOMIC_MUTI_TASK_NUM; i++) {
memset(buf, 0, 10); // max buf size is 10.
memset(taskName[i], 0, 20); // max taskName size is 20.
task[i].pfnTaskEntry = (TSK_ENTRY_FUNC)TaskF01;
task[i].pcName = taskName[i];
task[i].uwStackSize = LOSCFG_BASE_CORE_TSK_DEFAULT_STACK_SIZE;
task[i].usTaskPrio = TASK_PRIO_TEST - 2; // TASK_PRIO_TEST - 2 has higher priority than TASK_PRIO_TEST
task[i].uwResved = LOS_TASK_STATUS_DETACHED;
}
for (i = 0; i < ATOMIC_MUTI_TASK_NUM; i++) {
ret = LOS_TaskCreate(&taskId[i], &task[i]);
ICUNIT_GOTO_EQUAL(ret, LOS_OK, ret, EXIT);
}
LOS_TaskDelay(20); // delay 20 ticks.
ICUNIT_GOTO_EQUAL(g_testCount, ATOMIC_MUTI_TASK_NUM, g_testCount, EXIT);
ICUNIT_GOTO_EQUAL(g_testAtomicID05, 0x7fffffffffff000e, g_testAtomicID05, EXIT);
EXIT:
for (i = 0; i < ATOMIC_MUTI_TASK_NUM; i++) {
(VOID)LOS_TaskDelete(taskId[i]);
}
return LOS_OK;
}
/**
* @ingroup TEST_ATO
* @par TestCase_Number
* ItLosAtomic007
* @par TestCase_TestCase_Type
* Function test
* @brief Test interface LOS_AtomicXchg64bits
* @par TestCase_Pretreatment_Condition
* NA.
* @par TestCase_Test_Steps
* step1: Invoke the LOS_AtomicXchg64bits interface in different task.
* @par TestCase_Expected_Result
* 1.LOS_AtomicXchg64bits return expected result.
* @par TestCase_Level
* Level 0
* @par TestCase_Automated
* true
* @par TestCase_Remark
* null
*/
VOID ItLosAtomic007(VOID)
{
TEST_ADD_CASE("ItLosAtomic007", TestCase, TEST_LOS, TEST_ATO, TEST_LEVEL0, TEST_FUNCTION);
}
#ifdef __cplusplus
#if __cplusplus
}
#endif /* __cplusplus */
#endif /* __cplusplus */

View File

@ -0,0 +1,121 @@
/*
* Copyright (c) 2013-2019 Huawei Technologies Co., Ltd. All rights reserved.
* Copyright (c) 2020-2021 Huawei Device Co., Ltd. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this list of
* conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice, this list
* of conditions and the following disclaimer in the documentation and/or other materials
* provided with the distribution.
*
* 3. Neither the name of the copyright holder nor the names of its contributors may be used
* to endorse or promote products derived from this software without specific prior written
* permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "it_los_atomic.h"
#ifdef __cplusplus
#if __cplusplus
extern "C" {
#endif /* __cplusplus */
#endif /* __cplusplus */
static VOID TaskF01(VOID)
{
INT64 i;
INT64 count;
for (i = 0x7fffffffffff0000; i < 0x7fffffffffffffff; ++i) {
count = g_testAtomicID05;
LOS_AtomicCmpXchg64bits(&g_testAtomicID05, i, count);
}
++g_testCount;
}
static UINT32 TestCase(VOID)
{
UINT32 ret, i;
UINT32 taskId[ATOMIC_MUTI_TASK_NUM];
TSK_INIT_PARAM_S task[ATOMIC_MUTI_TASK_NUM] = {0, };
CHAR taskName[ATOMIC_MUTI_TASK_NUM][20] = {"", }; // max taskName size is 20.
CHAR buf[10] = ""; // max buf size is 10.
g_testCount = 0;
g_testAtomicID05 = 0;
for (i = 0; i < ATOMIC_MUTI_TASK_NUM; i++) {
memset(buf, 0, 10); // max buf size is 10.
memset(taskName[i], 0, 20); // max taskName size is 20.
task[i].pfnTaskEntry = (TSK_ENTRY_FUNC)TaskF01;
task[i].pcName = taskName[i];
task[i].uwStackSize = LOSCFG_BASE_CORE_TSK_DEFAULT_STACK_SIZE;
task[i].usTaskPrio = TASK_PRIO_TEST - 2; // TASK_PRIO_TEST - 2 has higher priority than TASK_PRIO_TEST
task[i].uwResved = LOS_TASK_STATUS_DETACHED;
}
for (i = 0; i < ATOMIC_MUTI_TASK_NUM; i++) {
ret = LOS_TaskCreate(&taskId[i], &task[i]);
ICUNIT_GOTO_EQUAL(ret, LOS_OK, ret, EXIT);
}
LOS_TaskDelay(80); // delay 80 ticks.
ICUNIT_GOTO_EQUAL(g_testCount, ATOMIC_MUTI_TASK_NUM, g_testCount, EXIT);
ICUNIT_GOTO_EQUAL(g_testAtomicID05, (0x7fffffffffffffff - 1), g_testAtomicID05, EXIT);
EXIT:
for (i = 0; i < ATOMIC_MUTI_TASK_NUM; i++) {
(VOID)LOS_TaskDelete(taskId[i]);
}
return LOS_OK;
}
/**
* @ingroup TEST_ATO
* @par TestCase_Number
* ItLosAtomic008
* @par TestCase_TestCase_Type
* Function test
* @brief Test interface LOS_AtomicCmpXchg64bits
* @par TestCase_Pretreatment_Condition
* NA.
* @par TestCase_Test_Steps
* step1: Invoke the LOS_AtomicCmpXchg64bits interface in different tasks.
* @par TestCase_Expected_Result
* 1. LOS_AtomicCmpXchg64bits returns the expected result.
* @par TestCase_Level
* Level 0
* @par TestCase_Automated
* true
* @par TestCase_Remark
* null
*/
VOID ItLosAtomic008(VOID)
{
TEST_ADD_CASE("ItLosAtomic008", TestCase, TEST_LOS, TEST_ATO, TEST_LEVEL0, TEST_FUNCTION);
}
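/*
 * Illustrative sketch only (not part of the original commit): a minimal single-task
 * use of LOS_AtomicCmpXchg64bits, the interface the test above exercises. It assumes
 * the 64-bit interface mirrors the 32-bit one, i.e. it writes the new value and
 * returns FALSE only when the current value still equals oldVal, and returns TRUE
 * otherwise. The name AtomicCmpXchg64UsageSketch is hypothetical and the function is
 * not registered with the test framework.
 */
static VOID AtomicCmpXchg64UsageSketch(VOID)
{
    Atomic64 value = 0;
    INT64 old;

    do {
        old = value;                 /* read the current value */
        /* retry while another task has changed the value since the read */
    } while (LOS_AtomicCmpXchg64bits(&value, old + 1, old));
    /* value has been advanced by exactly one at this point */
}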
#ifdef __cplusplus
#if __cplusplus
}
#endif /* __cplusplus */
#endif /* __cplusplus */


@ -0,0 +1,140 @@
/*
* Copyright (c) 2013-2019 Huawei Technologies Co., Ltd. All rights reserved.
* Copyright (c) 2020-2021 Huawei Device Co., Ltd. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this list of
* conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice, this list
* of conditions and the following disclaimer in the documentation and/or other materials
* provided with the distribution.
*
* 3. Neither the name of the copyright holder nor the names of its contributors may be used
* to endorse or promote products derived from this software without specific prior written
* permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "it_los_atomic.h"
#ifdef __cplusplus
#if __cplusplus
extern "C" {
#endif /* __cplusplus */
#endif /* __cplusplus */
#define DB_ATOMIC_MUTI_TASK_NUM (ATOMIC_MUTI_TASK_NUM * 2) // 2: double the task count, half for each entry function
static VOID TaskF01(VOID)
{
UINT32 i;
for (i = 0; i < 10; ++i) { // run 10 times.
LOS_Atomic64Dec(&g_testAtomicID05);
}
++g_testCount;
}
static VOID TaskF02(VOID)
{
UINT64 i;
for (i = 0; i < 10; ++i) { // run 10 times.
LOS_AtomicCmpXchg64bits(&g_testAtomicID05, g_testAtomicID05, g_testAtomicID05); /* exchange the value with itself to exercise the CAS path concurrently with TaskF01 */
}
++g_testCount;
}
static UINT32 TestCase(VOID)
{
UINT32 ret, i;
UINT32 taskId[DB_ATOMIC_MUTI_TASK_NUM];
TSK_INIT_PARAM_S task[DB_ATOMIC_MUTI_TASK_NUM] = {0, };
CHAR taskName[DB_ATOMIC_MUTI_TASK_NUM][20] = {"", }; // max taskName size is 20.
CHAR buf[10] = ""; // max buf size is 10.
Atomic uCount = 0;
g_testCount = 0;
g_testAtomicID05 = 0xffffffffff;
UINT32 uLoop = g_taskMaxNum - TASK_EXISTED_NUM;
if (uLoop > DB_ATOMIC_MUTI_TASK_NUM) {
uLoop = DB_ATOMIC_MUTI_TASK_NUM;
}
for (i = 0; i < uLoop; i++) {
memset(buf, 0, 10); // max buf size is 10.
memset(taskName[i], 0, 20); // max taskName size is 20.
if (i % 2 == 0) { // 2: alternate entries, even-numbered tasks run TaskF01
uCount++;
task[i].pfnTaskEntry = (TSK_ENTRY_FUNC)TaskF01;
} else {
task[i].pfnTaskEntry = (TSK_ENTRY_FUNC)TaskF02;
}
task[i].pcName = taskName[i];
task[i].uwStackSize = LOSCFG_BASE_CORE_TSK_DEFAULT_STACK_SIZE;
task[i].usTaskPrio = TASK_PRIO_TEST - 2; // TASK_PRIO_TEST - 2 has higher priority than TASK_PRIO_TEST
task[i].uwResved = LOS_TASK_STATUS_DETACHED;
}
for (i = 0; i < uLoop; i++) {
ret = LOS_TaskCreate(&taskId[i], &task[i]);
ICUNIT_GOTO_EQUAL(ret, LOS_OK, ret, EXIT);
}
LOS_TaskDelay(20); // delay 20 ticks.
ICUNIT_GOTO_EQUAL(g_testCount, uLoop, g_testCount, EXIT);
ICUNIT_GOTO_EQUAL(g_testAtomicID05, (0xffffffffff - 10 * uCount), g_testAtomicID05, EXIT); // each of the uCount TaskF01 instances decrements 10 times.
EXIT:
for (i = 0; i < uLoop; i++) {
(VOID)LOS_TaskDelete(taskId[i]);
}
return LOS_OK;
}
/**
* @ingroup TEST_ATO
* @par TestCase_Number
* ItLosAtomic009
* @par TestCase_TestCase_Type
* Function test
* @brief Test interface LOS_Atomic64Dec and LOS_AtomicCmpXchg64bits in different tasks.
* @par TestCase_Pretreatment_Condition
* NA.
* @par TestCase_Test_Steps
* step1: Invoke the LOS_Atomic64Dec interface in TaskF01.
* step2: Invoke the LOS_AtomicCmpXchg64bits interface in TaskF02.
* @par TestCase_Expected_Result
* 1. LOS_Atomic64Dec and LOS_AtomicCmpXchg64bits return the expected results.
* @par TestCase_Level
* Level 0
* @par TestCase_Automated
* true
* @par TestCase_Remark
* null
*/
VOID ItLosAtomic009(VOID)
{
TEST_ADD_CASE("ItLosAtomic009", TestCase, TEST_LOS, TEST_ATO, TEST_LEVEL0, TEST_FUNCTION);
}
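/*
 * Illustrative sketch only (not part of the original commit): the arithmetic the
 * assertion above verifies, reproduced for a single task. Every TaskF01 instance
 * performs 10 LOS_Atomic64Dec calls, so with uCount such instances the atomic ends
 * at 0xffffffffff - 10 * uCount. The name Atomic64DecSketch is hypothetical and the
 * function is not registered with the test framework.
 */
static VOID Atomic64DecSketch(VOID)
{
    Atomic64 counter = 0xffffffffff;
    UINT32 i;

    for (i = 0; i < 10; ++i) {   /* mirror one TaskF01 instance: 10 decrements */
        LOS_Atomic64Dec(&counter);
    }
    /* counter == 0xffffffffff - 10 at this point */
}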
#ifdef __cplusplus
#if __cplusplus
}
#endif /* __cplusplus */
#endif /* __cplusplus */


@ -136,6 +136,9 @@ UINT32 TaskUsedCountGet(VOID)
void TestKernel(void)
{
#if (LOS_KERNEL_ATOMIC_TEST == 1)
ItSuiteLosAtomic();
#endif
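/*
 * Hedged note (not part of the original change): LOS_KERNEL_ATOMIC_TEST is assumed to
 * be a compile-time switch defined in the test configuration, e.g. something like
 *
 *     #define LOS_KERNEL_ATOMIC_TEST 1
 *
 * so that ItSuiteLosAtomic() above is registered only when the atomic tests are enabled.
 */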
#if (LOS_KERNEL_CORE_TASK_TEST == 1)
ItSuiteLosTask();
#endif


@ -321,6 +321,9 @@ typedef signed long long INT64;
typedef unsigned int UINTPTR;
typedef signed int INTPTR;
typedef volatile INT32 Atomic;
typedef volatile INT64 Atomic64;
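/*
 * Hedged usage sketch (illustrative, not part of the original change): Atomic and
 * Atomic64 are volatile integer typedefs intended to be manipulated only through the
 * LOS_Atomic* interfaces, for example:
 *
 *     Atomic   ref     = 0;
 *     Atomic64 counter = 0;
 *     LOS_AtomicInc(&ref);
 *     LOS_Atomic64Dec(&counter);
 */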
#define VOID void
#ifndef FALSE