c1b3b6cab0
New files missing from 58050c28.
Signed-off-by: Maxim Grigoriev <maxim2405@gmail.com>
Signed-off-by: Peter Korsgaard <jacmet@sunsite.dk>
166 lines
8.7 KiB
Diff
--- /dev/null 2008-09-18 06:50:54.356228028 -0700
+++ uClibc-0.9.29/libc/sysdeps/linux/xtensa/bits/atomic.h 2008-10-04 11:40:21.000000000 -0700
@@ -0,0 +1,162 @@
+/*
+ * Copyright (C) 2008 Tensilica, Inc.
+ * Contributed by Joe Taylor <joe@tensilica.com> 2008
+ *
+ * This file is subject to the terms and conditions of the GNU Lesser General
+ * Public License. See the file "COPYING.LIB" in the main directory of this
+ * archive for more details.
+ */
+
+#ifndef _XTENSA_BITS_ATOMIC_H
+#define _XTENSA_BITS_ATOMIC_H 1
+
+/* Xtensa has only a 32-bit form of a store-conditional instruction,
+ so just stub out the rest. */
+
+/* Atomically store NEWVAL in *MEM if *MEM is equal to OLDVAL.
+ Return the old *MEM value. */
+
+#define __arch_compare_and_exchange_val_32_acq(mem, newval, oldval) \
+ ({__typeof__(*(mem)) __tmp, __value; \
+ __asm__ __volatile__( \
+ "1: l32i %1, %2, 0 \n" \
+ " bne %1, %4, 2f \n" \
+ " wsr %1, SCOMPARE1 \n" \
+ " mov %0, %1 \n" \
+ " mov %1, %3 \n" \
+ " s32c1i %1, %2, 0 \n" \
+ " bne %0, %1, 1b \n" \
+ "2: \n" \
+ : "=&a" (__value), "=&a" (__tmp) \
+ : "a" (mem), "a" (newval), "a" (oldval) \
+ : "memory" ); \
+ __tmp; \
+ })
+
+/* Atomically store NEWVAL in *MEM if *MEM is equal to OLDVAL.
+ Return zero if *MEM was changed or non-zero if no exchange happened. */
+
+#define __arch_compare_and_exchange_bool_32_acq(mem, newval, oldval) \
+ ({__typeof__(*(mem)) __tmp, __value; \
+ __asm__ __volatile__( \
+ "1: l32i %0, %2, 0 \n" \
+ " sub %1, %4, %0 \n" \
+ " bnez %1, 2f \n" \
+ " wsr %0, SCOMPARE1 \n" \
+ " mov %1, %3 \n" \
+ " s32c1i %1, %2, 0 \n" \
+ " bne %0, %1, 1b \n" \
+ " movi %1, 0 \n" \
+ "2: \n" \
+ : "=&a" (__value), "=&a" (__tmp) \
+ : "a" (mem), "a" (newval), "a" (oldval) \
+ : "memory" ); \
+ __tmp != 0; \
+ })
+
+/* Store NEWVALUE in *MEM and return the old value. */
+
+#define __arch_exchange_32_acq(mem, newval) \
+ ({__typeof__(*(mem)) __tmp, __value; \
+ __asm__ __volatile__( \
+ "1: l32i %0, %2, 0 \n" \
+ " wsr %0, SCOMPARE1 \n" \
+ " mov %1, %3 \n" \
+ " s32c1i %1, %2, 0 \n" \
+ " bne %0, %1, 1b \n" \
+ : "=&a" (__value), "=&a" (__tmp) \
+ : "a" (mem), "a" (newval) \
+ : "memory" ); \
+ __tmp; \
+ })
+
+/* Add VALUE to *MEM and return the old value of *MEM. */
+
+#define __arch_atomic_exchange_and_add_32(mem, value) \
+ ({__typeof__(*(mem)) __tmp, __value; \
+ __asm__ __volatile__( \
+ "1: l32i %0, %2, 0 \n" \
+ " wsr %0, SCOMPARE1 \n" \
+ " add %1, %0, %3 \n" \
+ " s32c1i %1, %2, 0 \n" \
+ " bne %0, %1, 1b \n" \
+ : "=&a" (__value), "=&a" (__tmp) \
+ : "a" (mem), "a" (value) \
+ : "memory" ); \
+ __tmp; \
+ })
+
+/* Subtract VALUE from *MEM and return the old value of *MEM. */
+
+#define __arch_atomic_exchange_and_sub_32(mem, value) \
+ ({__typeof__(*(mem)) __tmp, __value; \
+ __asm__ __volatile__( \
+ "1: l32i %0, %2, 0 \n" \
+ " wsr %0, SCOMPARE1 \n" \
+ " sub %1, %0, %3 \n" \
+ " s32c1i %1, %2, 0 \n" \
+ " bne %0, %1, 1b \n" \
+ : "=&a" (__value), "=&a" (__tmp) \
+ : "a" (mem), "a" (value) \
+ : "memory" ); \
+ __tmp; \
+ })
+
+/* Decrement *MEM if it is > 0, and return the old value. */
+
+#define __arch_atomic_decrement_if_positive_32(mem) \
+ ({__typeof__(*(mem)) __tmp, __value; \
+ __asm__ __volatile__( \
+ "1: l32i %0, %2, 0 \n" \
+ " blti %0, 1, 2f \n" \
+ " wsr %0, SCOMPARE1 \n" \
+ " addi %1, %0, -1 \n" \
+ " s32c1i %1, %2, 0 \n" \
+ " bne %0, %1, 1b \n" \
+ "2: \n" \
+ : "=&a" (__value), "=&a" (__tmp) \
+ : "a" (mem) \
+ : "memory" ); \
+ __tmp; \
+ })
+
+
+/* These are the preferred public interfaces: */
+
+#define atomic_compare_and_exchange_val_acq(mem, newval, oldval) \
+ ({ \
+ if (sizeof (*mem) != 4) \
+ abort(); \
+ __arch_compare_and_exchange_val_32_acq(mem, newval, oldval); \
+ })
+
+#define atomic_exchange_acq(mem, newval) \
+ ({ \
+ if (sizeof(*(mem)) != 4) \
+ abort(); \
+ __arch_exchange_32_acq(mem, newval); \
+ })
+
+#define atomic_exchange_and_add(mem, newval) \
+ ({ \
+ if (sizeof(*(mem)) != 4) \
+ abort(); \
+ __arch_atomic_exchange_and_add_32(mem, newval); \
+ })
+
+#define atomic_exchange_and_sub(mem, newval) \
+ ({ \
+ if (sizeof(*(mem)) != 4) \
+ abort(); \
+ __arch_atomic_exchange_and_sub_32(mem, newval); \
+ })
+
+#define atomic_decrement_if_positive(mem) \
+ ({ \
+ if (sizeof(*(mem)) != 4) \
+ abort(); \
+ __arch_atomic_decrement_if_positive_32(mem); \
+ })
+
+#endif /* _XTENSA_BITS_ATOMIC_H */
+
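
Each macro in the patch follows the same S32C1I pattern: load the current value with L32I, place it in SCOMPARE1, attempt the conditional store with S32C1I, and retry if the value S32C1I reports back differs from the one originally loaded (meaning another writer got in between). For reference, below is a minimal usage sketch of the public interfaces the patch defines; it is not part of the patch. It assumes an Xtensa core configured with the S32C1I/SCOMPARE1 option, and the counter variable and surrounding program are purely illustrative. In practice these macros are used inside uClibc itself (e.g. by libpthread) via the internal atomic.h rather than included directly by applications.

/* Illustrative only: exercises the public interfaces defined above.
   Builds only for Xtensa targets whose configuration provides S32C1I. */
#include <stdlib.h>
#include <stdio.h>
#include <bits/atomic.h>	/* normally reached through the internal atomic.h */

static volatile int counter = 1;

int main(void)
{
	int old;

	/* Add 2 and get the previous value back (counter: 1 -> 3). */
	old = atomic_exchange_and_add(&counter, 2);

	/* Swap in 10 only if the current value is still 3; the macro
	   returns whatever value was actually found in memory. */
	old = atomic_compare_and_exchange_val_acq(&counter, 10, 3);

	/* Unconditionally replace the value, returning the old one. */
	old = atomic_exchange_acq(&counter, 7);

	/* Drop the count, but never take it below zero. */
	old = atomic_decrement_if_positive(&counter);

	printf("old %d, counter now %d\n", old, counter);
	return 0;
}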