svn commit: r197824 - in head/sys: amd64/include i386/include
Attilio Rao
attilio at FreeBSD.org
Tue Oct 6 23:48:29 UTC 2009
Author: attilio
Date: Tue Oct 6 23:48:28 2009
New Revision: 197824
URL: http://svn.freebsd.org/changeset/base/197824
Log:
- All the functions in atomic.h need to exist in "physical" form (that is,
  not defined through macros or similar aliases) so that they can later be
  compiled into the kernel and thus offer support for modules (and
  compatibility between the UP and SMP cases); a brief usage sketch follows
  the log below.
  Fix this for the newly introduced atomic_cmpset_barr_* cases by defining
  and instantiating a template. Note that the new DEFINE_CMPSET_GEN()
  template actually saves typing on amd64 compared to the current code. [1]
- Fix the style of the memory barriers on amd64.

[1] Reported by: Paul B. Mahol <onemda at gmail dot com>
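For readers skimming the diff, here is a minimal, hypothetical consumer of the
compare-and-set primitive; it only assumes the atomic_cmpset_int() signature
shown in the diff (volatile u_int *dst, u_int exp, u_int src, returning
non-zero on success), and the example_flag/example_try_set names are made up:

/*
 * Illustrative sketch only (not part of this commit): a hypothetical kernel
 * consumer of the compare-and-set primitive.  The primitive exists in
 * "physical" form, so modules can also link against a real symbol provided
 * by the kernel, as the log message explains.
 */
#include <sys/types.h>
#include <machine/atomic.h>

static volatile u_int example_flag;	/* hypothetical flag word */

static int
example_try_set(void)
{

	/* Atomically change example_flag from 0 to 1; returns 0 if we raced. */
	return (atomic_cmpset_int(&example_flag, 0, 1));
}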
Modified:
head/sys/amd64/include/atomic.h
head/sys/i386/include/atomic.h
Modified: head/sys/amd64/include/atomic.h
==============================================================================
--- head/sys/amd64/include/atomic.h Tue Oct 6 22:21:51 2009 (r197823)
+++ head/sys/amd64/include/atomic.h Tue Oct 6 23:48:28 2009 (r197824)
@@ -32,9 +32,9 @@
#error this file needs sys/cdefs.h as a prerequisite
#endif
-#define mb() __asm__ __volatile__ ("mfence;": : :"memory")
-#define wmb() __asm__ __volatile__ ("sfence;": : :"memory")
-#define rmb() __asm__ __volatile__ ("lfence;": : :"memory")
+#define mb() __asm __volatile("mfence;" : : : "memory")
+#define wmb() __asm __volatile("sfence;" : : : "memory")
+#define rmb() __asm __volatile("lfence;" : : : "memory")
/*
* Various simple operations on memory, each of which is atomic in the
@@ -131,50 +131,33 @@ struct __hack
* Returns 0 on failure, non-zero on success
*/
-static __inline int
-atomic_cmpset_int(volatile u_int *dst, u_int exp, u_int src)
-{
- u_char res;
-
- __asm __volatile(
- " " MPLOCKED " "
- " cmpxchgl %2,%1 ; "
- " sete %0 ; "
- "1: "
- "# atomic_cmpset_int"
- : "=a" (res), /* 0 */
- "=m" (*dst) /* 1 */
- : "r" (src), /* 2 */
- "a" (exp), /* 3 */
- "m" (*dst) /* 4 */
- : "memory");
-
- return (res);
-}
-
-static __inline int
-atomic_cmpset_long(volatile u_long *dst, u_long exp, u_long src)
-{
- u_char res;
-
- __asm __volatile(
- " " MPLOCKED " "
- " cmpxchgq %2,%1 ; "
- " sete %0 ; "
- "1: "
- "# atomic_cmpset_long"
- : "=a" (res), /* 0 */
- "=m" (*dst) /* 1 */
- : "r" (src), /* 2 */
- "a" (exp), /* 3 */
- "m" (*dst) /* 4 */
- : "memory");
-
- return (res);
-}
+#define DEFINE_CMPSET_GEN(NAME, TYPE, OP) \
+static __inline int \
+atomic_cmpset_##NAME(volatile u_##TYPE *dst, u_##TYPE exp, u_##TYPE src)\
+{ \
+ u_char res; \
+ \
+ __asm __volatile( \
+ " " MPLOCKED " " \
+ " " OP " %2,%1 ; " \
+ " sete %0 ; " \
+ "1: " \
+ "# atomic_cmpset_##NAME" \
+ : "=a" (res), /* 0 */ \
+ "=m" (*dst) /* 1 */ \
+ : "r" (src), /* 2 */ \
+ "a" (exp), /* 3 */ \
+ "m" (*dst) /* 4 */ \
+ : "memory"); \
+ \
+ return (res); \
+} \
+struct __hack
-#define atomic_cmpset_barr_int atomic_cmpset_int
-#define atomic_cmpset_barr_long atomic_cmpset_long
+DEFINE_CMPSET_GEN(int, int, "cmpxchgl");
+DEFINE_CMPSET_GEN(long, long, "cmpxchgq");
+DEFINE_CMPSET_GEN(barr_int, int, "cmpxchgl");
+DEFINE_CMPSET_GEN(barr_long, long, "cmpxchgq");
/*
* Atomically add the value of v to the integer pointed to by p and return
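As a rough illustration of how the newly materialized *_barr_* functions get
used, the toy fragment below spins on a hypothetical lock word.  It is not
FreeBSD's mtx(9) code; the toy_* names are invented, and it assumes the usual
cpu_spinwait() and atomic_store_rel_int() helpers are available:

/*
 * Toy example, not code from the tree: acquire/release a made-up lock word
 * with the barrier-including cmpset variant generated by
 * DEFINE_CMPSET_GEN(barr_int, int, "cmpxchgl") above.
 */
static volatile u_int toy_lock;		/* 0 = free, 1 = owned */

static void
toy_acquire(void)
{

	/* Spin until we atomically move toy_lock from 0 to 1. */
	while (atomic_cmpset_barr_int(&toy_lock, 0, 1) == 0)
		cpu_spinwait();		/* pause hint while busy-waiting */
}

static void
toy_release(void)
{

	atomic_store_rel_int(&toy_lock, 0);	/* publish the unlock */
}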
Modified: head/sys/i386/include/atomic.h
==============================================================================
--- head/sys/i386/include/atomic.h Tue Oct 6 22:21:51 2009 (r197823)
+++ head/sys/i386/include/atomic.h Tue Oct 6 23:48:28 2009 (r197824)
@@ -130,57 +130,62 @@ struct __hack
#ifdef CPU_DISABLE_CMPXCHG
-static __inline int
-atomic_cmpset_int(volatile u_int *dst, u_int exp, u_int src)
-{
- u_char res;
-
- __asm __volatile(
- " pushfl ; "
- " cli ; "
- " cmpl %3,%4 ; "
- " jne 1f ; "
- " movl %2,%1 ; "
- "1: "
- " sete %0 ; "
- " popfl ; "
- "# atomic_cmpset_int"
- : "=q" (res), /* 0 */
- "=m" (*dst) /* 1 */
- : "r" (src), /* 2 */
- "r" (exp), /* 3 */
- "m" (*dst) /* 4 */
- : "memory");
-
- return (res);
-}
+#define DEFINE_CMPSET_GEN(NAME) \
+static __inline int \
+atomic_cmpset_##NAME(volatile u_int *dst, u_int exp, u_int src)\
+{ \
+ u_char res; \
+ \
+ __asm __volatile( \
+ " pushfl ; " \
+ " cli ; " \
+ " cmpl %3,%4 ; " \
+ " jne 1f ; " \
+ " movl %2,%1 ; " \
+ "1: " \
+ " sete %0 ; " \
+ " popfl ; " \
+ "# atomic_cmpset_##NAME" \
+ : "=q" (res), /* 0 */ \
+ "=m" (*dst) /* 1 */ \
+ : "r" (src), /* 2 */ \
+ "r" (exp), /* 3 */ \
+ "m" (*dst) /* 4 */ \
+ : "memory"); \
+ \
+ return (res); \
+} \
+struct __hack
#else /* !CPU_DISABLE_CMPXCHG */
-static __inline int
-atomic_cmpset_int(volatile u_int *dst, u_int exp, u_int src)
-{
- u_char res;
-
- __asm __volatile(
- " " MPLOCKED " "
- " cmpxchgl %2,%1 ; "
- " sete %0 ; "
- "1: "
- "# atomic_cmpset_int"
- : "=a" (res), /* 0 */
- "=m" (*dst) /* 1 */
- : "r" (src), /* 2 */
- "a" (exp), /* 3 */
- "m" (*dst) /* 4 */
- : "memory");
-
- return (res);
-}
+#define DEFINE_CMPSET_GEN(NAME) \
+static __inline int \
+atomic_cmpset_##NAME(volatile u_int *dst, u_int exp, u_int src)\
+{ \
+ u_char res; \
+ \
+ __asm __volatile( \
+ " " MPLOCKED " " \
+ " cmpxchgl %2,%1 ; " \
+ " sete %0 ; " \
+ "1: " \
+ "# atomic_cmpset_##NAME" \
+ : "=a" (res), /* 0 */ \
+ "=m" (*dst) /* 1 */ \
+ : "r" (src), /* 2 */ \
+ "a" (exp), /* 3 */ \
+ "m" (*dst) /* 4 */ \
+ : "memory"); \
+ \
+ return (res); \
+} \
+struct __hack
#endif /* CPU_DISABLE_CMPXCHG */
-#define atomic_cmpset_barr_int atomic_cmpset_int
+DEFINE_CMPSET_GEN(int);
+DEFINE_CMPSET_GEN(barr_int);
/*
* Atomically add the value of v to the integer pointed to by p and return
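For the CPU_DISABLE_CMPXCHG branch above, the asm may be easier to read as the
following C-level paraphrase.  This is an illustration only: the function name
is made up, it assumes the usual intr_disable()/intr_restore() helpers, and the
trick is only correct when no other CPU can touch the word concurrently:

/*
 * C-level paraphrase of the CPU_DISABLE_CMPXCHG asm (illustration only):
 * emulate compare-and-set by briefly disabling interrupts, which works
 * only when no other CPU can access *dst at the same time.
 */
static __inline int
cmpset_int_without_cmpxchg(volatile u_int *dst, u_int exp, u_int src)
{
	register_t eflags;
	int res;

	eflags = intr_disable();	/* pushfl ; cli */
	res = (*dst == exp);		/* cmpl %3,%4 */
	if (res)
		*dst = src;		/* movl %2,%1 */
	intr_restore(eflags);		/* popfl */
	return (res);
}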