* [PATCH 1/2] x86: add user_atomic_cmpxchg_inatomic at uaccess.h
@ 2013-12-14 6:25 Qiaowei Ren
2013-12-14 6:25 ` [PATCH 2/2] x86: replace futex_atomic_cmpxchg_inatomic() with user_atomic_cmpxchg_inatomic Qiaowei Ren
` (3 more replies)
0 siblings, 4 replies; 6+ messages in thread
From: Qiaowei Ren @ 2013-12-14 6:25 UTC (permalink / raw)
To: H. Peter Anvin, Thomas Gleixner, Ingo Molnar
Cc: x86, linux-kernel, Qiaowei Ren
This patch adds user_atomic_cmpxchg_inatomic() to use CMPXCHG
instruction against a user space address.
This generalizes the already existing futex_atomic_cmpxchg_inatomic()
so it can be used in other contexts. This will be used in the
upcoming support for Intel MPX (Memory Protection Extensions).
Signed-off-by: Qiaowei Ren <qiaowei.ren@intel.com>
---
arch/x86/include/asm/uaccess.h | 91 ++++++++++++++++++++++++++++++++++++++++
1 files changed, 91 insertions(+), 0 deletions(-)
diff --git a/arch/x86/include/asm/uaccess.h b/arch/x86/include/asm/uaccess.h
index 5838fa9..894d8bf 100644
--- a/arch/x86/include/asm/uaccess.h
+++ b/arch/x86/include/asm/uaccess.h
@@ -525,6 +525,97 @@ extern __must_check long strnlen_user(const char __user *str, long n);
unsigned long __must_check clear_user(void __user *mem, unsigned long len);
unsigned long __must_check __clear_user(void __user *mem, unsigned long len);
+extern void __cmpxchg_wrong_size(void)
+ __compiletime_error("Bad argument size for cmpxchg");
+
+#define __user_atomic_cmpxchg_inatomic(uval, ptr, old, new, size) \
+({ \
+ int __ret = 0; \
+ __typeof__(ptr) __uval = (uval); \
+ __typeof__(*(ptr)) __old = (old); \
+ __typeof__(*(ptr)) __new = (new); \
+ switch (size) { \
+ case 1: \
+ { \
+ asm volatile("\t" ASM_STAC "\n" \
+ "1:\t" LOCK_PREFIX "cmpxchgb %4, %2\n" \
+ "2:\t" ASM_CLAC "\n" \
+ "\t.section .fixup, \"ax\"\n" \
+ "3:\tmov %3, %0\n" \
+ "\tjmp 2b\n" \
+ "\t.previous\n" \
+ _ASM_EXTABLE(1b, 3b) \
+ : "+r" (__ret), "=a" (__old), "+m" (*(ptr)) \
+ : "i" (-EFAULT), "q" (__new), "1" (__old) \
+ : "memory" \
+ ); \
+ break; \
+ } \
+ case 2: \
+ { \
+ asm volatile("\t" ASM_STAC "\n" \
+ "1:\t" LOCK_PREFIX "cmpxchgw %4, %2\n" \
+ "2:\t" ASM_CLAC "\n" \
+ "\t.section .fixup, \"ax\"\n" \
+ "3:\tmov %3, %0\n" \
+ "\tjmp 2b\n" \
+ "\t.previous\n" \
+ _ASM_EXTABLE(1b, 3b) \
+ : "+r" (__ret), "=a" (__old), "+m" (*(ptr)) \
+ : "i" (-EFAULT), "r" (__new), "1" (__old) \
+ : "memory" \
+ ); \
+ break; \
+ } \
+ case 4: \
+ { \
+ asm volatile("\t" ASM_STAC "\n" \
+ "1:\t" LOCK_PREFIX "cmpxchgl %4, %2\n" \
+ "2:\t" ASM_CLAC "\n" \
+ "\t.section .fixup, \"ax\"\n" \
+ "3:\tmov %3, %0\n" \
+ "\tjmp 2b\n" \
+ "\t.previous\n" \
+ _ASM_EXTABLE(1b, 3b) \
+ : "+r" (__ret), "=a" (__old), "+m" (*(ptr)) \
+ : "i" (-EFAULT), "r" (__new), "1" (__old) \
+ : "memory" \
+ ); \
+ break; \
+ } \
+#ifdef CONFIG_X86_64 \
+ case 8: \
+ { \
+ asm volatile("\t" ASM_STAC "\n" \
+ "1:\t" LOCK_PREFIX "cmpxchgq %4, %2\n" \
+ "2:\t" ASM_CLAC "\n" \
+ "\t.section .fixup, \"ax\"\n" \
+ "3:\tmov %3, %0\n" \
+ "\tjmp 2b\n" \
+ "\t.previous\n" \
+ _ASM_EXTABLE(1b, 3b) \
+ : "+r" (__ret), "=a" (__old), "+m" (*(ptr)) \
+ : "i" (-EFAULT), "r" (__new), "1" (__old) \
+ : "memory" \
+ ); \
+ break; \
+ } \
+#endif \
+ default: \
+ __cmpxchg_wrong_size(); \
+ } \
+ *__uval = __old; \
+ __ret; \
+})
+
+#define user_atomic_cmpxchg_inatomic(uval, ptr, old, new) \
+({ \
+ access_ok(VERIFY_WRITE, (ptr), sizeof(*(ptr))) ? \
+ __user_atomic_cmpxchg_inatomic((uval), (ptr), \
+ (old), (new), sizeof(*(ptr))) : \
+ -EFAULT; \
+})
+
/*
* movsl can be slow when source and dest are not both 8-byte aligned
*/
--
1.7.1
^ permalink raw reply related [flat|nested] 6+ messages in thread* [PATCH 2/2] x86: replace futex_atomic_cmpxchg_inatomic() with user_atomic_cmpxchg_inatomic
2013-12-14 6:25 [PATCH 1/2] x86: add user_atomic_cmpxchg_inatomic at uaccess.h Qiaowei Ren
@ 2013-12-14 6:25 ` Qiaowei Ren
2013-12-16 17:24 ` [tip:x86/mpx] " tip-bot for Qiaowei Ren
2013-12-16 12:21 ` [PATCH 1/2] x86: add user_atomic_cmpxchg_inatomic at uaccess.h Ren, Qiaowei
` (2 subsequent siblings)
3 siblings, 1 reply; 6+ messages in thread
From: Qiaowei Ren @ 2013-12-14 6:25 UTC (permalink / raw)
To: H. Peter Anvin, Thomas Gleixner, Ingo Molnar
Cc: x86, linux-kernel, Qiaowei Ren
futex_atomic_cmpxchg_inatomic() is only the 32-bit implementation of
user_atomic_cmpxchg_inatomic(). This patch replaces it with
user_atomic_cmpxchg_inatomic().
Signed-off-by: Qiaowei Ren <qiaowei.ren@intel.com>
---
arch/x86/include/asm/futex.h | 27 ++-------------------------
1 files changed, 2 insertions(+), 25 deletions(-)
diff --git a/arch/x86/include/asm/futex.h b/arch/x86/include/asm/futex.h
index be27ba1..a9f7de4 100644
--- a/arch/x86/include/asm/futex.h
+++ b/arch/x86/include/asm/futex.h
@@ -41,6 +41,8 @@
"+m" (*uaddr), "=&r" (tem) \
: "r" (oparg), "i" (-EFAULT), "1" (0))
+#define futex_atomic_cmpxchg_inatomic user_atomic_cmpxchg_inatomic
+
static inline int futex_atomic_op_inuser(int encoded_op, u32 __user *uaddr)
{
int op = (encoded_op >> 28) & 7;
@@ -107,30 +109,5 @@ static inline int futex_atomic_op_inuser(int encoded_op, u32 __user *uaddr)
return ret;
}
-static inline int futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
- u32 oldval, u32 newval)
-{
- int ret = 0;
-
- if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
- return -EFAULT;
-
- asm volatile("\t" ASM_STAC "\n"
- "1:\t" LOCK_PREFIX "cmpxchgl %4, %2\n"
- "2:\t" ASM_CLAC "\n"
- "\t.section .fixup, \"ax\"\n"
- "3:\tmov %3, %0\n"
- "\tjmp 2b\n"
- "\t.previous\n"
- _ASM_EXTABLE(1b, 3b)
- : "+r" (ret), "=a" (oldval), "+m" (*uaddr)
- : "i" (-EFAULT), "r" (newval), "1" (oldval)
- : "memory"
- );
-
- *uval = oldval;
- return ret;
-}
-
#endif
#endif /* _ASM_X86_FUTEX_H */
--
1.7.1
^ permalink raw reply related [flat|nested] 6+ messages in thread* [tip:x86/mpx] x86: replace futex_atomic_cmpxchg_inatomic() with user_atomic_cmpxchg_inatomic
2013-12-14 6:25 ` [PATCH 2/2] x86: replace futex_atomic_cmpxchg_inatomic() with user_atomic_cmpxchg_inatomic Qiaowei Ren
@ 2013-12-16 17:24 ` tip-bot for Qiaowei Ren
0 siblings, 0 replies; 6+ messages in thread
From: tip-bot for Qiaowei Ren @ 2013-12-16 17:24 UTC (permalink / raw)
To: linux-tip-commits
Cc: linux-kernel, qiaowei.ren, hpa, mingo, a.p.zijlstra, tglx, hpa
Commit-ID: 0ee3b6f87d4d748d5362cb47ff33fa1553805cb4
Gitweb: http://git.kernel.org/tip/0ee3b6f87d4d748d5362cb47ff33fa1553805cb4
Author: Qiaowei Ren <qiaowei.ren@intel.com>
AuthorDate: Sat, 14 Dec 2013 14:25:03 +0800
Committer: H. Peter Anvin <hpa@linux.intel.com>
CommitDate: Mon, 16 Dec 2013 09:08:13 -0800
x86: replace futex_atomic_cmpxchg_inatomic() with user_atomic_cmpxchg_inatomic
futex_atomic_cmpxchg_inatomic() is simply the 32-bit implementation of
user_atomic_cmpxchg_inatomic(), which in turn is simply a
generalization of the original code in
futex_atomic_cmpxchg_inatomic().
Use the newly generalized user_atomic_cmpxchg_inatomic() as the futex
implementation, too.
[ hpa: retain the inline in futex.h rather than changing it to a macro ]
Signed-off-by: Qiaowei Ren <qiaowei.ren@intel.com>
Link: http://lkml.kernel.org/r/1387002303-6620-2-git-send-email-qiaowei.ren@intel.com
Signed-off-by: H. Peter Anvin <hpa@linux.intel.com>
Cc: Peter Zijlstra <a.p.zijlstra@chello.nl>
---
arch/x86/include/asm/futex.h | 21 +--------------------
1 file changed, 1 insertion(+), 20 deletions(-)
diff --git a/arch/x86/include/asm/futex.h b/arch/x86/include/asm/futex.h
index be27ba1..b4c1f54 100644
--- a/arch/x86/include/asm/futex.h
+++ b/arch/x86/include/asm/futex.h
@@ -110,26 +110,7 @@ static inline int futex_atomic_op_inuser(int encoded_op, u32 __user *uaddr)
static inline int futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
u32 oldval, u32 newval)
{
- int ret = 0;
-
- if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
- return -EFAULT;
-
- asm volatile("\t" ASM_STAC "\n"
- "1:\t" LOCK_PREFIX "cmpxchgl %4, %2\n"
- "2:\t" ASM_CLAC "\n"
- "\t.section .fixup, \"ax\"\n"
- "3:\tmov %3, %0\n"
- "\tjmp 2b\n"
- "\t.previous\n"
- _ASM_EXTABLE(1b, 3b)
- : "+r" (ret), "=a" (oldval), "+m" (*uaddr)
- : "i" (-EFAULT), "r" (newval), "1" (oldval)
- : "memory"
- );
-
- *uval = oldval;
- return ret;
+ return user_atomic_cmpxchg_inatomic(uval, uaddr, oldval, newval);
}
#endif
^ permalink raw reply related [flat|nested] 6+ messages in thread
* RE: [PATCH 1/2] x86: add user_atomic_cmpxchg_inatomic at uaccess.h
2013-12-14 6:25 [PATCH 1/2] x86: add user_atomic_cmpxchg_inatomic at uaccess.h Qiaowei Ren
2013-12-14 6:25 ` [PATCH 2/2] x86: replace futex_atomic_cmpxchg_inatomic() with user_atomic_cmpxchg_inatomic Qiaowei Ren
@ 2013-12-16 12:21 ` Ren, Qiaowei
2013-12-16 16:55 ` H. Peter Anvin
2013-12-16 17:24 ` [tip:x86/mpx] " tip-bot for Qiaowei Ren
3 siblings, 0 replies; 6+ messages in thread
From: Ren, Qiaowei @ 2013-12-16 12:21 UTC (permalink / raw)
To: H. Peter Anvin, Thomas Gleixner, Ingo Molnar
Cc: x86@kernel.org, linux-kernel@vger.kernel.org
Qiaowei Ren wrote:
> This patch adds user_atomic_cmpxchg_inatomic() to use CMPXCHG instruction
> against a user space address.
>
> This generalizes the already existing futex_atomic_cmpxchg_inatomic() so it
> can be used in other contexts. This will be used in the upcoming support for
> Intel MPX (Memory Protection Extensions.)
>
Peter, do you have any other comments for this version?
Thanks,
Qiaowei
^ permalink raw reply [flat|nested] 6+ messages in thread
* Re: [PATCH 1/2] x86: add user_atomic_cmpxchg_inatomic at uaccess.h
2013-12-14 6:25 [PATCH 1/2] x86: add user_atomic_cmpxchg_inatomic at uaccess.h Qiaowei Ren
2013-12-14 6:25 ` [PATCH 2/2] x86: replace futex_atomic_cmpxchg_inatomic() with user_atomic_cmpxchg_inatomic Qiaowei Ren
2013-12-16 12:21 ` [PATCH 1/2] x86: add user_atomic_cmpxchg_inatomic at uaccess.h Ren, Qiaowei
@ 2013-12-16 16:55 ` H. Peter Anvin
2013-12-16 17:24 ` [tip:x86/mpx] " tip-bot for Qiaowei Ren
3 siblings, 0 replies; 6+ messages in thread
From: H. Peter Anvin @ 2013-12-16 16:55 UTC (permalink / raw)
To: Qiaowei Ren, Thomas Gleixner, Ingo Molnar; +Cc: x86, linux-kernel
On 12/13/2013 10:25 PM, Qiaowei Ren wrote:
> +#ifdef CONFIG_X86_64 \
> + case 8: \
> + { \
> + asm volatile("\t" ASM_STAC "\n" \
> + "1:\t" LOCK_PREFIX "cmpxchgq %4, %2\n" \
> + "2:\t" ASM_CLAC "\n" \
> + "\t.section .fixup, \"ax\"\n" \
> + "3:\tmov %3, %0\n" \
> + "\tjmp 2b\n" \
> + "\t.previous\n" \
> + _ASM_EXTABLE(1b, 3b) \
> + : "+r" (__ret), "=a" (__old), "+m" (*(ptr)) \
> + : "i" (-EFAULT), "r" (__new), "1" (__old) \
> + : "memory" \
> + ); \
> + break; \
> + } \
> +#endif \
This, of course, doesn't work because this is embedded in a macro. I
realize I requested it, but you should still have tested it.
Anyway, I'll fix it up.
-hpa
^ permalink raw reply [flat|nested] 6+ messages in thread* [tip:x86/mpx] x86: add user_atomic_cmpxchg_inatomic at uaccess.h
2013-12-14 6:25 [PATCH 1/2] x86: add user_atomic_cmpxchg_inatomic at uaccess.h Qiaowei Ren
` (2 preceding siblings ...)
2013-12-16 16:55 ` H. Peter Anvin
@ 2013-12-16 17:24 ` tip-bot for Qiaowei Ren
3 siblings, 0 replies; 6+ messages in thread
From: tip-bot for Qiaowei Ren @ 2013-12-16 17:24 UTC (permalink / raw)
To: linux-tip-commits
Cc: linux-kernel, qiaowei.ren, hpa, mingo, a.p.zijlstra, tglx, hpa
Commit-ID: f09174c501f8bb259788cc36d5a7aa5b2831fb5e
Gitweb: http://git.kernel.org/tip/f09174c501f8bb259788cc36d5a7aa5b2831fb5e
Author: Qiaowei Ren <qiaowei.ren@intel.com>
AuthorDate: Sat, 14 Dec 2013 14:25:02 +0800
Committer: H. Peter Anvin <hpa@linux.intel.com>
CommitDate: Mon, 16 Dec 2013 09:07:57 -0800
x86: add user_atomic_cmpxchg_inatomic at uaccess.h
This patch adds user_atomic_cmpxchg_inatomic() to use CMPXCHG
instruction against a user space address.
This generalizes the already existing futex_atomic_cmpxchg_inatomic()
so it can be used in other contexts. This will be used in the
upcoming support for Intel MPX (Memory Protection Extensions).
[ hpa: replaced #ifdef inside a macro with IS_ENABLED() ]
Signed-off-by: Qiaowei Ren <qiaowei.ren@intel.com>
Link: http://lkml.kernel.org/r/1387002303-6620-1-git-send-email-qiaowei.ren@intel.com
Signed-off-by: H. Peter Anvin <hpa@linux.intel.com>
Cc: Peter Zijlstra <a.p.zijlstra@chello.nl>
---
arch/x86/include/asm/uaccess.h | 92 ++++++++++++++++++++++++++++++++++++++++++
1 file changed, 92 insertions(+)
diff --git a/arch/x86/include/asm/uaccess.h b/arch/x86/include/asm/uaccess.h
index 8ec57c0..48ff838 100644
--- a/arch/x86/include/asm/uaccess.h
+++ b/arch/x86/include/asm/uaccess.h
@@ -525,6 +525,98 @@ extern __must_check long strnlen_user(const char __user *str, long n);
unsigned long __must_check clear_user(void __user *mem, unsigned long len);
unsigned long __must_check __clear_user(void __user *mem, unsigned long len);
+extern void __cmpxchg_wrong_size(void)
+ __compiletime_error("Bad argument size for cmpxchg");
+
+#define __user_atomic_cmpxchg_inatomic(uval, ptr, old, new, size) \
+({ \
+ int __ret = 0; \
+ __typeof__(ptr) __uval = (uval); \
+ __typeof__(*(ptr)) __old = (old); \
+ __typeof__(*(ptr)) __new = (new); \
+ switch (size) { \
+ case 1: \
+ { \
+ asm volatile("\t" ASM_STAC "\n" \
+ "1:\t" LOCK_PREFIX "cmpxchgb %4, %2\n" \
+ "2:\t" ASM_CLAC "\n" \
+ "\t.section .fixup, \"ax\"\n" \
+ "3:\tmov %3, %0\n" \
+ "\tjmp 2b\n" \
+ "\t.previous\n" \
+ _ASM_EXTABLE(1b, 3b) \
+ : "+r" (__ret), "=a" (__old), "+m" (*(ptr)) \
+ : "i" (-EFAULT), "q" (__new), "1" (__old) \
+ : "memory" \
+ ); \
+ break; \
+ } \
+ case 2: \
+ { \
+ asm volatile("\t" ASM_STAC "\n" \
+ "1:\t" LOCK_PREFIX "cmpxchgw %4, %2\n" \
+ "2:\t" ASM_CLAC "\n" \
+ "\t.section .fixup, \"ax\"\n" \
+ "3:\tmov %3, %0\n" \
+ "\tjmp 2b\n" \
+ "\t.previous\n" \
+ _ASM_EXTABLE(1b, 3b) \
+ : "+r" (__ret), "=a" (__old), "+m" (*(ptr)) \
+ : "i" (-EFAULT), "r" (__new), "1" (__old) \
+ : "memory" \
+ ); \
+ break; \
+ } \
+ case 4: \
+ { \
+ asm volatile("\t" ASM_STAC "\n" \
+ "1:\t" LOCK_PREFIX "cmpxchgl %4, %2\n" \
+ "2:\t" ASM_CLAC "\n" \
+ "\t.section .fixup, \"ax\"\n" \
+ "3:\tmov %3, %0\n" \
+ "\tjmp 2b\n" \
+ "\t.previous\n" \
+ _ASM_EXTABLE(1b, 3b) \
+ : "+r" (__ret), "=a" (__old), "+m" (*(ptr)) \
+ : "i" (-EFAULT), "r" (__new), "1" (__old) \
+ : "memory" \
+ ); \
+ break; \
+ } \
+ case 8: \
+ { \
+ if (!IS_ENABLED(CONFIG_X86_64)) \
+ __cmpxchg_wrong_size(); \
+ \
+ asm volatile("\t" ASM_STAC "\n" \
+ "1:\t" LOCK_PREFIX "cmpxchgq %4, %2\n" \
+ "2:\t" ASM_CLAC "\n" \
+ "\t.section .fixup, \"ax\"\n" \
+ "3:\tmov %3, %0\n" \
+ "\tjmp 2b\n" \
+ "\t.previous\n" \
+ _ASM_EXTABLE(1b, 3b) \
+ : "+r" (__ret), "=a" (__old), "+m" (*(ptr)) \
+ : "i" (-EFAULT), "r" (__new), "1" (__old) \
+ : "memory" \
+ ); \
+ break; \
+ } \
+ default: \
+ __cmpxchg_wrong_size(); \
+ } \
+ *__uval = __old; \
+ __ret; \
+})
+
+#define user_atomic_cmpxchg_inatomic(uval, ptr, old, new) \
+({ \
+ access_ok(VERIFY_WRITE, (ptr), sizeof(*(ptr))) ? \
+ __user_atomic_cmpxchg_inatomic((uval), (ptr), \
+ (old), (new), sizeof(*(ptr))) : \
+ -EFAULT; \
+})
+
/*
* movsl can be slow when source and dest are not both 8-byte aligned
*/
^ permalink raw reply related [flat|nested] 6+ messages in thread
end of thread, other threads:[~2013-12-16 17:24 UTC | newest]
Thread overview: 6+ messages (download: mbox.gz follow: Atom feed
-- links below jump to the message on this page --
2013-12-14 6:25 [PATCH 1/2] x86: add user_atomic_cmpxchg_inatomic at uaccess.h Qiaowei Ren
2013-12-14 6:25 ` [PATCH 2/2] x86: replace futex_atomic_cmpxchg_inatomic() with user_atomic_cmpxchg_inatomic Qiaowei Ren
2013-12-16 17:24 ` [tip:x86/mpx] " tip-bot for Qiaowei Ren
2013-12-16 12:21 ` [PATCH 1/2] x86: add user_atomic_cmpxchg_inatomic at uaccess.h Ren, Qiaowei
2013-12-16 16:55 ` H. Peter Anvin
2013-12-16 17:24 ` [tip:x86/mpx] " tip-bot for Qiaowei Ren
This is a public inbox, see mirroring instructions
for how to clone and mirror all data and code used for this inbox