From: Glauber Costa <gcosta@redhat.com>
Subject: [PATCH 38/39] put movsl_mask into uaccess.h
Date: 27 Jun 2008
x86_64 does not need it, but it will not have X86_INTEL_USERCOPY
defined either, so the #ifdef keeps the declaration out of 64-bit builds.

Signed-off-by: Glauber Costa <gcosta@redhat.com>
---
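
Note for readers (not part of the commit message): as the comment being
moved says, movsl can be slow when source and destination are not both
8-byte aligned, so the 32-bit usercopy code consults movsl_mask.mask to
decide whether the rep;movsl path is worth taking. A minimal sketch of
that kind of check follows -- the helper name and the size threshold are
illustrative, not the kernel's actual helper:

#ifdef CONFIG_X86_INTEL_USERCOPY
/*
 * Illustrative sketch only: skip the movsl fast path for large copies
 * when the low bits of source and destination differ under the
 * CPU-dependent mask, i.e. the two pointers are not mutually aligned.
 */
static inline int movsl_copy_ok(const void *dst, const void *src,
				unsigned long n)
{
	if (n >= 64 &&
	    (((unsigned long)dst ^ (unsigned long)src) & movsl_mask.mask))
		return 0;
	return 1;
}
#endif
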
include/asm-x86/uaccess.h | 9 +++++++++
include/asm-x86/uaccess_32.h | 9 ---------
2 files changed, 9 insertions(+), 9 deletions(-)
diff --git a/include/asm-x86/uaccess.h b/include/asm-x86/uaccess.h
index 4ebb992..ddc32fe 100644
--- a/include/asm-x86/uaccess.h
+++ b/include/asm-x86/uaccess.h
@@ -433,6 +433,15 @@ struct __large_struct { unsigned long buf[100]; };
 #define __get_user_unaligned __get_user
 #define __put_user_unaligned __put_user
 
+/*
+ * movsl can be slow when source and dest are not both 8-byte aligned
+ */
+#ifdef CONFIG_X86_INTEL_USERCOPY
+extern struct movsl_mask {
+	int mask;
+} ____cacheline_aligned_in_smp movsl_mask;
+#endif
+
 #ifdef CONFIG_X86_32
 # include "uaccess_32.h"
 #else
diff --git a/include/asm-x86/uaccess_32.h b/include/asm-x86/uaccess_32.h
index d3b5bf8..3467749 100644
--- a/include/asm-x86/uaccess_32.h
+++ b/include/asm-x86/uaccess_32.h
@@ -11,15 +11,6 @@
 #include <asm/asm.h>
 #include <asm/page.h>
 
-/*
- * movsl can be slow when source and dest are not both 8-byte aligned
- */
-#ifdef CONFIG_X86_INTEL_USERCOPY
-extern struct movsl_mask {
-	int mask;
-} ____cacheline_aligned_in_smp movsl_mask;
-#endif
-
 unsigned long __must_check __copy_to_user_ll
 		(void __user *to, const void *from, unsigned long n);
 unsigned long __must_check __copy_from_user_ll
--
1.5.5.1

