A `cat /dev/kmem` oopses the kernel.  This is because the new Intel
copy_to_user() doesn't handle segfaults against the kernel-side source
address of the copy.

It is not obvious how to fix read_kmem(), and handling faults on either
source or dest is traditional behaviour, so fix it in the copy function.



 arch/i386/lib/usercopy.c |  174 ++++++++++++++++++++++++++---------------------
 1 files changed, 97 insertions(+), 77 deletions(-)

diff -puN arch/i386/lib/usercopy.c~copy_user-handle-kernel-fault arch/i386/lib/usercopy.c
--- 25/arch/i386/lib/usercopy.c~copy_user-handle-kernel-fault	2003-08-17 23:38:18.000000000 -0700
+++ 25-akpm/arch/i386/lib/usercopy.c	2003-08-17 23:39:33.000000000 -0700
@@ -222,85 +222,105 @@ __copy_user_intel(void *to, const void *
 {
 	int d0, d1;
 	__asm__ __volatile__(
-		       "       .align 2,0x90\n" 
-		       "0:     movl 32(%4), %%eax\n"
-		       "       cmpl $67, %0\n"     
-		       "       jbe 1f\n"            
-		       "       movl 64(%4), %%eax\n"
-		       "       .align 2,0x90\n"     
-		       "1:     movl 0(%4), %%eax\n" 
-		       "       movl 4(%4), %%edx\n" 
-		       "2:     movl %%eax, 0(%3)\n" 
-		       "21:    movl %%edx, 4(%3)\n" 
-		       "       movl 8(%4), %%eax\n" 
-		       "       movl 12(%4),%%edx\n" 
-		       "3:     movl %%eax, 8(%3)\n" 
-		       "31:    movl %%edx, 12(%3)\n"
-		       "       movl 16(%4), %%eax\n"
-		       "       movl 20(%4), %%edx\n"
-		       "4:     movl %%eax, 16(%3)\n"
-		       "41:    movl %%edx, 20(%3)\n"
-		       "       movl 24(%4), %%eax\n"
-		       "       movl 28(%4), %%edx\n"
-		       "10:    movl %%eax, 24(%3)\n"
-		       "51:    movl %%edx, 28(%3)\n"
-		       "       movl 32(%4), %%eax\n"
-		       "       movl 36(%4), %%edx\n"
-		       "11:    movl %%eax, 32(%3)\n"
-		       "61:    movl %%edx, 36(%3)\n"
-		       "       movl 40(%4), %%eax\n"
-		       "       movl 44(%4), %%edx\n"
-		       "12:    movl %%eax, 40(%3)\n"
-		       "71:    movl %%edx, 44(%3)\n"
-		       "       movl 48(%4), %%eax\n"
-		       "       movl 52(%4), %%edx\n"
-		       "13:    movl %%eax, 48(%3)\n"
-		       "81:    movl %%edx, 52(%3)\n"
-		       "       movl 56(%4), %%eax\n"
-		       "       movl 60(%4), %%edx\n"
-		       "14:    movl %%eax, 56(%3)\n"
-		       "91:    movl %%edx, 60(%3)\n"
-		       "       addl $-64, %0\n"     
-		       "       addl $64, %4\n"      
-		       "       addl $64, %3\n"      
-		       "       cmpl $63, %0\n"      
-		       "       ja  0b\n"            
-		       "5:     movl  %0, %%eax\n"   
-		       "       shrl  $2, %0\n"      
-		       "       andl  $3, %%eax\n"   
-		       "       cld\n"               
-		       "6:     rep; movsl\n"        
-		       "       movl %%eax, %0\n"    
-		       "7:     rep; movsb\n"		
-		       "8:\n"				
-		       ".section .fixup,\"ax\"\n"	
-		       "9:     lea 0(%%eax,%0,4),%0\n"	
-		       "       jmp 8b\n"               
-		       ".previous\n"			
-		       ".section __ex_table,\"a\"\n"	
-		       "       .align 4\n"		
-		       "       .long 2b,8b\n"		
-		       "       .long 21b,8b\n"	
-		       "       .long 3b,8b\n"		
-		       "       .long 31b,8b\n"	
-		       "       .long 4b,8b\n"		
-		       "       .long 41b,8b\n"	
-		       "       .long 10b,8b\n"	
-		       "       .long 51b,8b\n"	
-		       "       .long 11b,8b\n"	
-		       "       .long 61b,8b\n"	
-		       "       .long 12b,8b\n"	
-		       "       .long 71b,8b\n"	
-		       "       .long 13b,8b\n"	
-		       "       .long 81b,8b\n"	
-		       "       .long 14b,8b\n"	
-		       "       .long 91b,8b\n"	
-		       "       .long 6b,9b\n"		
-		       "       .long 7b,8b\n"          
-		       ".previous"			
+		       "       .align 2,0x90\n"
+		       "1:     movl 32(%4), %%eax\n"
+		       "       cmpl $67, %0\n"
+		       "       jbe 3f\n"
+		       "2:     movl 64(%4), %%eax\n"
+		       "       .align 2,0x90\n"
+		       "3:     movl 0(%4), %%eax\n"
+		       "4:     movl 4(%4), %%edx\n"
+		       "5:     movl %%eax, 0(%3)\n"
+		       "6:     movl %%edx, 4(%3)\n"
+		       "7:     movl 8(%4), %%eax\n"
+		       "8:     movl 12(%4),%%edx\n"
+		       "9:     movl %%eax, 8(%3)\n"
+		       "10:    movl %%edx, 12(%3)\n"
+		       "11:    movl 16(%4), %%eax\n"
+		       "12:    movl 20(%4), %%edx\n"
+		       "13:    movl %%eax, 16(%3)\n"
+		       "14:    movl %%edx, 20(%3)\n"
+		       "15:    movl 24(%4), %%eax\n"
+		       "16:    movl 28(%4), %%edx\n"
+		       "17:    movl %%eax, 24(%3)\n"
+		       "18:    movl %%edx, 28(%3)\n"
+		       "19:    movl 32(%4), %%eax\n"
+		       "20:    movl 36(%4), %%edx\n"
+		       "21:    movl %%eax, 32(%3)\n"
+		       "22:    movl %%edx, 36(%3)\n"
+		       "23:    movl 40(%4), %%eax\n"
+		       "24:    movl 44(%4), %%edx\n"
+		       "25:    movl %%eax, 40(%3)\n"
+		       "26:    movl %%edx, 44(%3)\n"
+		       "27:    movl 48(%4), %%eax\n"
+		       "28:    movl 52(%4), %%edx\n"
+		       "29:    movl %%eax, 48(%3)\n"
+		       "30:    movl %%edx, 52(%3)\n"
+		       "31:    movl 56(%4), %%eax\n"
+		       "32:    movl 60(%4), %%edx\n"
+		       "33:    movl %%eax, 56(%3)\n"
+		       "34:    movl %%edx, 60(%3)\n"
+		       "       addl $-64, %0\n"
+		       "       addl $64, %4\n"
+		       "       addl $64, %3\n"
+		       "       cmpl $63, %0\n"
+		       "       ja  1b\n"
+		       "35:    movl  %0, %%eax\n"
+		       "       shrl  $2, %0\n"
+		       "       andl  $3, %%eax\n"
+		       "       cld\n"
+		       "99:    rep; movsl\n"
+		       "36:    movl %%eax, %0\n"
+		       "37:    rep; movsb\n"
+		       "100:\n"
+		       ".section .fixup,\"ax\"\n"
+		       "101:   lea 0(%%eax,%0,4),%0\n"
+		       "       jmp 100b\n"
+		       ".previous\n"
+		       ".section __ex_table,\"a\"\n"
+		       "       .align 4\n"
+		       "       .long 1b,100b\n"
+		       "       .long 2b,100b\n"
+		       "       .long 3b,100b\n"
+		       "       .long 4b,100b\n"
+		       "       .long 5b,100b\n"
+		       "       .long 6b,100b\n"
+		       "       .long 7b,100b\n"
+		       "       .long 8b,100b\n"
+		       "       .long 9b,100b\n"
+		       "       .long 10b,100b\n"
+		       "       .long 11b,100b\n"
+		       "       .long 12b,100b\n"
+		       "       .long 13b,100b\n"
+		       "       .long 14b,100b\n"
+		       "       .long 15b,100b\n"
+		       "       .long 16b,100b\n"
+		       "       .long 17b,100b\n"
+		       "       .long 18b,100b\n"
+		       "       .long 19b,100b\n"
+		       "       .long 20b,100b\n"
+		       "       .long 21b,100b\n"
+		       "       .long 22b,100b\n"
+		       "       .long 23b,100b\n"
+		       "       .long 24b,100b\n"
+		       "       .long 25b,100b\n"
+		       "       .long 26b,100b\n"
+		       "       .long 27b,100b\n"
+		       "       .long 28b,100b\n"
+		       "       .long 29b,100b\n"
+		       "       .long 30b,100b\n"
+		       "       .long 31b,100b\n"
+		       "       .long 32b,100b\n"
+		       "       .long 33b,100b\n"
+		       "       .long 34b,100b\n"
+		       "       .long 35b,100b\n"
+		       "       .long 36b,100b\n"
+		       "       .long 37b,100b\n"
+		       "       .long 99b,101b\n"
+		       ".previous"
 		       : "=&c"(size), "=&D" (d0), "=&S" (d1)
 		       :  "1"(to), "2"(from), "0"(size)
-		       : "eax", "edx", "memory");			
+		       : "eax", "edx", "memory");
 	return size;
 }
 

_