patch-2.1.29 linux/include/asm-sparc/checksum.h

diff -u --recursive --new-file v2.1.28/linux/include/asm-sparc/checksum.h linux/include/asm-sparc/checksum.h
@@ -1,4 +1,4 @@
-/* $Id: checksum.h,v 1.22 1996/11/10 21:28:25 davem Exp $ */
+/* $Id: checksum.h,v 1.25 1997/02/19 15:51:19 jj Exp $ */
 #ifndef __SPARC_CHECKSUM_H
 #define __SPARC_CHECKSUM_H
 
@@ -8,12 +8,16 @@
  *  Copyright(C) 1995 Miguel de Icaza
  *  Copyright(C) 1996 David S. Miller
  *  Copyright(C) 1996 Eddie C. Dost
+ *  Copyright(C) 1997 Jakub Jelinek
  *
  * derived from:
  *	Alpha checksum c-code
  *      ix86 inline assembly
  *      RFC1071 Computing the Internet Checksum
  */
+ 
+#include <asm/uaccess.h>
+#include <asm/cprefix.h>
 
 /* computes the checksum of a memory block at buff, length len,
  * and adds in "sum" (32-bit)
@@ -34,11 +38,84 @@
  * here even more important to align src and dst on a 32-bit (or even
  * better 64-bit) boundary
  */
-extern unsigned int csum_partial_copy(char *src, char *dst, int len, int sum);
 
+/* FIXME: Remove these two macros ASAP */
+#define csum_partial_copy(src, dst, len, sum) \
+ 		       csum_partial_copy_nocheck(src,dst,len,sum)
 #define csum_partial_copy_fromuser(s, d, l, w)  \
-                       csum_partial_copy((char *) (s), (d), (l), (w))
+                         csum_partial_copy((char *) (s), (d), (l), (w))
+  
+extern __inline__ unsigned int 
+csum_partial_copy_nocheck (const char *src, char *dst, int len, 
+			   unsigned int sum)
+{
+	register unsigned int ret asm("o0") = (unsigned int)src;
+	register char *d asm("o1") = dst;
+	register int l asm("g1") = len;
+	
+	__asm__ __volatile__ ("
+		call " C_LABEL_STR(__csum_partial_copy_sparc_generic) "
+		 mov %4, %%g7
+	" : "=r" (ret) : "0" (ret), "r" (d), "r" (l), "r" (sum) :
+	"o1", "o2", "o3", "o4", "o5", "o7", "g1", "g2", "g3", "g4", "g5", "g7");
+	return ret;
+}
 
+extern __inline__ unsigned int 
+csum_partial_copy_from_user(const char *src, char *dst, int len, 
+			    unsigned int sum, int *err)
+  {
+	if (!access_ok (VERIFY_READ, src, len)) {
+		*err = -EFAULT;
+		memset (dst, 0, len);
+		return sum;
+	} else {
+		register unsigned int ret asm("o0") = (unsigned int)src;
+		register char *d asm("o1") = dst;
+		register int l asm("g1") = len;
+		register unsigned int s asm("g7") = sum;
+
+		__asm__ __volatile__ ("
+		.section __ex_table,#alloc
+		.align 4
+		.word 1f,2
+		.previous
+1:
+		call " C_LABEL_STR(__csum_partial_copy_sparc_generic) "
+		 st %5, [%%sp + 64]
+		" : "=r" (ret) : "0" (ret), "r" (d), "r" (l), "r" (s), "r" (err) :
+		"o1", "o2", "o3", "o4", "o5", "o7", "g1", "g2", "g3", "g4", "g5", "g7");
+		return ret;
+	}
+  }
+  
+extern __inline__ unsigned int 
+csum_partial_copy_to_user(const char *src, char *dst, int len, 
+			  unsigned int sum, int *err)
+{
+	if (!access_ok (VERIFY_WRITE, dst, len)) {
+		*err = -EFAULT;
+		return sum;
+	} else {
+		register unsigned int ret asm("o0") = (unsigned int)src;
+		register char *d asm("o1") = dst;
+		register int l asm("g1") = len;
+		register unsigned int s asm("g7") = sum;
+
+		__asm__ __volatile__ ("
+		.section __ex_table,#alloc
+		.align 4
+		.word 1f,1
+		.previous
+1:
+		call " C_LABEL_STR(__csum_partial_copy_sparc_generic) "
+		 st %5, [%%sp + 64]
+		" : "=r" (ret) : "0" (ret), "r" (d), "r" (l), "r" (s), "r" (err) :
+		"o1", "o2", "o3", "o4", "o5", "o7", "g1", "g2", "g3", "g4", "g5", "g7");
+		return ret;
+	}
+}
+  
 /* ihl is always 5 or greater, almost always is 5, and iph is word aligned
  * the majority of the time.
  */

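For reference, the operation every one of these wrappers funnels into is the RFC 1071 Internet checksum computed while copying, as the file's header comment notes. Below is a minimal portable C sketch of that combined copy-and-sum, assuming big-endian byte order as on SPARC; the name csum_partial_copy_ref and the byte-at-a-time loop are illustrative only, since the kernel does the real work in one pass of hand-scheduled assembly (__csum_partial_copy_sparc_generic):

	/* Illustration only, not the kernel implementation: copy len bytes
	 * from src to dst while accumulating the RFC 1071 ones-complement
	 * sum into an accumulator seeded with "sum".
	 */
	static unsigned int
	csum_partial_copy_ref(const unsigned char *src, unsigned char *dst,
			      int len, unsigned int sum)
	{
		unsigned long long acc = sum;	/* wide accumulator, folded below */

		while (len > 1) {		/* sum 16-bit big-endian words */
			acc += (unsigned int)((src[0] << 8) | src[1]);
			dst[0] = src[0];
			dst[1] = src[1];
			src += 2; dst += 2; len -= 2;
		}
		if (len) {			/* trailing odd byte, zero-padded */
			acc += (unsigned int)(src[0] << 8);
			dst[0] = src[0];
		}
		while (acc >> 32)		/* fold carries back into 32 bits */
			acc = (acc & 0xffffffffULL) + (acc >> 32);
		return (unsigned int)acc;
	}

Callers later fold the 32-bit result down to 16 bits and complement it; keeping the running sum a full 32 bits wide is what lets csum_partial-style interfaces be chained across successive buffers.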
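The ".section __ex_table" fragments in the two user-copy variants are what make the unchecked assembly safe: they record the address of the faulting call site in the kernel's exception table, so a fault while touching user memory is redirected to fixup handling (reporting -EFAULT through the err pointer that the delay-slot store parks at [%sp + 64]) instead of being treated as a kernel crash. The sketch below shows the lookup concept only, with a hypothetical static table standing in for the linker-assembled __ex_table section; note that the SPARC fault handler apparently special-cases small second words such as the 1 and 2 emitted here, rather than jumping to them as addresses.

	struct ex_entry {
		unsigned long insn;   /* address of the instruction that may fault */
		unsigned long fixup;  /* resume address, or a special code */
	};

	/* Hypothetical table; in the kernel it is assembled by the linker
	 * from the ".section __ex_table" entries emitted above. */
	static const struct ex_entry ex_table[] = {
		{ 0x1000, 0x2000 },   /* faulting insn -> fixup handler */
		{ 0x1008, 2 },        /* small code, interpreted by the fault handler */
	};

	/* On a fault at pc, the trap handler scans the table; a hit means an
	 * expected user-access fault, and control resumes at the fixup. */
	static unsigned long ex_table_lookup(unsigned long pc)
	{
		int i;

		for (i = 0; i < (int)(sizeof(ex_table) / sizeof(ex_table[0])); i++)
			if (ex_table[i].insn == pc)
				return ex_table[i].fixup;
		return 0;   /* no entry: genuine kernel fault */
	}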