patch-2.1.34 linux/arch/alpha/lib/csum_partial_copy.c

diff -u --recursive --new-file v2.1.33/linux/arch/alpha/lib/csum_partial_copy.c linux/arch/alpha/lib/csum_partial_copy.c
@@ -13,7 +13,7 @@
 
 
 #define ldq_u(x,y) \
-__asm__ __volatile__("ldq_u %0,%1":"=r" (x):"m" (*(unsigned long *)(y)))
+__asm__ __volatile__("ldq_u %0,%1":"=r" (x):"m" (*(const unsigned long *)(y)))
 
 #define stq_u(x,y) \
 __asm__ __volatile__("stq_u %1,%0":"=m" (*(unsigned long *)(y)):"r" (x))
@@ -72,7 +72,7 @@
  * Ok. This isn't fun, but this is the EASY case.
  */
 static inline unsigned long
-csum_partial_cfu_aligned(unsigned long *src, unsigned long *dst,
+csum_partial_cfu_aligned(const unsigned long *src, unsigned long *dst,
 			 long len, unsigned long checksum,
 			 int *errp)
 {
@@ -165,7 +165,7 @@
  * This is slightly less fun than the above..
  */
 static inline unsigned long
-csum_partial_cfu_src_aligned(unsigned long *src, unsigned long *dst,
+csum_partial_cfu_src_aligned(const unsigned long *src, unsigned long *dst,
 			     unsigned long doff,
 			     long len, unsigned long checksum,
 			     unsigned long partial_dest,
@@ -227,7 +227,7 @@
  * look at this too closely, you'll go blind.
  */
 static inline unsigned long
-csum_partial_cfu_unaligned(unsigned long * src, unsigned long * dst,
+csum_partial_cfu_unaligned(const unsigned long * src, unsigned long * dst,
 			   unsigned long soff, unsigned long doff,
 			   long len, unsigned long checksum,
 			   unsigned long partial_dest,
@@ -305,7 +305,7 @@
 }
 
 static unsigned int
-do_csum_partial_copy_from_user(char *src, char *dst, int len,
+do_csum_partial_copy_from_user(const char *src, char *dst, int len,
 			       unsigned int sum, int *errp)
 {
 	unsigned long checksum = (unsigned) sum;
@@ -316,12 +316,12 @@
 		if (!doff) {
 			if (!soff)
 				checksum = csum_partial_cfu_aligned(
-					(unsigned long *) src,
+					(const unsigned long *) src,
 					(unsigned long *) dst,
 					len-8, checksum, errp);
 			else
 				checksum = csum_partial_cfu_dest_aligned(
-					(unsigned long *) src,
+					(const unsigned long *) src,
 					(unsigned long *) dst,
 					soff, len-8, checksum, errp);
 		} else {
@@ -329,13 +329,13 @@
 			ldq_u(partial_dest, dst);
 			if (!soff)
 				checksum = csum_partial_cfu_src_aligned(
-					(unsigned long *) src,
+					(const unsigned long *) src,
 					(unsigned long *) dst,
 					doff, len-8, checksum,
 					partial_dest, errp);
 			else
 				checksum = csum_partial_cfu_unaligned(
-					(unsigned long *) src,
+					(const unsigned long *) src,
 					(unsigned long *) dst,
 					soff, doff, len-8, checksum,
 					partial_dest, errp);
@@ -352,7 +352,7 @@
 }
 
 unsigned int
-csum_partial_copy_from_user(char *src, char *dst, int len,
+csum_partial_copy_from_user(const char *src, char *dst, int len,
 			    unsigned int sum, int *errp)
 {
 	if (!access_ok(src, len, VERIFY_READ)) {
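
The hunks above only add const qualifiers to the source pointers of the
checksum-and-copy helpers; the copy-while-summing logic itself is unchanged.
As a rough, userspace-only illustration of that pattern (a hypothetical
copy_and_csum_aligned(), not the kernel's csum_partial_cfu_aligned(): no
ldq_u/stq_u unaligned handling, no access_ok()/exception handling, and it
assumes an 8-byte-aligned, multiple-of-8 buffer), a minimal sketch of an
aligned copy-and-checksum loop with a const-qualified source might look
like this:

	#include <inttypes.h>
	#include <stdint.h>
	#include <stdio.h>
	#include <string.h>

	/* Hypothetical helper, not the kernel routine: copy 64-bit words
	 * while accumulating a one's-complement style sum. */
	static uint32_t
	copy_and_csum_aligned(const uint64_t *src, uint64_t *dst, long words)
	{
		uint64_t sum = 0;

		while (words-- > 0) {
			uint64_t word = *src++;

			*dst++ = word;		/* copy the 64-bit word ...    */
			sum += word;		/* ... and add it into the sum */
			if (sum < word)		/* end-around carry            */
				sum++;
		}

		/* fold the 64-bit accumulator down to 32 bits */
		sum = (sum & 0xffffffffULL) + (sum >> 32);
		sum = (sum & 0xffffffffULL) + (sum >> 32);
		return (uint32_t) sum;
	}

	int main(void)
	{
		/* read-only source buffer, accepted without casting away const */
		static const uint64_t src[4] = { 1, 2, 3, 0xffffffffffffffffULL };
		uint64_t dst[4];
		uint32_t csum = copy_and_csum_aligned(src, dst, 4);

		printf("checksum %08" PRIx32 ", copy ok: %d\n",
		       csum, memcmp(src, dst, sizeof(src)) == 0);
		return 0;
	}

The point of the patch is visible in the prototype: with a const-qualified
source pointer, read-only buffers can be passed straight through, which is
what the added (const unsigned long *) casts and the const char *src
parameter of csum_partial_copy_from_user() now allow without discarding the
qualifier.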
