author    Al Viro <viro@ftp.linux.org.uk>  2007-07-17 08:49:35 +0100
committer Linus Torvalds <torvalds@woody.linux-foundation.org>  2007-07-17 11:01:07 -0700
commit    d37c6e1b67e8d7f3c5fceba491dcb09a15cb7772 (patch)
tree      0475cc3e841e22994b3a779125d4cd90b52dc76d /include/asm-generic
parent    cc040a8a0e8ba95fbb0ae1edcb9ec83623b422e3 (diff)
saner typechecking in generic unaligned.h
Verify that types would match for assignment (under sizeof, so we are safe from side effects or any code actually getting generated), then explicitly cast everywhere to the fixed-sized types. Kills a bunch of bogus warnings about constants being truncated (gcc, sparse), finds a pile of endianness problems hidden by old noise (sparse).

Signed-off-by: Al Viro <viro@zeniv.linux.org.uk>
Signed-off-by: Linus Torvalds <torvalds@linux-foundation.org>
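For illustration only (the macro and identifiers below are hypothetical, not taken from the kernel header): a minimal, self-contained C sketch of the sizeof typechecking trick. The assignment inside sizeof() is never evaluated, so it produces no code and has no side effects, yet the compiler still checks that the value is assignable to the pointed-to type, turning mismatches that the old unconditional (__u64) cast silently accepted into compile-time diagnostics.

#include <stdint.h>
#include <string.h>
#include <stdio.h>

#define sketch_put_unaligned(x, ptr)					\
do {									\
	/* Type check only: operand of sizeof is never evaluated. */	\
	(void)sizeof(*(ptr) = (x));					\
	__typeof__(*(ptr)) __val = (x);					\
	/* Byte-wise copy so no alignment is assumed. */		\
	memcpy((void *)(ptr), &__val, sizeof(__val));			\
} while (0)

int main(void)
{
	unsigned char buf[6] = { 0 };
	uint32_t v = 0x12345678;

	/* Store at an odd offset: safe because the macro never
	 * dereferences the possibly misaligned pointer directly. */
	sketch_put_unaligned(v, (uint32_t *)(buf + 1));
	printf("%02x %02x %02x %02x\n", buf[1], buf[2], buf[3], buf[4]);

	/* Passing a struct or pointer as 'x' here would now fail to
	 * compile instead of being coerced through a wide integer cast. */
	return 0;
}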
Diffstat (limited to 'include/asm-generic')
-rw-r--r--  include/asm-generic/unaligned.h | 16
1 file changed, 9 insertions(+), 7 deletions(-)
diff --git a/include/asm-generic/unaligned.h b/include/asm-generic/unaligned.h
index 09ec447..16a466e 100644
--- a/include/asm-generic/unaligned.h
+++ b/include/asm-generic/unaligned.h
@@ -18,7 +18,8 @@
#define get_unaligned(ptr) \
__get_unaligned((ptr), sizeof(*(ptr)))
#define put_unaligned(x,ptr) \
- __put_unaligned((__u64)(x), (ptr), sizeof(*(ptr)))
+ ((void)sizeof(*(ptr)=(x)),\
+ __put_unaligned((__force __u64)(x), (ptr), sizeof(*(ptr))))
/*
* This function doesn't actually exist. The idea is that when
@@ -95,21 +96,21 @@ static inline void __ustw(__u16 val, __u16 *addr)
default: \
bad_unaligned_access_length(); \
}; \
- (__typeof__(*(ptr)))val; \
+ (__force __typeof__(*(ptr)))val; \
})
#define __put_unaligned(val, ptr, size) \
-do { \
+({ \
void *__gu_p = ptr; \
switch (size) { \
case 1: \
- *(__u8 *)__gu_p = val; \
+ *(__u8 *)__gu_p = (__force __u8)val; \
break; \
case 2: \
- __ustw(val, __gu_p); \
+ __ustw((__force __u16)val, __gu_p); \
break; \
case 4: \
- __ustl(val, __gu_p); \
+ __ustl((__force __u32)val, __gu_p); \
break; \
case 8: \
__ustq(val, __gu_p); \
@@ -117,6 +118,7 @@ do { \
default: \
bad_unaligned_access_length(); \
}; \
-} while(0)
+ (void)0; \
+})
#endif /* _ASM_GENERIC_UNALIGNED_H */