author	Akinobu Mita <akinobu.mita@gmail.com>	2011-05-26 16:26:06 -0700
committer	Linus Torvalds <torvalds@linux-foundation.org>	2011-05-26 17:12:38 -0700
commit	a2812e178321132811a53f7be40fe7e9bbffd9e0 (patch)
tree	5e1ae6d683d964cce8b4f107d8032522ac69b372 /arch
parent	e0819410dba141338ebf6ab1057c1863be6247ab (diff)
arch: add #define for each of optimized find bitops
The style that we normally use in asm-generic is to test the macro itself
for existence, so in asm-generic, do:

	#ifndef find_next_zero_bit_le
	extern unsigned long find_next_zero_bit_le(const void *addr,
			unsigned long size, unsigned long offset);
	#endif

and in the architectures, write

	static inline unsigned long find_next_zero_bit_le(const void *addr,
			unsigned long size, unsigned long offset)
	#define find_next_zero_bit_le find_next_zero_bit_le

This adds the #define for each of the optimized find bitops in the
architectures.

Suggested-by: Arnd Bergmann <arnd@arndb.de>
Signed-off-by: Akinobu Mita <akinobu.mita@gmail.com>
Acked-by: Hans-Christian Egtvedt <hans-christian.egtvedt@atmel.com>
Acked-by: Russell King <rmk+kernel@arm.linux.org.uk>
Acked-by: Greg Ungerer <gerg@uclinux.org>
Cc: Martin Schwidefsky <schwidefsky@de.ibm.com>
Cc: Heiko Carstens <heiko.carstens@de.ibm.com>
Acked-by: Geert Uytterhoeven <geert@linux-m68k.org>
Signed-off-by: Andrew Morton <akpm@linux-foundation.org>
Signed-off-by: Linus Torvalds <torvalds@linux-foundation.org>
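For context, here is a minimal sketch of the idiom outside the kernel tree;
all names in it (find_op, bitops_arch.h, bitops_generic.h) are made up for
illustration and are not the actual kernel files:

	/* bitops_arch.h - hypothetical architecture header */
	static inline unsigned long find_op(unsigned long word)
	{
		return word & 1UL;	/* stand-in for the optimized implementation */
	}
	#define find_op find_op		/* tells the generic header the symbol exists */

	/* bitops_generic.h - hypothetical generic fallback, included after the arch header */
	#ifndef find_op
	extern unsigned long find_op(unsigned long word);	/* out-of-line generic version */
	#endif

Because the macro expands to its own name, call sites are unchanged: an
architecture that defines the macro suppresses the generic declaration, while
one that does not simply falls through to the extern fallback.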
Diffstat (limited to 'arch')
-rw-r--r--	arch/arm/include/asm/bitops.h	3
-rw-r--r--	arch/avr32/include/asm/bitops.h	15
-rw-r--r--	arch/m68k/include/asm/bitops_mm.h	8
-rw-r--r--	arch/m68k/include/asm/bitops_no.h	2
-rw-r--r--	arch/s390/include/asm/bitops.h	8
5 files changed, 36 insertions(+), 0 deletions(-)
diff --git a/arch/arm/include/asm/bitops.h b/arch/arm/include/asm/bitops.h
index 6b7403f..18a024b 100644
--- a/arch/arm/include/asm/bitops.h
+++ b/arch/arm/include/asm/bitops.h
@@ -326,16 +326,19 @@ static inline int find_first_zero_bit_le(const void *p, unsigned size)
{
return _find_first_zero_bit_le(p, size);
}
+#define find_first_zero_bit_le find_first_zero_bit_le
static inline int find_next_zero_bit_le(const void *p, int size, int offset)
{
return _find_next_zero_bit_le(p, size, offset);
}
+#define find_next_zero_bit_le find_next_zero_bit_le
static inline int find_next_bit_le(const void *p, int size, int offset)
{
return _find_next_bit_le(p, size, offset);
}
+#define find_next_bit_le find_next_bit_le
/*
* Ext2 is defined to use little-endian byte ordering.
diff --git a/arch/avr32/include/asm/bitops.h b/arch/avr32/include/asm/bitops.h
index 72444d9..b70c19b 100644
--- a/arch/avr32/include/asm/bitops.h
+++ b/arch/avr32/include/asm/bitops.h
@@ -270,14 +270,21 @@ static inline int __fls(unsigned long word)
unsigned long find_first_zero_bit(const unsigned long *addr,
unsigned long size);
+#define find_first_zero_bit find_first_zero_bit
+
unsigned long find_next_zero_bit(const unsigned long *addr,
unsigned long size,
unsigned long offset);
+#define find_next_zero_bit find_next_zero_bit
+
unsigned long find_first_bit(const unsigned long *addr,
unsigned long size);
+#define find_first_bit find_first_bit
+
unsigned long find_next_bit(const unsigned long *addr,
unsigned long size,
unsigned long offset);
+#define find_next_bit find_next_bit
/*
* ffs: find first bit set. This is defined the same way as
@@ -299,6 +306,14 @@ static inline int ffs(unsigned long word)
#include <asm-generic/bitops/hweight.h>
#include <asm-generic/bitops/lock.h>
+extern unsigned long find_next_zero_bit_le(const void *addr,
+ unsigned long size, unsigned long offset);
+#define find_next_zero_bit_le find_next_zero_bit_le
+
+extern unsigned long find_next_bit_le(const void *addr,
+ unsigned long size, unsigned long offset);
+#define find_next_bit_le find_next_bit_le
+
#include <asm-generic/bitops/le.h>
#include <asm-generic/bitops/ext2-atomic.h>
diff --git a/arch/m68k/include/asm/bitops_mm.h b/arch/m68k/include/asm/bitops_mm.h
index e9020f8..89cf5b8 100644
--- a/arch/m68k/include/asm/bitops_mm.h
+++ b/arch/m68k/include/asm/bitops_mm.h
@@ -200,6 +200,7 @@ out:
res += ((long)p - (long)vaddr - 4) * 8;
return res < size ? res : size;
}
+#define find_first_zero_bit find_first_zero_bit
static inline int find_next_zero_bit(const unsigned long *vaddr, int size,
int offset)
@@ -229,6 +230,7 @@ static inline int find_next_zero_bit(const unsigned long *vaddr, int size,
/* No zero yet, search remaining full bytes for a zero */
return offset + find_first_zero_bit(p, size - offset);
}
+#define find_next_zero_bit find_next_zero_bit
static inline int find_first_bit(const unsigned long *vaddr, unsigned size)
{
@@ -253,6 +255,7 @@ out:
res += ((long)p - (long)vaddr - 4) * 8;
return res < size ? res : size;
}
+#define find_first_bit find_first_bit
static inline int find_next_bit(const unsigned long *vaddr, int size,
int offset)
@@ -282,6 +285,7 @@ static inline int find_next_bit(const unsigned long *vaddr, int size,
/* No one yet, search remaining full bytes for a one */
return offset + find_first_bit(p, size - offset);
}
+#define find_next_bit find_next_bit
/*
* ffz = Find First Zero in word. Undefined if no zero exists,
@@ -398,6 +402,7 @@ out:
res += (p - addr) * 32;
return res < size ? res : size;
}
+#define find_first_zero_bit_le find_first_zero_bit_le
static inline unsigned long find_next_zero_bit_le(const void *addr,
unsigned long size, unsigned long offset)
@@ -427,6 +432,7 @@ static inline unsigned long find_next_zero_bit_le(const void *addr,
/* No zero yet, search remaining full bytes for a zero */
return offset + find_first_zero_bit_le(p, size - offset);
}
+#define find_next_zero_bit_le find_next_zero_bit_le
static inline int find_first_bit_le(const void *vaddr, unsigned size)
{
@@ -451,6 +457,7 @@ out:
res += (p - addr) * 32;
return res < size ? res : size;
}
+#define find_first_bit_le find_first_bit_le
static inline unsigned long find_next_bit_le(const void *addr,
unsigned long size, unsigned long offset)
@@ -480,6 +487,7 @@ static inline unsigned long find_next_bit_le(const void *addr,
/* No set bit yet, search remaining full bytes for a set bit */
return offset + find_first_bit_le(p, size - offset);
}
+#define find_next_bit_le find_next_bit_le
/* Bitmap functions for the ext2 filesystem. */
diff --git a/arch/m68k/include/asm/bitops_no.h b/arch/m68k/include/asm/bitops_no.h
index b816299..52793eb 100644
--- a/arch/m68k/include/asm/bitops_no.h
+++ b/arch/m68k/include/asm/bitops_no.h
@@ -319,6 +319,7 @@ found_first:
found_middle:
return result + ffz(__swab32(tmp));
}
+#define find_next_zero_bit_le find_next_zero_bit_le
static inline unsigned long find_next_bit_le(const void *addr, unsigned
long size, unsigned long offset)
@@ -363,6 +364,7 @@ found_middle:
found_middle_swap:
return result + __ffs(__swab32(tmp));
}
+#define find_next_bit_le find_next_bit_le
#endif /* __KERNEL__ */
diff --git a/arch/s390/include/asm/bitops.h b/arch/s390/include/asm/bitops.h
index e1c8f3a..426c974 100644
--- a/arch/s390/include/asm/bitops.h
+++ b/arch/s390/include/asm/bitops.h
@@ -621,6 +621,7 @@ static inline unsigned long find_first_zero_bit(const unsigned long *addr,
bits = __ffz_word(bytes*8, __load_ulong_be(addr, bytes));
return (bits < size) ? bits : size;
}
+#define find_first_zero_bit find_first_zero_bit
/**
* find_first_bit - find the first set bit in a memory region
@@ -641,6 +642,7 @@ static inline unsigned long find_first_bit(const unsigned long * addr,
bits = __ffs_word(bytes*8, __load_ulong_be(addr, bytes));
return (bits < size) ? bits : size;
}
+#define find_first_bit find_first_bit
/**
* find_next_zero_bit - find the first zero bit in a memory region
@@ -677,6 +679,7 @@ static inline int find_next_zero_bit (const unsigned long * addr,
}
return offset + find_first_zero_bit(p, size);
}
+#define find_next_zero_bit find_next_zero_bit
/**
* find_next_bit - find the first set bit in a memory region
@@ -713,6 +716,7 @@ static inline int find_next_bit (const unsigned long * addr,
}
return offset + find_first_bit(p, size);
}
+#define find_next_bit find_next_bit
/*
* Every architecture must define this function. It's the fastest
@@ -787,6 +791,7 @@ static inline int find_first_zero_bit_le(void *vaddr, unsigned int size)
bits = __ffz_word(bytes*8, __load_ulong_le(vaddr, bytes));
return (bits < size) ? bits : size;
}
+#define find_first_zero_bit_le find_first_zero_bit_le
static inline int find_next_zero_bit_le(void *vaddr, unsigned long size,
unsigned long offset)
@@ -816,6 +821,7 @@ static inline int find_next_zero_bit_le(void *vaddr, unsigned long size,
}
return offset + find_first_zero_bit_le(p, size);
}
+#define find_next_zero_bit_le find_next_zero_bit_le
static inline unsigned long find_first_bit_le(void *vaddr, unsigned long size)
{
@@ -827,6 +833,7 @@ static inline unsigned long find_first_bit_le(void *vaddr, unsigned long size)
bits = __ffs_word(bytes*8, __load_ulong_le(vaddr, bytes));
return (bits < size) ? bits : size;
}
+#define find_first_bit_le find_first_bit_le
static inline int find_next_bit_le(void *vaddr, unsigned long size,
unsigned long offset)
@@ -856,6 +863,7 @@ static inline int find_next_bit_le(void *vaddr, unsigned long size,
}
return offset + find_first_bit_le(p, size);
}
+#define find_next_bit_le find_next_bit_le
#define ext2_set_bit_atomic(lock, nr, addr) \
test_and_set_bit_le(nr, addr)