Subject: [PATCH v2 1/6] arch: add #define for each of optimized find bitops
The style we normally use in asm-generic is to test the macro itself
for existence, so in asm-generic we do:

#ifndef find_next_zero_bit_le
extern unsigned long find_next_zero_bit_le(const void *addr,
		unsigned long size, unsigned long offset);
#endif

and in the architectures, write

static inline unsigned long find_next_zero_bit_le(const void *addr,
		unsigned long size, unsigned long offset)
#define find_next_zero_bit_le find_next_zero_bit_le

This adds the #define for each of the optimized find bitops in the
architectures.
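
For illustration only, here is a minimal standalone sketch of the pattern
(the find_next_zero_bit_demo name and the naive bitmap scan are made up
for this example and are not the kernel code): the "arch" side defines the
inline function and then #defines its own name, so the "generic" side's
#ifndef test skips the fallback declaration.

#include <stdio.h>

/*
 * "Arch" side (sketch): provide an optimized helper and announce it by
 * defining the macro to its own name.
 */
static inline int find_next_zero_bit_demo(const unsigned int *addr,
		int size, int offset)
{
	int i;

	for (i = offset; i < size; i++)
		if (!((addr[i / 32] >> (i % 32)) & 1))
			return i;
	return size;
}
#define find_next_zero_bit_demo find_next_zero_bit_demo

/*
 * "Generic" side (sketch): only declare the fallback when no arch
 * override announced itself.
 */
#ifndef find_next_zero_bit_demo
extern int find_next_zero_bit_demo(const unsigned int *addr,
		int size, int offset);
#endif

int main(void)
{
	unsigned int map[1] = { 0x0000000f };	/* bits 0-3 set */

	/* First zero bit is bit 4, so this prints 4. */
	printf("%d\n", find_next_zero_bit_demo(map, 32, 0));
	return 0;
}

With the #define in place, the generic header compiles out its extern
declaration; without it, the generic fallback would be used instead.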

Suggested-by: Arnd Bergmann <arnd@arndb.de>
Signed-off-by: Akinobu Mita <akinobu.mita@gmail.com>
Acked-by: Hans-Christian Egtvedt <hans-christian.egtvedt@atmel.com>
Cc: Russell King <linux@arm.linux.org.uk>
Cc: linux-arm-kernel@lists.infradead.org
Cc: Martin Schwidefsky <schwidefsky@de.ibm.com>
Cc: Heiko Carstens <heiko.carstens@de.ibm.com>
Cc: linux390@de.ibm.com
Cc: linux-s390@vger.kernel.org
Cc: Geert Uytterhoeven <geert@linux-m68k.org>
Cc: linux-m68k@lists.linux-m68k.org
Cc: Greg Ungerer <gerg@uclinux.org>
---

Changes from previous submission:
- Add Acked-by: line
- Rebase to mainline

 arch/arm/include/asm/bitops.h     |    3 +++
 arch/avr32/include/asm/bitops.h   |   15 +++++++++++++++
 arch/m68k/include/asm/bitops_mm.h |    8 ++++++++
 arch/m68k/include/asm/bitops_no.h |    1 +
 arch/s390/include/asm/bitops.h    |    8 ++++++++
 5 files changed, 35 insertions(+), 0 deletions(-)

diff --git a/arch/arm/include/asm/bitops.h b/arch/arm/include/asm/bitops.h
index 6b7403f..18a024b 100644
--- a/arch/arm/include/asm/bitops.h
+++ b/arch/arm/include/asm/bitops.h
@@ -326,16 +326,19 @@ static inline int find_first_zero_bit_le(const void *p, unsigned size)
{
return _find_first_zero_bit_le(p, size);
}
+#define find_first_zero_bit_le find_first_zero_bit_le

static inline int find_next_zero_bit_le(const void *p, int size, int offset)
{
return _find_next_zero_bit_le(p, size, offset);
}
+#define find_next_zero_bit_le find_next_zero_bit_le

static inline int find_next_bit_le(const void *p, int size, int offset)
{
return _find_next_bit_le(p, size, offset);
}
+#define find_next_bit_le find_next_bit_le

/*
* Ext2 is defined to use little-endian byte ordering.
diff --git a/arch/avr32/include/asm/bitops.h b/arch/avr32/include/asm/bitops.h
index 72444d9..b70c19b 100644
--- a/arch/avr32/include/asm/bitops.h
+++ b/arch/avr32/include/asm/bitops.h
@@ -270,14 +270,21 @@ static inline int __fls(unsigned long word)

unsigned long find_first_zero_bit(const unsigned long *addr,
unsigned long size);
+#define find_first_zero_bit find_first_zero_bit
+
unsigned long find_next_zero_bit(const unsigned long *addr,
unsigned long size,
unsigned long offset);
+#define find_next_zero_bit find_next_zero_bit
+
unsigned long find_first_bit(const unsigned long *addr,
unsigned long size);
+#define find_first_bit find_first_bit
+
unsigned long find_next_bit(const unsigned long *addr,
unsigned long size,
unsigned long offset);
+#define find_next_bit find_next_bit

/*
* ffs: find first bit set. This is defined the same way as
@@ -299,6 +306,14 @@ static inline int ffs(unsigned long word)
#include <asm-generic/bitops/hweight.h>
#include <asm-generic/bitops/lock.h>

+extern unsigned long find_next_zero_bit_le(const void *addr,
+ unsigned long size, unsigned long offset);
+#define find_next_zero_bit_le find_next_zero_bit_le
+
+extern unsigned long find_next_bit_le(const void *addr,
+ unsigned long size, unsigned long offset);
+#define find_next_bit_le find_next_bit_le
+
#include <asm-generic/bitops/le.h>
#include <asm-generic/bitops/ext2-atomic.h>

diff --git a/arch/m68k/include/asm/bitops_mm.h b/arch/m68k/include/asm/bitops_mm.h
index 9d69f6e..a938ebe 100644
--- a/arch/m68k/include/asm/bitops_mm.h
+++ b/arch/m68k/include/asm/bitops_mm.h
@@ -198,6 +198,7 @@ static inline int find_first_zero_bit(const unsigned long *vaddr,
out:
return ((long)p - (long)vaddr - 4) * 8 + res;
}
+#define find_first_zero_bit find_first_zero_bit

static inline int find_next_zero_bit(const unsigned long *vaddr, int size,
int offset)
@@ -223,6 +224,7 @@ static inline int find_next_zero_bit(const unsigned long *vaddr, int size,
res = find_first_zero_bit(p, size - ((long)p - (long)vaddr) * 8);
return offset + res;
}
+#define find_next_zero_bit find_next_zero_bit

static inline int find_first_bit(const unsigned long *vaddr, unsigned size)
{
@@ -245,6 +247,7 @@ static inline int find_first_bit(const unsigned long *vaddr, unsigned size)
out:
return ((long)p - (long)vaddr - 4) * 8 + res;
}
+#define find_first_bit find_first_bit

static inline int find_next_bit(const unsigned long *vaddr, int size,
int offset)
@@ -270,6 +273,7 @@ static inline int find_next_bit(const unsigned long *vaddr, int size,
res = find_first_bit(p, size - ((long)p - (long)vaddr) * 8);
return offset + res;
}
+#define find_next_bit find_next_bit

/*
* ffz = Find First Zero in word. Undefined if no zero exists,
@@ -384,6 +388,7 @@ static inline int find_first_zero_bit_le(const void *vaddr, unsigned size)
break;
return (p - addr) * 32 + res;
}
+#define find_first_zero_bit_le find_first_zero_bit_le

static inline unsigned long find_next_zero_bit_le(const void *addr,
unsigned long size, unsigned long offset)
@@ -408,6 +413,7 @@ static inline unsigned long find_next_zero_bit_le(const void *addr,
/* No zero yet, search remaining full bytes for a zero */
return offset + find_first_zero_bit_le(p, size - offset);
}
+#define find_next_zero_bit_le find_next_zero_bit_le

static inline int find_first_bit_le(const void *vaddr, unsigned size)
{
@@ -429,6 +435,7 @@ static inline int find_first_bit_le(const void *vaddr, unsigned size)
break;
return (p - addr) * 32 + res;
}
+#define find_first_bit_le find_first_bit_le

static inline unsigned long find_next_bit_le(const void *addr,
unsigned long size, unsigned long offset)
@@ -453,6 +460,7 @@ static inline unsigned long find_next_bit_le(const void *addr,
/* No set bit yet, search remaining full bytes for a set bit */
return offset + find_first_bit_le(p, size - offset);
}
+#define find_next_bit_le find_next_bit_le

/* Bitmap functions for the ext2 filesystem. */

diff --git a/arch/m68k/include/asm/bitops_no.h b/arch/m68k/include/asm/bitops_no.h
index 7d3779f..614c613 100644
--- a/arch/m68k/include/asm/bitops_no.h
+++ b/arch/m68k/include/asm/bitops_no.h
@@ -335,6 +335,7 @@ found_first:
found_middle:
return result + ffz(__swab32(tmp));
}
+#define find_next_zero_bit_le find_next_zero_bit_le

#endif /* __KERNEL__ */

diff --git a/arch/s390/include/asm/bitops.h b/arch/s390/include/asm/bitops.h
index e1c8f3a..426c974 100644
--- a/arch/s390/include/asm/bitops.h
+++ b/arch/s390/include/asm/bitops.h
@@ -621,6 +621,7 @@ static inline unsigned long find_first_zero_bit(const unsigned long *addr,
bits = __ffz_word(bytes*8, __load_ulong_be(addr, bytes));
return (bits < size) ? bits : size;
}
+#define find_first_zero_bit find_first_zero_bit

/**
* find_first_bit - find the first set bit in a memory region
@@ -641,6 +642,7 @@ static inline unsigned long find_first_bit(const unsigned long * addr,
bits = __ffs_word(bytes*8, __load_ulong_be(addr, bytes));
return (bits < size) ? bits : size;
}
+#define find_first_bit find_first_bit

/**
* find_next_zero_bit - find the first zero bit in a memory region
@@ -677,6 +679,7 @@ static inline int find_next_zero_bit (const unsigned long * addr,
}
return offset + find_first_zero_bit(p, size);
}
+#define find_next_zero_bit find_next_zero_bit

/**
* find_next_bit - find the first set bit in a memory region
@@ -713,6 +716,7 @@ static inline int find_next_bit (const unsigned long * addr,
}
return offset + find_first_bit(p, size);
}
+#define find_next_bit find_next_bit

/*
* Every architecture must define this function. It's the fastest
@@ -787,6 +791,7 @@ static inline int find_first_zero_bit_le(void *vaddr, unsigned int size)
bits = __ffz_word(bytes*8, __load_ulong_le(vaddr, bytes));
return (bits < size) ? bits : size;
}
+#define find_first_zero_bit_le find_first_zero_bit_le

static inline int find_next_zero_bit_le(void *vaddr, unsigned long size,
unsigned long offset)
@@ -816,6 +821,7 @@ static inline int find_next_zero_bit_le(void *vaddr, unsigned long size,
}
return offset + find_first_zero_bit_le(p, size);
}
+#define find_next_zero_bit_le find_next_zero_bit_le

static inline unsigned long find_first_bit_le(void *vaddr, unsigned long size)
{
@@ -827,6 +833,7 @@ static inline unsigned long find_first_bit_le(void *vaddr, unsigned long size)
bits = __ffs_word(bytes*8, __load_ulong_le(vaddr, bytes));
return (bits < size) ? bits : size;
}
+#define find_first_bit_le find_first_bit_le

static inline int find_next_bit_le(void *vaddr, unsigned long size,
unsigned long offset)
@@ -856,6 +863,7 @@ static inline int find_next_bit_le(void *vaddr, unsigned long size,
}
return offset + find_first_bit_le(p, size);
}
+#define find_next_bit_le find_next_bit_le

#define ext2_set_bit_atomic(lock, nr, addr) \
test_and_set_bit_le(nr, addr)
--
1.7.4.4

