Subject: [PATCH 02/16] bitops: move find_bit_*_le functions from le.h to find.h
    It's convenient to have all find_bit declarations in one place.

    Signed-off-by: Yury Norov <yury.norov@gmail.com>
    Tested-by: Wolfram Sang <wsa+renesas@sang-engineering.com>
    ---
include/asm-generic/bitops/find.h | 69 +++++++++++++++++++++++++++++++
include/asm-generic/bitops/le.h   | 64 ----------------------------
2 files changed, 69 insertions(+), 64 deletions(-)
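
Not part of the patch, only context for reviewers: a minimal sketch of the
kind of caller these helpers serve, which after this series needs nothing
beyond the declarations collected in find.h. The helper name count_bits_le()
is hypothetical.

static inline unsigned long count_bits_le(const void *bitmap,
		unsigned long nbits)
{
	unsigned long bit, n = 0;

	/*
	 * Walk every set bit of a little-endian bitmap, regardless of host
	 * endianness; find_next_bit_le() returns nbits once no further bit
	 * is set. On big-endian hosts the implementation byte-swaps each
	 * word so the little-endian bit numbering lines up with __ffs().
	 */
	for (bit = find_next_bit_le(bitmap, nbits, 0);
	     bit < nbits;
	     bit = find_next_bit_le(bitmap, nbits, bit + 1))
		n++;

	return n;
}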

    diff --git a/include/asm-generic/bitops/find.h b/include/asm-generic/bitops/find.h
    index 835f959a25f2..91b1b23f2b0c 100644
    --- a/include/asm-generic/bitops/find.h
    +++ b/include/asm-generic/bitops/find.h
    @@ -190,4 +190,73 @@ extern unsigned long find_next_clump8(unsigned long *clump,
 #define find_first_clump8(clump, bits, size) \
 	find_next_clump8((clump), (bits), (size), 0)
 
+#if defined(__LITTLE_ENDIAN)
+
+static inline unsigned long find_next_zero_bit_le(const void *addr,
+		unsigned long size, unsigned long offset)
+{
+	return find_next_zero_bit(addr, size, offset);
+}
+
+static inline unsigned long find_next_bit_le(const void *addr,
+		unsigned long size, unsigned long offset)
+{
+	return find_next_bit(addr, size, offset);
+}
+
+static inline unsigned long find_first_zero_bit_le(const void *addr,
+		unsigned long size)
+{
+	return find_first_zero_bit(addr, size);
+}
+
+#elif defined(__BIG_ENDIAN)
+
+#ifndef find_next_zero_bit_le
+static inline
+unsigned long find_next_zero_bit_le(const void *addr, unsigned
+		long size, unsigned long offset)
+{
+	if (small_const_nbits(size)) {
+		unsigned long val = *(const unsigned long *)addr;
+
+		if (unlikely(offset >= size))
+			return size;
+
+		val = swab(val) | ~GENMASK(size - 1, offset);
+		return val == ~0UL ? size : ffz(val);
+	}
+
+	return _find_next_bit(addr, NULL, size, offset, ~0UL, 1);
+}
+#endif
+
+#ifndef find_next_bit_le
+static inline
+unsigned long find_next_bit_le(const void *addr, unsigned
+		long size, unsigned long offset)
+{
+	if (small_const_nbits(size)) {
+		unsigned long val = *(const unsigned long *)addr;
+
+		if (unlikely(offset >= size))
+			return size;
+
+		val = swab(val) & GENMASK(size - 1, offset);
+		return val ? __ffs(val) : size;
+	}
+
+	return _find_next_bit(addr, NULL, size, offset, 0UL, 1);
+}
+#endif
+
+#ifndef find_first_zero_bit_le
+#define find_first_zero_bit_le(addr, size) \
+	find_next_zero_bit_le((addr), (size), 0)
+#endif
+
+#else
+#error "Please fix <asm/byteorder.h>"
+#endif
+
 #endif /*_ASM_GENERIC_BITOPS_FIND_H_ */
    diff --git a/include/asm-generic/bitops/le.h b/include/asm-generic/bitops/le.h
    index 5a28629cbf4d..d51beff60375 100644
    --- a/include/asm-generic/bitops/le.h
    +++ b/include/asm-generic/bitops/le.h
    @@ -2,83 +2,19 @@
 #ifndef _ASM_GENERIC_BITOPS_LE_H_
 #define _ASM_GENERIC_BITOPS_LE_H_
 
-#include <asm-generic/bitops/find.h>
 #include <asm/types.h>
 #include <asm/byteorder.h>
-#include <linux/swab.h>
 
 #if defined(__LITTLE_ENDIAN)
 
 #define BITOP_LE_SWIZZLE	0
 
-static inline unsigned long find_next_zero_bit_le(const void *addr,
-		unsigned long size, unsigned long offset)
-{
-	return find_next_zero_bit(addr, size, offset);
-}
-
-static inline unsigned long find_next_bit_le(const void *addr,
-		unsigned long size, unsigned long offset)
-{
-	return find_next_bit(addr, size, offset);
-}
-
-static inline unsigned long find_first_zero_bit_le(const void *addr,
-		unsigned long size)
-{
-	return find_first_zero_bit(addr, size);
-}
-
 #elif defined(__BIG_ENDIAN)
 
 #define BITOP_LE_SWIZZLE	((BITS_PER_LONG-1) & ~0x7)
 
-#ifndef find_next_zero_bit_le
-static inline
-unsigned long find_next_zero_bit_le(const void *addr, unsigned
-		long size, unsigned long offset)
-{
-	if (small_const_nbits(size)) {
-		unsigned long val = *(const unsigned long *)addr;
-
-		if (unlikely(offset >= size))
-			return size;
-
-		val = swab(val) | ~GENMASK(size - 1, offset);
-		return val == ~0UL ? size : ffz(val);
-	}
-
-	return _find_next_bit(addr, NULL, size, offset, ~0UL, 1);
-}
-#endif
-
-#ifndef find_next_bit_le
-static inline
-unsigned long find_next_bit_le(const void *addr, unsigned
-		long size, unsigned long offset)
-{
-	if (small_const_nbits(size)) {
-		unsigned long val = *(const unsigned long *)addr;
-
-		if (unlikely(offset >= size))
-			return size;
-
-		val = swab(val) & GENMASK(size - 1, offset);
-		return val ? __ffs(val) : size;
-	}
-
-	return _find_next_bit(addr, NULL, size, offset, 0UL, 1);
-}
 #endif
 
-#ifndef find_first_zero_bit_le
-#define find_first_zero_bit_le(addr, size) \
-	find_next_zero_bit_le((addr), (size), 0)
-#endif
-
-#else
-#error "Please fix <asm/byteorder.h>"
-#endif
-
 
 static inline int test_bit_le(int nr, const void *addr)
 {
    --
    2.30.2