x86, generic: optimize find_next_(zero_)bit for small constant-size bitmaps
This moves an optimization for searching constant-sized small bitmaps from x86_64-specific to generic code.

On an i386 defconfig (the x86#testing one), the size of vmlinux hardly changes with this applied. I have observed only four places where this optimization avoids a call into find_next_bit: in the functions return_unused_surplus_pages, alloc_fresh_huge_page, and adjust_pool_surplus, this patch avoids a call for a 1-bit bitmap; in __next_cpu a call is avoided for a 32-bit bitmap. That's it.

On x86_64, 52 locations are optimized with a minimal increase in code size:

Current #testing defconfig:
    146 x bsf, 27 x find_next_*bit
   text    data     bss     dec     hex filename
5392637  846592  724424 6963653  6a41c5 vmlinux

After removing the x86_64 specific optimization for find_next_*bit:
    94 x bsf, 79 x find_next_*bit
   text    data     bss     dec     hex filename
5392358  846592  724424 6963374  6a40ae vmlinux

After this patch (making the optimization generic):
    146 x bsf, 27 x find_next_*bit
   text    data     bss     dec     hex filename
5392396  846592  724424 6963412  6a40d4 vmlinux

[ tglx@linutronix.de: build fixes ]
Signed-off-by: Ingo Molnar <mingo@elte.hu>
This commit is contained in:
parent 60b6783a04
commit 64970b68d2
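The heart of the change is the new inline find_next_bit()/find_next_zero_bit() added further down: when the size argument is a compile-time constant no larger than BITS_PER_LONG, the bitmap is a single word, so the search collapses to a mask plus one bit scan, with a sentinel bit planted at position size so that an empty bitmap yields size instead of hitting the undefined __ffs(0) case. A minimal standalone sketch of that idea (plain userspace C; demo_find_next_bit, the test values, and the use of __builtin_ctzl in place of the kernel's __ffs() are illustrative only, not part of the patch):

```c
#include <stdio.h>

#define BITS_PER_LONG (8 * sizeof(unsigned long))

/* Single-word search, mirroring the constant-size fast path:
 * __builtin_ctzl plays the role of the kernel's __ffs() here. */
static unsigned long demo_find_next_bit(unsigned long word,
                                        unsigned long size,
                                        unsigned long offset)
{
        unsigned long value = word & (~0ul << offset);  /* drop bits below offset */

        if (size < BITS_PER_LONG) {
                value |= 1ul << size;   /* sentinel: empty search returns size */
                return (unsigned long)__builtin_ctzl(value);
        }
        /* size == BITS_PER_LONG: ctz of 0 is undefined, handle zero explicitly */
        return value ? (unsigned long)__builtin_ctzl(value) : BITS_PER_LONG;
}

int main(void)
{
        printf("%lu\n", demo_find_next_bit(0x50, 8, 0));  /* bits 4,6 set -> 4 */
        printf("%lu\n", demo_find_next_bit(0x50, 8, 5));  /* next from bit 5 -> 6 */
        printf("%lu\n", demo_find_next_bit(0x00, 8, 0));  /* nothing set -> 8 */
        return 0;
}
```

With a constant size the compiler folds the branches away and only the mask and the bit scan remain, which is what the changelog's bsf counts are measuring.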
@@ -1,11 +1,13 @@
#ifndef _ASM_GENERIC_BITOPS_FIND_H_
#define _ASM_GENERIC_BITOPS_FIND_H_

#ifndef CONFIG_GENERIC_FIND_NEXT_BIT
extern unsigned long find_next_bit(const unsigned long *addr, unsigned long
        size, unsigned long offset);

extern unsigned long find_next_zero_bit(const unsigned long *addr, unsigned
        long size, unsigned long offset);
#endif

#define find_first_bit(addr, size) find_next_bit((addr), (size), 0)
#define find_first_zero_bit(addr, size) find_next_zero_bit((addr), (size), 0)

@@ -306,12 +306,6 @@ static int test_bit(int nr, const volatile unsigned long *addr);
#undef BIT_ADDR
#undef ADDR

unsigned long find_next_bit(const unsigned long *addr,
        unsigned long size, unsigned long offset);
unsigned long find_next_zero_bit(const unsigned long *addr,
        unsigned long size, unsigned long offset);

#ifdef CONFIG_X86_32
# include "bitops_32.h"
#else

@@ -15,16 +15,6 @@ static inline long __scanbit(unsigned long val, unsigned long max)
        return val;
}

#define find_next_bit(addr,size,off) \
((__builtin_constant_p(size) && (size) <= BITS_PER_LONG ? \
  ((off) + (__scanbit((*(unsigned long *)addr) >> (off),(size)-(off)))) : \
  find_next_bit(addr,size,off)))

#define find_next_zero_bit(addr,size,off) \
((__builtin_constant_p(size) && (size) <= BITS_PER_LONG ? \
  ((off)+(__scanbit(~(((*(unsigned long *)addr)) >> (off)),(size)-(off)))) : \
  find_next_zero_bit(addr,size,off)))

#define find_first_bit(addr, size) \
((__builtin_constant_p((size)) && (size) <= BITS_PER_LONG \
  ? (__scanbit(*(unsigned long *)(addr), (size))) \

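For context, the __scanbit() helper used by the macros removed above (its tail is visible at the top of this hunk) is x86_64's bsf-based scan: it returns the index of the lowest set bit in val, or max when val is zero, which is why the macros add (off) and pass (size)-(off) as the not-found fallback. A rough portable-C equivalent, offered as a sketch only (the real helper is inline asm, not this C):

```c
/* Sketch of __scanbit() semantics: index of the lowest set bit in val,
 * or max when val is zero. The kernel version is a bsf + cmov pair. */
static inline long scanbit_sketch(unsigned long val, unsigned long max)
{
        return val ? __builtin_ctzl(val) : (long)max;
}
```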
@@ -112,4 +112,81 @@ static inline unsigned fls_long(unsigned long l)
        return fls64(l);
}

#ifdef __KERNEL__
#ifdef CONFIG_GENERIC_FIND_NEXT_BIT
extern unsigned long __find_next_bit(const unsigned long *addr,
        unsigned long size, unsigned long offset);

/**
 * find_next_bit - find the next set bit in a memory region
 * @addr: The address to base the search on
 * @offset: The bitnumber to start searching at
 * @size: The bitmap size in bits
 */
static __always_inline unsigned long
find_next_bit(const unsigned long *addr, unsigned long size,
        unsigned long offset)
{
        unsigned long value;

        /* Avoid a function call if the bitmap size is a constant */
        /* and not bigger than BITS_PER_LONG. */

        /* insert a sentinel so that __ffs returns size if there */
        /* are no set bits in the bitmap */
        if (__builtin_constant_p(size) && (size < BITS_PER_LONG)) {
                value = (*addr) & ((~0ul) << offset);
                value |= (1ul << size);
                return __ffs(value);
        }

        /* the result of __ffs(0) is undefined, so it needs to be */
        /* handled separately */
        if (__builtin_constant_p(size) && (size == BITS_PER_LONG)) {
                value = (*addr) & ((~0ul) << offset);
                return (value == 0) ? BITS_PER_LONG : __ffs(value);
        }

        /* size is not constant or too big */
        return __find_next_bit(addr, size, offset);
}

extern unsigned long __find_next_zero_bit(const unsigned long *addr,
        unsigned long size, unsigned long offset);

/**
 * find_next_zero_bit - find the next cleared bit in a memory region
 * @addr: The address to base the search on
 * @offset: The bitnumber to start searching at
 * @size: The bitmap size in bits
 */
static __always_inline unsigned long
find_next_zero_bit(const unsigned long *addr, unsigned long size,
        unsigned long offset)
{
        unsigned long value;

        /* Avoid a function call if the bitmap size is a constant */
        /* and not bigger than BITS_PER_LONG. */

        /* insert a sentinel so that __ffs returns size if there */
        /* are no set bits in the bitmap */
        if (__builtin_constant_p(size) && (size < BITS_PER_LONG)) {
                value = (~(*addr)) & ((~0ul) << offset);
                value |= (1ul << size);
                return __ffs(value);
        }

        /* the result of __ffs(0) is undefined, so it needs to be */
        /* handled separately */
        if (__builtin_constant_p(size) && (size == BITS_PER_LONG)) {
                value = (~(*addr)) & ((~0ul) << offset);
                return (value == 0) ? BITS_PER_LONG : __ffs(value);
        }

        /* size is not constant or too big */
        return __find_next_zero_bit(addr, size, offset);
}
#endif /* CONFIG_GENERIC_FIND_NEXT_BIT */
#endif /* __KERNEL__ */
#endif

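With the inlines above in place, any caller that passes a compile-time-constant size of at most BITS_PER_LONG gets the single-word fast path, while runtime-sized searches still go out of line. A hedged illustration of the two kinds of call site (first_pending_fixed/first_pending_dynamic are invented for this note, not kernel code):

```c
#include <linux/bitops.h>

/* Constant size <= BITS_PER_LONG: the inline fast path above folds this
 * whole call into a mask plus a single bit scan at compile time. */
static unsigned long first_pending_fixed(const unsigned long *pending)
{
        return find_next_bit(pending, 32, 0);
}

/* Runtime size: __builtin_constant_p(size) is false here, so this compiles
 * to a call to the out-of-line __find_next_bit(). */
static unsigned long first_pending_dynamic(const unsigned long *pending,
                                           unsigned long nr)
{
        return find_next_bit(pending, nr, 0);
}
```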
@@ -15,17 +15,12 @@
#include <asm/byteorder.h>

#define BITOP_WORD(nr) ((nr) / BITS_PER_LONG)
#undef find_next_bit
#undef find_next_zero_bit

/**
 * find_next_bit - find the next set bit in a memory region
 * @addr: The address to base the search on
 * @offset: The bitnumber to start searching at
 * @size: The maximum size to search
/*
 * Find the next set bit in a memory region.
 */
unsigned long find_next_bit(const unsigned long *addr, unsigned long size,
        unsigned long offset)
unsigned long __find_next_bit(const unsigned long *addr,
        unsigned long size, unsigned long offset)
{
        const unsigned long *p = addr + BITOP_WORD(offset);
        unsigned long result = offset & ~(BITS_PER_LONG-1);

@@ -62,15 +57,14 @@ unsigned long find_next_bit(const unsigned long *addr, unsigned long size,
found_middle:
        return result + __ffs(tmp);
}

EXPORT_SYMBOL(find_next_bit);
EXPORT_SYMBOL(__find_next_bit);

/*
 * This implementation of find_{first,next}_zero_bit was stolen from
 * Linus' asm-alpha/bitops.h.
 */
unsigned long find_next_zero_bit(const unsigned long *addr, unsigned long size,
        unsigned long offset)
unsigned long __find_next_zero_bit(const unsigned long *addr,
        unsigned long size, unsigned long offset)
{
        const unsigned long *p = addr + BITOP_WORD(offset);
        unsigned long result = offset & ~(BITS_PER_LONG-1);

@@ -107,8 +101,7 @@ unsigned long find_next_zero_bit(const unsigned long *addr, unsigned long size,
found_middle:
        return result + ffz(tmp);
}

EXPORT_SYMBOL(find_next_zero_bit);
EXPORT_SYMBOL(__find_next_zero_bit);

#ifdef __BIG_ENDIAN
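The out-of-line __find_next_bit()/__find_next_zero_bit() that the inline fast path falls back to walk the bitmap one word at a time, as the prologue (BITOP_WORD, result) and the found_middle exit above suggest. A condensed sketch of that generic word-stepping search, not a line-for-line copy of the library code, with __builtin_ctzl again standing in for __ffs():

```c
#define BITS_PER_LONG (8 * sizeof(unsigned long))
#define BITOP_WORD(nr) ((nr) / BITS_PER_LONG)

/* Word-at-a-time search: handle a partial first word, scan whole words,
 * then mask the partial last word. Returns size if no set bit is found. */
static unsigned long sketch_find_next_bit(const unsigned long *addr,
                                          unsigned long size,
                                          unsigned long offset)
{
        const unsigned long *p = addr + BITOP_WORD(offset);
        unsigned long result = offset & ~(BITS_PER_LONG - 1);
        unsigned long tmp;

        if (offset >= size)
                return size;
        size -= result;
        offset %= BITS_PER_LONG;

        if (offset) {
                tmp = *p++ & (~0ul << offset);   /* ignore bits below offset */
                if (size < BITS_PER_LONG)
                        goto found_first;
                if (tmp)
                        goto found_middle;
                size -= BITS_PER_LONG;
                result += BITS_PER_LONG;
        }
        while (size >= BITS_PER_LONG) {          /* scan full words */
                if ((tmp = *p++))
                        goto found_middle;
                result += BITS_PER_LONG;
                size -= BITS_PER_LONG;
        }
        if (!size)
                return result;
        tmp = *p;

found_first:
        tmp &= ~0ul >> (BITS_PER_LONG - size);   /* ignore bits past size */
        if (tmp == 0)
                return result + size;            /* nothing set: returns size */
found_middle:
        return result + (unsigned long)__builtin_ctzl(tmp);
}
```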