[PATCH 5/9] xen/bitops: Introduce generic_hweightl() and hweightl()
There are 6 remaining callers in Xen:

 * The two hweight32() calls, in _domain_struct_bits() and efi_find_gop_mode(),
   are __init only.
 * The two hweight_long() calls are both in bitmap_weight().
 * The two hweight64() calls are hv_vpset_nr_banks() and x86_emulate().

Only bitmap_weight() and possibly hv_vpset_nr_banks() can be considered
fast(ish) paths, and they're all of GPR-width form.

Furthermore, the difference between a generic int and a generic long form is
only an ADD and a SHIFT, and only in !CONFIG_HAS_FAST_MULTIPLY builds.
Therefore, it is definitely not worth having both generic implementations.

Implement generic_hweightl() based on the current generic_hweight64(),
adjusted to be compatible with ARM32, along with standard SELF_TESTS.

Implement hweightl() with the usual constant-folding and arch opt-in support.
PPC is the only architecture that deviates from generic, and it simply uses
the builtin.

No functional change.

Signed-off-by: Andrew Cooper <andrew.cooper3@xxxxxxxxxx>
---
CC: Jan Beulich <JBeulich@xxxxxxxx>
CC: Roger Pau Monné <roger.pau@xxxxxxxxxx>
CC: Wei Liu <wl@xxxxxxx>
CC: Stefano Stabellini <sstabellini@xxxxxxxxxx>
CC: Julien Grall <julien@xxxxxxx>
CC: Volodymyr Babchuk <Volodymyr_Babchuk@xxxxxxxx>
CC: Bertrand Marquis <bertrand.marquis@xxxxxxx>
CC: Michal Orzel <michal.orzel@xxxxxxx>
CC: Oleksii Kurochko <oleksii.kurochko@xxxxxxxxx>
CC: Shawn Anastasio <sanastasio@xxxxxxxxxxxxxxxxxxxxx>
---
 xen/arch/ppc/include/asm/bitops.h |  2 ++
 xen/common/bitops.c               | 14 ++++++++++
 xen/include/xen/bitops.h          | 18 ++++++++++++
 xen/lib/Makefile                  |  1 +
 xen/lib/generic-hweightl.c        | 46 +++++++++++++++++++++++++++++++
 5 files changed, 81 insertions(+)
 create mode 100644 xen/lib/generic-hweightl.c

diff --git a/xen/arch/ppc/include/asm/bitops.h b/xen/arch/ppc/include/asm/bitops.h
index a62c4f99c3bb..64512e949530 100644
--- a/xen/arch/ppc/include/asm/bitops.h
+++ b/xen/arch/ppc/include/asm/bitops.h
@@ -124,6 +124,8 @@ static inline int test_and_set_bit(unsigned int nr, volatile void *addr)
 #define arch_fls(x)  ((x) ? 32 - __builtin_clz(x) : 0)
 #define arch_flsl(x) ((x) ? BITS_PER_LONG - __builtin_clzl(x) : 0)
 
+#define arch_hweightl(x) __builtin_popcountl(x)
+
 /**
  * hweightN - returns the hamming weight of a N-bit word
  * @x: the word to weigh
diff --git a/xen/common/bitops.c b/xen/common/bitops.c
index 4545682aa8e0..d0c268b4994a 100644
--- a/xen/common/bitops.c
+++ b/xen/common/bitops.c
@@ -106,10 +106,24 @@ static void __init test_multiple_bits_set(void)
     CHECK(multiple_bits_set, 0xc000000000000000ULL, true);
 }
 
+static void __init test_hweight(void)
+{
+    /* unsigned int hweightl(unsigned long) */
+    CHECK(hweightl, 0, 0);
+    CHECK(hweightl, 1, 1);
+    CHECK(hweightl, 3, 2);
+    CHECK(hweightl, 7, 3);
+    CHECK(hweightl, 0xff, 8);
+
+    CHECK(hweightl, 1 | (1UL << (BITS_PER_LONG - 1)), 2);
+    CHECK(hweightl, -1UL, BITS_PER_LONG);
+}
+
 static void __init __constructor test_bitops(void)
 {
     test_ffs();
     test_fls();
 
     test_multiple_bits_set();
+    test_hweight();
 }
diff --git a/xen/include/xen/bitops.h b/xen/include/xen/bitops.h
index 64d70a7a1cb5..3aac10b7f532 100644
--- a/xen/include/xen/bitops.h
+++ b/xen/include/xen/bitops.h
@@ -35,6 +35,12 @@ extern void __bitop_bad_size(void);
 unsigned int __pure generic_ffsl(unsigned long x);
 unsigned int __pure generic_flsl(unsigned long x);
 
+/*
+ * Hamming Weight, also called Population Count.  Returns the number of set
+ * bits in @x.
+ */
+unsigned int __pure generic_hweightl(unsigned long x);
+
 /**
  * generic__test_and_set_bit - Set a bit and return its old value
  * @nr: Bit to set
@@ -284,6 +290,18 @@ static always_inline __pure unsigned int fls64(uint64_t x)
     (_v & (_v - 1)) != 0;                           \
 })
 
+static always_inline __pure unsigned int hweightl(unsigned long x)
+{
+    if ( __builtin_constant_p(x) )
+        return __builtin_popcountl(x);
+
+#ifdef arch_hweightl
+    return arch_hweightl(x);
+#else
+    return generic_hweightl(x);
+#endif
+}
+
 /* --------------------- Please tidy below here --------------------- */
 
 #ifndef find_next_bit
diff --git a/xen/lib/Makefile b/xen/lib/Makefile
index a48541596470..b6558e108bd9 100644
--- a/xen/lib/Makefile
+++ b/xen/lib/Makefile
@@ -6,6 +6,7 @@ lib-y += ctype.o
 lib-y += find-next-bit.o
 lib-y += generic-ffsl.o
 lib-y += generic-flsl.o
+lib-y += generic-hweightl.o
 lib-y += list-sort.o
 lib-y += memchr.o
 lib-y += memchr_inv.o
diff --git a/xen/lib/generic-hweightl.c b/xen/lib/generic-hweightl.c
new file mode 100644
index 000000000000..fa4bbec273ab
--- /dev/null
+++ b/xen/lib/generic-hweightl.c
@@ -0,0 +1,46 @@
+/* SPDX-License-Identifier: GPL-2.0-only */
+
+#include <xen/bitops.h>
+#include <xen/init.h>
+#include <xen/self-tests.h>
+
+/* Mask value @b broadcast to every byte in a long */
+#if BITS_PER_LONG == 32
+# define MASK(b) ((b) * 0x01010101UL)
+#elif BITS_PER_LONG == 64
+# define MASK(b) ((b) * 0x0101010101010101UL)
+#else
+# error Extend me please
+#endif
+
+unsigned int generic_hweightl(unsigned long x)
+{
+    x -= (x >> 1) & MASK(0x55);
+    x =  (x & MASK(0x33)) + ((x >> 2) & MASK(0x33));
+    x =  (x + (x >> 4)) & MASK(0x0f);
+
+    if ( IS_ENABLED(CONFIG_HAS_FAST_MULTIPLY) )
+        return (x * MASK(0x01)) >> (BITS_PER_LONG - 8);
+
+    x += x >> 8;
+    x += x >> 16;
+#if BITS_PER_LONG > 32
+    x += x >> 32;
+#endif
+
+    return x & 0xff;
+}
+
+#ifdef CONFIG_SELF_TESTS
+static void __init __constructor test_generic_hweightl(void)
+{
+    RUNTIME_CHECK(generic_hweightl, 0, 0);
+    RUNTIME_CHECK(generic_hweightl, 1, 1);
+    RUNTIME_CHECK(generic_hweightl, 3, 2);
+    RUNTIME_CHECK(generic_hweightl, 7, 3);
+    RUNTIME_CHECK(generic_hweightl, 0xff, 8);
+
+    RUNTIME_CHECK(generic_hweightl, 1 | (1UL << (BITS_PER_LONG - 1)), 2);
+    RUNTIME_CHECK(generic_hweightl, -1UL, BITS_PER_LONG);
+}
+#endif /* CONFIG_SELF_TESTS */
-- 
2.39.2
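For readers unfamiliar with the bit-sliding ("SWAR") reduction used in
generic_hweightl() above, below is a minimal standalone sketch of the same
technique that compiles and runs outside the Xen tree.  The names
swar_popcountl(), BCAST() and LONG_BITS are invented for this illustration and
are not part of the patch; the fast_multiply parameter merely stands in for
the CONFIG_HAS_FAST_MULTIPLY build decision, selecting between the
single-multiply gather and the ADD/SHIFT fallback discussed in the commit
message.

/* popcount-sketch.c - illustrative only; mirrors the shape of generic_hweightl(). */
#include <assert.h>
#include <limits.h>
#include <stdio.h>

#define LONG_BITS (CHAR_BIT * (unsigned int)sizeof(unsigned long))

/* Broadcast the byte value b to every byte of an unsigned long. */
#define BCAST(b) ((b) * (~0UL / 0xff))

static unsigned int swar_popcountl(unsigned long x, int fast_multiply)
{
    x -= (x >> 1) & BCAST(0x55);                       /* 2-bit partial sums */
    x  = (x & BCAST(0x33)) + ((x >> 2) & BCAST(0x33)); /* 4-bit partial sums */
    x  = (x + (x >> 4)) & BCAST(0x0f);                 /* per-byte sums      */

    if ( fast_multiply )
        /* One multiply accumulates every byte sum into the top byte. */
        return (x * BCAST(0x01)) >> (LONG_BITS - 8);

    /* Fallback: fold the byte sums together with adds and shifts. */
    x += x >> 8;
    x += x >> 16;
#if ULONG_MAX > 0xffffffffUL
    x += x >> 32;
#endif

    return x & 0xff;
}

int main(void)
{
    assert(swar_popcountl(0, 1) == 0);
    assert(swar_popcountl(0xff, 0) == 8);
    assert(swar_popcountl(1UL | (1UL << (LONG_BITS - 1)), 1) == 2);
    assert(swar_popcountl(-1UL, 0) == LONG_BITS);

    printf("popcount(0xf0f0) = %u\n", swar_popcountl(0xf0f0UL, 1));

    return 0;
}

Building with cc -O2 popcount-sketch.c and running the result exercises both
tails; they produce identical counts, differing only in whether a multiply or
a short chain of adds and shifts performs the final accumulation.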