Use riscv_has_extension_likely() to check for RISCV_ISA_EXT_ZBB,
replacing the open-coded asm goto that used ALTERNATIVE().
The "likely" variant is used to match the behavior of the original
implementation using ALTERNATIVE("j %l[legacy]", "nop", ...).
Signed-off-by: Vivian Wang <wangruikang@iscas.ac.cn>
Link: https://patch.msgid.link/20251020-riscv-altn-helper-wip-v4-3-ef941c87669a@iscas.ac.cn
Signed-off-by: Paul Walmsley <pjw@kernel.org>
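
For context, the pattern being replaced looked roughly like the sketch
below, reconstructed from the ALTERNATIVE() fragment quoted above (the
"legacy" label name is illustrative):

	static __always_inline unsigned int __arch_hweight32(unsigned int w)
	{
		if (!(IS_ENABLED(CONFIG_RISCV_ISA_ZBB) &&
		      IS_ENABLED(CONFIG_TOOLCHAIN_HAS_ZBB)))
			return __sw_hweight32(w);

		/* Patched to "nop" at boot when Zbb is detected; jumps away otherwise. */
		asm goto (ALTERNATIVE("j %l[legacy]", "nop", 0, RISCV_ISA_EXT_ZBB, 1)
			  : : : : legacy);

		asm (".option push\n"
		     ".option arch,+zbb\n"
		     CPOPW "%0, %1\n"
		     ".option pop\n"
		     : "=r" (w) : "r" (w) :);

		return w;

	legacy:
		return __sw_hweight32(w);
	}

riscv_has_extension_likely() encapsulates the same boot-time-patched
branch, so the "likely" variant preserves the original fall-through
behavior when the extension is present.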
/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Based on arch/x86/include/asm/arch_hweight.h
 */

#ifndef _ASM_RISCV_HWEIGHT_H
#define _ASM_RISCV_HWEIGHT_H

#include <asm/alternative-macros.h>
#include <asm/hwcap.h>

#if (BITS_PER_LONG == 64)
#define CPOPW	"cpopw "
#elif (BITS_PER_LONG == 32)
#define CPOPW	"cpop "
#else
#error "Unexpected BITS_PER_LONG"
#endif
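
/*
 * CPOPW names the population-count instruction for a 32-bit value: on
 * rv64 that is Zbb's "cpopw", which counts only the low 32 bits of a
 * register; on rv32, registers are already 32 bits wide, so plain
 * "cpop" does the same job.
 */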

static __always_inline unsigned int __arch_hweight32(unsigned int w)
{
	if (!(IS_ENABLED(CONFIG_RISCV_ISA_ZBB) &&
	      IS_ENABLED(CONFIG_TOOLCHAIN_HAS_ZBB) &&
	      riscv_has_extension_likely(RISCV_ISA_EXT_ZBB)))
		return __sw_hweight32(w);

	/*
	 * ".option arch,+zbb" lets the assembler accept cpop/cpopw in
	 * this one block without enabling Zbb for the whole file; the
	 * runtime check above keeps this path off non-Zbb hardware.
	 */
	asm (".option push\n"
	     ".option arch,+zbb\n"
	     CPOPW "%0, %1\n"
	     ".option pop\n"
	     : "=r" (w) : "r" (w) :);

	return w;
}

static inline unsigned int __arch_hweight16(unsigned int w)
{
	return __arch_hweight32(w & 0xffff);
}

static inline unsigned int __arch_hweight8(unsigned int w)
{
	return __arch_hweight32(w & 0xff);
}

#if BITS_PER_LONG == 64
static __always_inline unsigned long __arch_hweight64(__u64 w)
{
	if (!(IS_ENABLED(CONFIG_RISCV_ISA_ZBB) &&
	      IS_ENABLED(CONFIG_TOOLCHAIN_HAS_ZBB) &&
	      riscv_has_extension_likely(RISCV_ISA_EXT_ZBB)))
		return __sw_hweight64(w);

	asm (".option push\n"
	     ".option arch,+zbb\n"
	     "cpop %0, %1\n"
	     ".option pop\n"
	     : "=r" (w) : "r" (w) :);

	return w;
}
#else /* BITS_PER_LONG == 64 */
static inline unsigned long __arch_hweight64(__u64 w)
{
	/* No 64-bit GPRs on rv32: sum the popcounts of the two halves. */
	return __arch_hweight32((u32)w) +
	       __arch_hweight32((u32)(w >> 32));
}
#endif /* !(BITS_PER_LONG == 64) */

#endif /* _ASM_RISCV_HWEIGHT_H */
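
For reference, a minimal sketch of how these helpers are reached,
assuming the usual generic bitops wiring: include/asm-generic/bitops/const_hweight.h
folds constant arguments at compile time and sends non-constant ones
to __arch_hweight32()/__arch_hweight64(), so ordinary callers just use
the hweight*() wrappers from <linux/bitops.h>:

	#include <linux/bitops.h>

	/* Hypothetical caller: count the set flags in a feature mask. */
	static unsigned int count_set_flags(unsigned long mask)
	{
		/*
		 * hweight_long() resolves to hweight32() or hweight64()
		 * based on sizeof(long), reaching the helpers above for
		 * non-constant input.
		 */
		return hweight_long(mask);
	}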