diff options
author | Ingo Molnar <mingo@kernel.org> | 2022-01-22 13:39:15 +0100 |
---|---|---|
committer | Ingo Molnar <mingo@kernel.org> | 2023-09-22 09:34:50 +0200 |
commit | ad424743256b0119bd60a9248db4df5d998000a4 (patch) | |
tree | 2238036c200a8320bb168d6dfee819cfffe5f677 /arch/x86/lib | |
parent | 7c097ca50d2ba7f7989f01175f366151256bfa10 (diff) |
x86/bitops: Remove unused __sw_hweight64() assembly implementation on x86-32
Header cleanups in the fast-headers tree highlighted that we have an
unused assembly implementation for __sw_hweight64():
WARNING: modpost: EXPORT symbol "__sw_hweight64" [vmlinux] version ...
__arch_hweight64() on x86-32 is defined in the
arch/x86/include/asm/arch_hweight.h header as an inline, using
__arch_hweight32():
#ifdef CONFIG_X86_32
static inline unsigned long __arch_hweight64(__u64 w)
{
return __arch_hweight32((u32)w) +
__arch_hweight32((u32)(w >> 32));
}
*But* there's also a __sw_hweight64() assembly implementation:
arch/x86/lib/hweight.S
SYM_FUNC_START(__sw_hweight64)
#ifdef CONFIG_X86_64
...
#else /* CONFIG_X86_32 */
/* We're getting an u64 arg in (%eax,%edx): unsigned long hweight64(__u64 w) */
pushl %ecx
call __sw_hweight32
movl %eax, %ecx # stash away result
movl %edx, %eax # second part of input
call __sw_hweight32
addl %ecx, %eax # result
popl %ecx
ret
#endif
But this __sw_hweight64() assembly implementation is unused - and it's
essentially doing the same thing that the inline wrapper does.
Remove the assembly version and add a comment about it.
Reported-by: Nathan Chancellor <nathan@kernel.org>
Signed-off-by: Ingo Molnar <mingo@kernel.org>
Cc: Linus Torvalds <torvalds@linux-foundation.org>
Cc: linux-kernel@vger.kernel.org
Diffstat (limited to 'arch/x86/lib')
-rw-r--r-- | arch/x86/lib/hweight.S | 20 |
1 file changed, 6 insertions, 14 deletions
diff --git a/arch/x86/lib/hweight.S b/arch/x86/lib/hweight.S index 12c16c6aa44a..0a152e51d3f5 100644 --- a/arch/x86/lib/hweight.S +++ b/arch/x86/lib/hweight.S @@ -36,8 +36,12 @@ SYM_FUNC_START(__sw_hweight32) SYM_FUNC_END(__sw_hweight32) EXPORT_SYMBOL(__sw_hweight32) -SYM_FUNC_START(__sw_hweight64) +/* + * No 32-bit variant, because it's implemented as an inline wrapper + * on top of __arch_hweight32(): + */ #ifdef CONFIG_X86_64 +SYM_FUNC_START(__sw_hweight64) pushq %rdi pushq %rdx @@ -66,18 +70,6 @@ SYM_FUNC_START(__sw_hweight64) popq %rdx popq %rdi RET -#else /* CONFIG_X86_32 */ - /* We're getting an u64 arg in (%eax,%edx): unsigned long hweight64(__u64 w) */ - pushl %ecx - - call __sw_hweight32 - movl %eax, %ecx # stash away result - movl %edx, %eax # second part of input - call __sw_hweight32 - addl %ecx, %eax # result - - popl %ecx - RET -#endif SYM_FUNC_END(__sw_hweight64) EXPORT_SYMBOL(__sw_hweight64) +#endif |