Mirror of https://github.com/AuxXxilium/linux_dsm_epyc7002.git, synced 2024-12-28 11:18:45 +07:00
7cf76a68f1
Some function prototypes were missing for the non-altivec code. Add the
missing prototypes in a new header file, fixing warnings treated as
errors with W=1:

  arch/powerpc/lib/xor_vmx_glue.c:18:6: error: no previous prototype for ‘xor_altivec_2’ [-Werror=missing-prototypes]
  arch/powerpc/lib/xor_vmx_glue.c:29:6: error: no previous prototype for ‘xor_altivec_3’ [-Werror=missing-prototypes]
  arch/powerpc/lib/xor_vmx_glue.c:40:6: error: no previous prototype for ‘xor_altivec_4’ [-Werror=missing-prototypes]
  arch/powerpc/lib/xor_vmx_glue.c:52:6: error: no previous prototype for ‘xor_altivec_5’ [-Werror=missing-prototypes]

The prototypes were already present in <asm/xor.h>, but that header file
is meant to be included after <include/linux/raid/xor.h>. Trying to
re-use <asm/xor.h> directly would lead to warnings such as:

  arch/powerpc/include/asm/xor.h:39:15: error: variable ‘xor_block_altivec’ has initializer but incomplete type

Trying to re-use <asm/xor.h> after <include/linux/raid/xor.h> in
xor_vmx_glue.c would in turn trigger the following warnings:

  include/asm-generic/xor.h:688:34: error: ‘xor_block_32regs’ defined but not used [-Werror=unused-variable]

Signed-off-by: Mathieu Malaterre <malat@debian.org>
Signed-off-by: Michael Ellerman <mpe@ellerman.id.au>
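For context, here is a minimal sketch of what the new prototype header could look like, inferred from the function definitions in the file below (which includes <asm/xor_altivec.h>). The guard name and the CONFIG_ALTIVEC conditional are assumptions, not a verbatim copy of the in-tree header:

/*
 * Sketch only: plausible contents of the new prototype header
 * (<asm/xor_altivec.h> as included by xor_vmx_glue.c below).
 * Guard name and CONFIG_ALTIVEC guard are assumptions.
 */
#ifndef _ASM_POWERPC_XOR_ALTIVEC_H
#define _ASM_POWERPC_XOR_ALTIVEC_H

#ifdef CONFIG_ALTIVEC
void xor_altivec_2(unsigned long bytes, unsigned long *v1_in,
		   unsigned long *v2_in);
void xor_altivec_3(unsigned long bytes, unsigned long *v1_in,
		   unsigned long *v2_in, unsigned long *v3_in);
void xor_altivec_4(unsigned long bytes, unsigned long *v1_in,
		   unsigned long *v2_in, unsigned long *v3_in,
		   unsigned long *v4_in);
void xor_altivec_5(unsigned long bytes, unsigned long *v1_in,
		   unsigned long *v2_in, unsigned long *v3_in,
		   unsigned long *v4_in, unsigned long *v5_in);
#endif /* CONFIG_ALTIVEC */

#endif /* _ASM_POWERPC_XOR_ALTIVEC_H */

Giving the non-altivec callers and the definitions in xor_vmx_glue.c a common set of declarations is what silences the -Wmissing-prototypes errors quoted above without dragging in <asm/xor.h> and its include-order requirements.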
64 lines
1.6 KiB
C
/*
 * Altivec XOR operations
 *
 * Copyright 2017 IBM Corp.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version
 * 2 of the License, or (at your option) any later version.
 */

#include <linux/preempt.h>
#include <linux/export.h>
#include <linux/sched.h>
#include <asm/switch_to.h>
#include <asm/xor_altivec.h>
#include "xor_vmx.h"

void xor_altivec_2(unsigned long bytes, unsigned long *v1_in,
		   unsigned long *v2_in)
{
	preempt_disable();
	enable_kernel_altivec();
	__xor_altivec_2(bytes, v1_in, v2_in);
	disable_kernel_altivec();
	preempt_enable();
}
EXPORT_SYMBOL(xor_altivec_2);

void xor_altivec_3(unsigned long bytes, unsigned long *v1_in,
		   unsigned long *v2_in, unsigned long *v3_in)
{
	preempt_disable();
	enable_kernel_altivec();
	__xor_altivec_3(bytes, v1_in, v2_in, v3_in);
	disable_kernel_altivec();
	preempt_enable();
}
EXPORT_SYMBOL(xor_altivec_3);

void xor_altivec_4(unsigned long bytes, unsigned long *v1_in,
		   unsigned long *v2_in, unsigned long *v3_in,
		   unsigned long *v4_in)
{
	preempt_disable();
	enable_kernel_altivec();
	__xor_altivec_4(bytes, v1_in, v2_in, v3_in, v4_in);
	disable_kernel_altivec();
	preempt_enable();
}
EXPORT_SYMBOL(xor_altivec_4);

void xor_altivec_5(unsigned long bytes, unsigned long *v1_in,
		   unsigned long *v2_in, unsigned long *v3_in,
		   unsigned long *v4_in, unsigned long *v5_in)
{
	preempt_disable();
	enable_kernel_altivec();
	__xor_altivec_5(bytes, v1_in, v2_in, v3_in, v4_in, v5_in);
	disable_kernel_altivec();
	preempt_enable();
}
EXPORT_SYMBOL(xor_altivec_5);
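For reference, the commit message's mention of xor_block_altivec in <asm/xor.h> refers to wiring these exported routines into the RAID XOR template selection. A sketch of that hookup is below; struct xor_block_template comes from <include/linux/raid/xor.h>, and the exact field set may differ between kernel versions:

/*
 * Sketch only: how xor_altivec_* are typically plugged into the RAID
 * xor framework in <asm/xor.h>. Not a verbatim copy of the in-tree
 * definition.
 */
#include <linux/raid/xor.h>
#include <asm/xor_altivec.h>

static struct xor_block_template xor_block_altivec = {
	.name = "altivec",
	.do_2 = xor_altivec_2,
	.do_3 = xor_altivec_3,
	.do_4 = xor_altivec_4,
	.do_5 = xor_altivec_5,
};

This is also why <asm/xor.h> must follow <include/linux/raid/xor.h>: the initializer above needs the full definition of struct xor_block_template, which is exactly the "initializer but incomplete type" error quoted in the commit message when the include order is reversed.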