mirror of
https://github.com/AuxXxilium/linux_dsm_epyc7002.git
synced 2024-12-26 15:05:24 +07:00
674f368a95
The CRYPTO_TFM_RES_BAD_KEY_LEN flag was apparently meant as a way to make the ->setkey() functions provide more information about errors. However, no one actually checks for this flag, which makes it pointless. Also, many algorithms fail to set this flag when given a bad length key. Reviewing just the generic implementations, this is the case for aes-fixed-time, cbcmac, echainiv, nhpoly1305, pcrypt, rfc3686, rfc4309, rfc7539, rfc7539esp, salsa20, seqiv, and xcbc. But there are probably many more in arch/*/crypto/ and drivers/crypto/. Some algorithms can even set this flag when the key is the correct length. For example, authenc and authencesn set it when the key payload is malformed in any way (not just a bad length), the atmel-sha and ccree drivers can set it if a memory allocation fails, and the chelsio driver sets it for bad auth tag lengths, not just bad key lengths. So even if someone actually wanted to start checking this flag (which seems unlikely, since it's been unused for a long time), there would be a lot of work needed to get it working correctly. But it would probably be much better to go back to the drawing board and just define different return values, like -EINVAL if the key is invalid for the algorithm vs. -EKEYREJECTED if the key was rejected by a policy like "no weak keys". That would be much simpler, less error-prone, and easier to test. So just remove this flag. Signed-off-by: Eric Biggers <ebiggers@google.com> Reviewed-by: Horia Geantă <horia.geanta@nxp.com> Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
247 lines
5.4 KiB
C
247 lines
5.4 KiB
C
// SPDX-License-Identifier: GPL-2.0-only
|
|
/*
|
|
* Accelerated CRC32(C) using ARM CRC, NEON and Crypto Extensions instructions
|
|
*
|
|
* Copyright (C) 2016 Linaro Ltd <ard.biesheuvel@linaro.org>
|
|
*/
|
|
|
|
#include <linux/cpufeature.h>
|
|
#include <linux/crc32.h>
|
|
#include <linux/init.h>
|
|
#include <linux/kernel.h>
|
|
#include <linux/module.h>
|
|
#include <linux/string.h>
|
|
|
|
#include <crypto/internal/hash.h>
|
|
#include <crypto/internal/simd.h>
|
|
|
|
#include <asm/hwcap.h>
|
|
#include <asm/neon.h>
|
|
#include <asm/simd.h>
|
|
#include <asm/unaligned.h>
|
|
|
|
#define PMULL_MIN_LEN		64L	/* minimum size of buffer
					 * for crc32_pmull_le_16 */
#define SCALE_F			16L	/* size of NEON register */

/*
 * Assembly implementations (crc32-ce-core.S).  Note the two families use
 * different argument orders: the PMULL routines take (buf, len, crc),
 * the ARMv8 CRC32-instruction routines take (crc, buf, len).
 */
asmlinkage u32 crc32_pmull_le(const u8 buf[], u32 len, u32 init_crc);
asmlinkage u32 crc32_armv8_le(u32 init_crc, const u8 buf[], u32 len);

asmlinkage u32 crc32c_pmull_le(const u8 buf[], u32 len, u32 init_crc);
asmlinkage u32 crc32c_armv8_le(u32 init_crc, const u8 buf[], u32 len);

/*
 * Scalar routines used by the PMULL update paths for unaligned heads,
 * short tails, and when the SIMD unit is unusable.  Selected once at
 * module init: the ARMv8 CRC32 instructions when HWCAP2_CRC32 is set,
 * otherwise the generic table-driven lib/crc32 helpers.
 */
static u32 (*fallback_crc32)(u32 init_crc, const u8 buf[], u32 len);
static u32 (*fallback_crc32c)(u32 init_crc, const u8 buf[], u32 len);
|
static int crc32_cra_init(struct crypto_tfm *tfm)
|
|
{
|
|
u32 *key = crypto_tfm_ctx(tfm);
|
|
|
|
*key = 0;
|
|
return 0;
|
|
}
|
|
|
|
static int crc32c_cra_init(struct crypto_tfm *tfm)
|
|
{
|
|
u32 *key = crypto_tfm_ctx(tfm);
|
|
|
|
*key = ~0;
|
|
return 0;
|
|
}
|
|
|
|
static int crc32_setkey(struct crypto_shash *hash, const u8 *key,
|
|
unsigned int keylen)
|
|
{
|
|
u32 *mctx = crypto_shash_ctx(hash);
|
|
|
|
if (keylen != sizeof(u32))
|
|
return -EINVAL;
|
|
*mctx = le32_to_cpup((__le32 *)key);
|
|
return 0;
|
|
}
|
|
|
|
static int crc32_init(struct shash_desc *desc)
|
|
{
|
|
u32 *mctx = crypto_shash_ctx(desc->tfm);
|
|
u32 *crc = shash_desc_ctx(desc);
|
|
|
|
*crc = *mctx;
|
|
return 0;
|
|
}
|
|
|
|
static int crc32_update(struct shash_desc *desc, const u8 *data,
|
|
unsigned int length)
|
|
{
|
|
u32 *crc = shash_desc_ctx(desc);
|
|
|
|
*crc = crc32_armv8_le(*crc, data, length);
|
|
return 0;
|
|
}
|
|
|
|
static int crc32c_update(struct shash_desc *desc, const u8 *data,
|
|
unsigned int length)
|
|
{
|
|
u32 *crc = shash_desc_ctx(desc);
|
|
|
|
*crc = crc32c_armv8_le(*crc, data, length);
|
|
return 0;
|
|
}
|
|
|
|
/*
 * Emit the crc32 digest as 4 little-endian bytes; 'out' may be
 * unaligned.
 */
static int crc32_final(struct shash_desc *desc, u8 *out)
{
	u32 *state = shash_desc_ctx(desc);

	put_unaligned_le32(*state, out);
	return 0;
}
/*
 * Emit the crc32c digest: the running state is inverted on output
 * (matching the ~0 seed), written as 4 little-endian bytes to a
 * possibly unaligned 'out'.
 */
static int crc32c_final(struct shash_desc *desc, u8 *out)
{
	u32 *state = shash_desc_ctx(desc);

	put_unaligned_le32(~*state, out);
	return 0;
}
/*
 * crc32 update using the PMULL (carry-less multiply) path when the SIMD
 * unit is usable.  The buffer is processed in three pieces:
 *   1. an unaligned head, folded in with the scalar fallback until
 *      'data' reaches a SCALE_F (NEON register size) boundary;
 *   2. the largest SCALE_F-multiple middle chunk via crc32_pmull_le(),
 *      taken only when at least PMULL_MIN_LEN bytes remain — below
 *      that, the PMULL path is presumably not worth the NEON
 *      save/restore overhead;
 *   3. any tail (or the whole buffer when SIMD is unusable) via the
 *      scalar fallback.
 * The NEON call must stay bracketed by kernel_neon_begin()/_end().
 * NOTE(review): the (u32) casts of 'data' assume 32-bit pointers, which
 * holds for this ARM (arm32) build.
 */
static int crc32_pmull_update(struct shash_desc *desc, const u8 *data,
			      unsigned int length)
{
	u32 *crc = shash_desc_ctx(desc);
	unsigned int l;

	if (crypto_simd_usable()) {
		if ((u32)data % SCALE_F) {
			/* bytes needed to reach the next 16-byte boundary */
			l = min_t(u32, length, SCALE_F - ((u32)data % SCALE_F));

			*crc = fallback_crc32(*crc, data, l);

			data += l;
			length -= l;
		}

		if (length >= PMULL_MIN_LEN) {
			/* largest multiple of the NEON register size */
			l = round_down(length, SCALE_F);

			kernel_neon_begin();
			*crc = crc32_pmull_le(data, l, *crc);
			kernel_neon_end();

			data += l;
			length -= l;
		}
	}

	/* tail, or the entire buffer when the SIMD path was skipped */
	if (length > 0)
		*crc = fallback_crc32(*crc, data, length);

	return 0;
}
/*
 * crc32c counterpart of crc32_pmull_update(): identical head/middle/tail
 * structure, but using crc32c_pmull_le() and the crc32c scalar fallback.
 * See crc32_pmull_update() in this file for the detailed flow; the NEON
 * call must stay bracketed by kernel_neon_begin()/_end().
 */
static int crc32c_pmull_update(struct shash_desc *desc, const u8 *data,
			       unsigned int length)
{
	u32 *crc = shash_desc_ctx(desc);
	unsigned int l;

	if (crypto_simd_usable()) {
		if ((u32)data % SCALE_F) {
			/* bytes needed to reach the next 16-byte boundary */
			l = min_t(u32, length, SCALE_F - ((u32)data % SCALE_F));

			*crc = fallback_crc32c(*crc, data, l);

			data += l;
			length -= l;
		}

		if (length >= PMULL_MIN_LEN) {
			/* largest multiple of the NEON register size */
			l = round_down(length, SCALE_F);

			kernel_neon_begin();
			*crc = crc32c_pmull_le(data, l, *crc);
			kernel_neon_end();

			data += l;
			length -= l;
		}
	}

	/* tail, or the entire buffer when the SIMD path was skipped */
	if (length > 0)
		*crc = fallback_crc32c(*crc, data, length);

	return 0;
}
/*
 * Registered shash algorithms: [0] is crc32, [1] is crc32c.  Both share
 * crc32_setkey() and crc32_init() (they differ only in default seed,
 * update routine, and final inversion).  The .update hooks default to
 * the ARMv8 CRC32-instruction versions; module init swaps in the PMULL
 * versions when HWCAP2_PMULL is available.  CRYPTO_ALG_OPTIONAL_KEY
 * marks the seed-overriding key as optional.
 */
static struct shash_alg crc32_pmull_algs[] = { {
	.setkey			= crc32_setkey,
	.init			= crc32_init,
	.update			= crc32_update,
	.final			= crc32_final,
	.descsize		= sizeof(u32),
	.digestsize		= sizeof(u32),

	.base.cra_ctxsize	= sizeof(u32),
	.base.cra_init		= crc32_cra_init,
	.base.cra_name		= "crc32",
	.base.cra_driver_name	= "crc32-arm-ce",
	.base.cra_priority	= 200,
	.base.cra_flags		= CRYPTO_ALG_OPTIONAL_KEY,
	.base.cra_blocksize	= 1,
	.base.cra_module	= THIS_MODULE,
}, {
	.setkey			= crc32_setkey,
	.init			= crc32_init,
	.update			= crc32c_update,
	.final			= crc32c_final,
	.descsize		= sizeof(u32),
	.digestsize		= sizeof(u32),

	.base.cra_ctxsize	= sizeof(u32),
	.base.cra_init		= crc32c_cra_init,
	.base.cra_name		= "crc32c",
	.base.cra_driver_name	= "crc32c-arm-ce",
	.base.cra_priority	= 200,
	.base.cra_flags		= CRYPTO_ALG_OPTIONAL_KEY,
	.base.cra_blocksize	= 1,
	.base.cra_module	= THIS_MODULE,
} };
/*
 * Register the algorithms with the best implementation for this CPU:
 *  - PMULL available: use the PMULL update paths, with the ARMv8 CRC32
 *    instructions as scalar fallback when present, else the generic
 *    lib/crc32 helpers;
 *  - only CRC32 instructions: keep the default ARMv8 update paths;
 *  - neither extension: nothing to accelerate, fail with -ENODEV.
 */
static int __init crc32_pmull_mod_init(void)
{
	bool have_pmull = elf_hwcap2 & HWCAP2_PMULL;
	bool have_crc32 = elf_hwcap2 & HWCAP2_CRC32;

	if (!have_pmull && !have_crc32)
		return -ENODEV;

	if (have_pmull) {
		crc32_pmull_algs[0].update = crc32_pmull_update;
		crc32_pmull_algs[1].update = crc32c_pmull_update;

		fallback_crc32 = have_crc32 ? crc32_armv8_le : crc32_le;
		fallback_crc32c = have_crc32 ? crc32c_armv8_le : __crc32c_le;
	}

	return crypto_register_shashes(crc32_pmull_algs,
				       ARRAY_SIZE(crc32_pmull_algs));
}
/* Unregister both algorithms on module unload. */
static void __exit crc32_pmull_mod_exit(void)
{
	crypto_unregister_shashes(crc32_pmull_algs,
				  ARRAY_SIZE(crc32_pmull_algs));
}
/*
 * CPU feature table for module autoloading: load this module when the
 * CPU advertises either the CRC32 or the PMULL extension (matching the
 * either/or logic in crc32_pmull_mod_init()).
 */
static const struct cpu_feature __maybe_unused crc32_cpu_feature[] = {
	{ cpu_feature(CRC32) }, { cpu_feature(PMULL) }, { }
};
MODULE_DEVICE_TABLE(cpu, crc32_cpu_feature);

module_init(crc32_pmull_mod_init);
module_exit(crc32_pmull_mod_exit);

MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("crc32");
MODULE_ALIAS_CRYPTO("crc32c");