2018-07-13 21:51:37 +07:00
|
|
|
// SPDX-License-Identifier: GPL-2.0
|
2017-05-24 21:10:34 +07:00
|
|
|
/*
|
|
|
|
* Copyright (C) 2017 Marvell
|
|
|
|
*
|
|
|
|
* Antoine Tenart <antoine.tenart@free-electrons.com>
|
|
|
|
*/
|
|
|
|
|
2019-09-19 04:25:56 +07:00
|
|
|
#include <asm/unaligned.h>
|
2017-05-24 21:10:34 +07:00
|
|
|
#include <linux/device.h>
|
|
|
|
#include <linux/dma-mapping.h>
|
|
|
|
#include <linux/dmapool.h>
|
2018-05-14 20:11:02 +07:00
|
|
|
#include <crypto/aead.h>
|
2017-05-24 21:10:34 +07:00
|
|
|
#include <crypto/aes.h>
|
2018-05-14 20:11:02 +07:00
|
|
|
#include <crypto/authenc.h>
|
2019-09-19 04:25:56 +07:00
|
|
|
#include <crypto/chacha.h>
|
2019-07-30 20:27:11 +07:00
|
|
|
#include <crypto/ctr.h>
|
2019-08-15 16:01:09 +07:00
|
|
|
#include <crypto/internal/des.h>
|
2019-08-30 14:52:30 +07:00
|
|
|
#include <crypto/gcm.h>
|
|
|
|
#include <crypto/ghash.h>
|
2019-09-19 04:25:57 +07:00
|
|
|
#include <crypto/poly1305.h>
|
2018-05-14 20:11:02 +07:00
|
|
|
#include <crypto/sha.h>
|
crypto: inside-secure - Add SM4 based authenc AEAD ciphersuites
This patch adds support for the authenc(hmac(sha1),cbc(sm4)),
authenc(hmac(sm3),cbc(sm4)), authenc(hmac(sha1),rfc3686(ctr(sm4))),
and authenc(hmac(sm3),rfc3686(ctr(sm4))) aead ciphersuites.
These are necessary to support IPsec according to the Chinese standard
GM/T 022-1014 - IPsec VPN specification.
Note that there are no testvectors present in testmgr for these
ciphersuites. However, considering all building blocks have already been
verified elsewhere, it is fair to assume the generic implementation to be
correct-by-construction.
The hardware implementation has been fuzzed against this generic
implementation by means of a locally modified testmgr. The intention is
to upstream these testmgr changes but this is pending other testmgr changes
being made by Eric Biggers.
The patch has been tested with the eip197c_iewxkbc configuration on the
Xilinx VCU118 development board, using the abovementioned modified testmgr
This patch applies on top of "Add support for SM4 ciphers" and needs to
be applied before "Add (HMAC) SHA3 support".
Signed-off-by: Pascal van Leeuwen <pvanleeuwen@verimatrix.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
2019-09-14 01:44:50 +07:00
|
|
|
#include <crypto/sm3.h>
|
2019-09-13 16:10:36 +07:00
|
|
|
#include <crypto/sm4.h>
|
2019-08-30 14:40:53 +07:00
|
|
|
#include <crypto/xts.h>
|
2017-05-24 21:10:34 +07:00
|
|
|
#include <crypto/skcipher.h>
|
2018-05-14 20:11:02 +07:00
|
|
|
#include <crypto/internal/aead.h>
|
2017-12-11 18:10:55 +07:00
|
|
|
#include <crypto/internal/skcipher.h>
|
2017-05-24 21:10:34 +07:00
|
|
|
|
|
|
|
#include "safexcel.h"
|
|
|
|
|
|
|
|
/* Direction of a cipher operation, as programmed into the descriptor. */
enum safexcel_cipher_direction {
	SAFEXCEL_ENCRYPT,
	SAFEXCEL_DECRYPT,
};
|
|
|
|
|
2018-06-28 22:21:55 +07:00
|
|
|
/* Block/stream cipher algorithms supported by this driver. */
enum safexcel_cipher_alg {
	SAFEXCEL_DES,
	SAFEXCEL_3DES,
	SAFEXCEL_AES,
	SAFEXCEL_CHACHA20,
	SAFEXCEL_SM4,
};
|
|
|
|
|
2017-05-24 21:10:34 +07:00
|
|
|
/* Per-transform (tfm) context for the safexcel skcipher/AEAD algorithms. */
struct safexcel_cipher_ctx {
	struct safexcel_context base;		/* common driver context state */
	struct safexcel_crypto_priv *priv;	/* owning device instance */

	u32 mode;			/* CONTEXT_CONTROL_CRYPTO_MODE_* */
	enum safexcel_cipher_alg alg;	/* underlying cipher algorithm */
	char aead; /* !=0=AEAD, 2=IPSec ESP AEAD, 3=IPsec ESP GMAC */
	char xcm;  /* 0=authenc, 1=GCM, 2 reserved for CCM */

	__le32 key[16];			/* cipher key, stored little-endian */
	u32 nonce;			/* salt taken from the key tail for RFC3686/ESP */
	unsigned int key_len, xts;	/* key length in bytes; xts used outside this view */

	/* All the below is AEAD specific */
	u32 hash_alg;			/* CONTEXT_CONTROL_CRYPTO_ALG_* for auth */
	u32 state_sz;			/* size of one hash state in bytes */
	__be32 ipad[SHA512_DIGEST_SIZE / sizeof(u32)];	/* precomputed HMAC inner state */
	__be32 opad[SHA512_DIGEST_SIZE / sizeof(u32)];	/* precomputed HMAC outer state */

	/* presumably AES tfm for deriving the GCM hash key — set up outside this view */
	struct crypto_cipher *hkaes;
	/* software fallback AEAD tfm — used outside this view */
	struct crypto_aead *fback;
};
|
|
|
|
|
2017-12-11 18:10:55 +07:00
|
|
|
/* Per-request state kept in the skcipher/aead request context. */
struct safexcel_cipher_req {
	enum safexcel_cipher_direction direction;
	/* Number of result descriptors associated to the request */
	unsigned int rdescs;
	bool needs_inv;		/* request performs a context invalidation (set outside this view) */
	int nr_src, nr_dst;	/* assumed: mapped src/dst scatterlist entry counts — set outside this view */
};
|
|
|
|
|
2019-07-05 13:49:24 +07:00
|
|
|
/*
 * Fill in the IV/nonce words of the command descriptor token.
 * The exact layout written depends on ctx->mode, ctx->alg, ctx->aead and
 * ctx->xcm; the instruction part of the token is appended separately by
 * the callers (safexcel_skcipher_token / safexcel_aead_token).
 */
static void safexcel_cipher_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
				  struct safexcel_command_desc *cdesc)
{
	u32 block_sz = 0;

	if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD ||
	    ctx->aead & EIP197_AEAD_TYPE_IPSEC_ESP) { /* _ESP and _ESP_GMAC */
		cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;

		/* 32 bit nonce */
		cdesc->control_data.token[0] = ctx->nonce;
		/* 64 bit IV part */
		memcpy(&cdesc->control_data.token[1], iv, 8);

		if (ctx->alg == SAFEXCEL_CHACHA20 ||
		    ctx->xcm == EIP197_XCM_MODE_CCM) {
			/* 32 bit counter, starting at 0 */
			cdesc->control_data.token[3] = 0;
		} else {
			/* 32 bit counter, start at 1 (big endian!) */
			cdesc->control_data.token[3] =
				(__force u32)cpu_to_be32(1);
		}

		return;
	} else if (ctx->xcm == EIP197_XCM_MODE_GCM ||
		   (ctx->aead && ctx->alg == SAFEXCEL_CHACHA20)) {
		cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;

		/* 96 bit IV part */
		memcpy(&cdesc->control_data.token[0], iv, 12);

		if (ctx->alg == SAFEXCEL_CHACHA20) {
			/* 32 bit counter, starting at 0 */
			cdesc->control_data.token[3] = 0;
		} else {
			/* 32 bit counter, start at 1 (big endian!) */
			*(__be32 *)&cdesc->control_data.token[3] =
				cpu_to_be32(1);
		}

		return;
	} else if (ctx->alg == SAFEXCEL_CHACHA20) {
		cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;

		/* 96 bit nonce part */
		memcpy(&cdesc->control_data.token[0], &iv[4], 12);
		/* 32 bit counter */
		cdesc->control_data.token[3] = *(u32 *)iv;

		return;
	} else if (ctx->xcm == EIP197_XCM_MODE_CCM) {
		cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;

		/* Variable length IV part */
		memcpy(&cdesc->control_data.token[0], iv, 15 - iv[0]);
		/* Start variable length counter at 0 */
		memset((u8 *)&cdesc->control_data.token[0] + 15 - iv[0],
		       0, iv[0] + 1);

		return;
	}

	/* Plain (non-AEAD, non-counter) modes: copy the raw IV, if any */
	if (ctx->mode != CONTEXT_CONTROL_CRYPTO_MODE_ECB) {
		switch (ctx->alg) {
		case SAFEXCEL_DES:
			block_sz = DES_BLOCK_SIZE;
			cdesc->control_data.options |= EIP197_OPTION_2_TOKEN_IV_CMD;
			break;
		case SAFEXCEL_3DES:
			block_sz = DES3_EDE_BLOCK_SIZE;
			cdesc->control_data.options |= EIP197_OPTION_2_TOKEN_IV_CMD;
			break;
		case SAFEXCEL_SM4:
			block_sz = SM4_BLOCK_SIZE;
			cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
			break;
		case SAFEXCEL_AES:
			block_sz = AES_BLOCK_SIZE;
			cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
			break;
		default:
			/* block_sz stays 0: no IV bytes are copied below */
			break;
		}
		memcpy(cdesc->control_data.token, iv, block_sz);
	}
}
|
|
|
|
|
|
|
|
static void safexcel_skcipher_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
|
|
|
|
struct safexcel_command_desc *cdesc,
|
|
|
|
u32 length)
|
|
|
|
{
|
|
|
|
struct safexcel_token *token;
|
|
|
|
|
|
|
|
safexcel_cipher_token(ctx, iv, cdesc);
|
2017-05-24 21:10:34 +07:00
|
|
|
|
2019-07-05 13:49:23 +07:00
|
|
|
/* skip over worst case IV of 4 dwords, no need to be exact */
|
|
|
|
token = (struct safexcel_token *)(cdesc->control_data.token + 4);
|
2017-05-24 21:10:34 +07:00
|
|
|
|
|
|
|
token[0].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
|
|
|
|
token[0].packet_length = length;
|
2018-03-19 15:21:18 +07:00
|
|
|
token[0].stat = EIP197_TOKEN_STAT_LAST_PACKET |
|
|
|
|
EIP197_TOKEN_STAT_LAST_HASH;
|
2017-05-24 21:10:34 +07:00
|
|
|
token[0].instructions = EIP197_TOKEN_INS_LAST |
|
2019-07-02 21:39:56 +07:00
|
|
|
EIP197_TOKEN_INS_TYPE_CRYPTO |
|
2017-05-24 21:10:34 +07:00
|
|
|
EIP197_TOKEN_INS_TYPE_OUTPUT;
|
|
|
|
}
|
|
|
|
|
2018-05-14 20:11:02 +07:00
|
|
|
/*
 * Build the EIP197 token instruction sequence for an AEAD request.
 * Instruction slots (token[0..14]) sit at fixed positions relative to the
 * end of the token area; which slots get filled depends on the direction,
 * the authenc/GCM/CCM mode (ctx->xcm) and the ESP/GMAC flavour (ctx->aead).
 */
static void safexcel_aead_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
				struct safexcel_command_desc *cdesc,
				enum safexcel_cipher_direction direction,
				u32 cryptlen, u32 assoclen, u32 digestsize)
{
	struct safexcel_token *token;

	safexcel_cipher_token(ctx, iv, cdesc);

	if (direction == SAFEXCEL_ENCRYPT) {
		/* align end of instruction sequence to end of token */
		token = (struct safexcel_token *)(cdesc->control_data.token +
			 EIP197_MAX_TOKENS - 14);

		/* Encrypt: append the computed digest to the output */
		token[13].opcode = EIP197_TOKEN_OPCODE_INSERT;
		token[13].packet_length = digestsize;
		token[13].stat = EIP197_TOKEN_STAT_LAST_HASH |
				 EIP197_TOKEN_STAT_LAST_PACKET;
		token[13].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
					 EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
	} else {
		/* Decrypt: the trailing digest is not part of the payload */
		cryptlen -= digestsize;

		/* align end of instruction sequence to end of token */
		token = (struct safexcel_token *)(cdesc->control_data.token +
			 EIP197_MAX_TOKENS - 15);

		/* Retrieve the transmitted digest, then verify it */
		token[13].opcode = EIP197_TOKEN_OPCODE_RETRIEVE;
		token[13].packet_length = digestsize;
		token[13].stat = EIP197_TOKEN_STAT_LAST_HASH |
				 EIP197_TOKEN_STAT_LAST_PACKET;
		token[13].instructions = EIP197_TOKEN_INS_INSERT_HASH_DIGEST;

		token[14].opcode = EIP197_TOKEN_OPCODE_VERIFY;
		token[14].packet_length = digestsize |
					  EIP197_TOKEN_HASH_RESULT_VERIFY;
		token[14].stat = EIP197_TOKEN_STAT_LAST_HASH |
				 EIP197_TOKEN_STAT_LAST_PACKET;
		token[14].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT;
	}

	if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
		/* For ESP mode (and not GMAC), skip over the IV */
		token[8].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
		token[8].packet_length = EIP197_AEAD_IPSEC_IV_SIZE;

		assoclen -= EIP197_AEAD_IPSEC_IV_SIZE;
	}

	/* Associated data: hashed only, not encrypted */
	token[6].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
	token[6].packet_length = assoclen;
	token[6].instructions = EIP197_TOKEN_INS_LAST |
				EIP197_TOKEN_INS_TYPE_HASH;

	if (likely(cryptlen || ctx->alg == SAFEXCEL_CHACHA20)) {
		token[11].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
		token[11].packet_length = cryptlen;
		token[11].stat = EIP197_TOKEN_STAT_LAST_HASH;
		if (unlikely(ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP_GMAC)) {
			token[6].instructions = EIP197_TOKEN_INS_TYPE_HASH;
			/* Do not send to crypt engine in case of GMAC */
			token[11].instructions = EIP197_TOKEN_INS_LAST |
						 EIP197_TOKEN_INS_TYPE_HASH |
						 EIP197_TOKEN_INS_TYPE_OUTPUT;
		} else {
			token[11].instructions = EIP197_TOKEN_INS_LAST |
						 EIP197_TOKEN_INS_TYPE_CRYPTO |
						 EIP197_TOKEN_INS_TYPE_HASH |
						 EIP197_TOKEN_INS_TYPE_OUTPUT;
		}
	} else if (ctx->xcm != EIP197_XCM_MODE_CCM) {
		/* No payload: AAD instruction terminates the hash stream */
		token[6].stat = EIP197_TOKEN_STAT_LAST_HASH;
	}

	/* Everything below is GCM/CCM (XCM) specific */
	if (!ctx->xcm)
		return;

	token[9].opcode = EIP197_TOKEN_OPCODE_INSERT_REMRES;
	token[9].packet_length = 0;
	token[9].instructions = AES_BLOCK_SIZE;

	token[10].opcode = EIP197_TOKEN_OPCODE_INSERT;
	token[10].packet_length = AES_BLOCK_SIZE;
	token[10].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
				 EIP197_TOKEN_INS_TYPE_CRYPTO;

	if (ctx->xcm != EIP197_XCM_MODE_GCM) {
		u8 *final_iv = (u8 *)cdesc->control_data.token;
		u8 *cbcmaciv = (u8 *)&token[1];
		__le32 *aadlen = (__le32 *)&token[5];

		/* Construct IV block B0 for the CBC-MAC */
		token[0].opcode = EIP197_TOKEN_OPCODE_INSERT;
		token[0].packet_length = AES_BLOCK_SIZE +
					 ((assoclen > 0) << 1);
		token[0].instructions = EIP197_TOKEN_INS_ORIGIN_TOKEN |
					EIP197_TOKEN_INS_TYPE_HASH;
		/* Variable length IV part */
		memcpy(cbcmaciv, final_iv, 15 - final_iv[0]);
		/* fixup flags byte */
		cbcmaciv[0] |= ((assoclen > 0) << 6) | ((digestsize - 2) << 2);
		/* Clear upper bytes of variable message length to 0 */
		memset(cbcmaciv + 15 - final_iv[0], 0, final_iv[0] - 1);
		/* insert lower 2 bytes of message length */
		cbcmaciv[14] = cryptlen >> 8;
		cbcmaciv[15] = cryptlen & 255;

		if (assoclen) {
			/* Prepend the 2-byte big-endian AAD length field */
			*aadlen = cpu_to_le32((assoclen >> 8) |
					      ((assoclen & 0xff) << 8));
			assoclen += 2;
		}

		token[6].instructions = EIP197_TOKEN_INS_TYPE_HASH;

		/* Align AAD data towards hash engine */
		token[7].opcode = EIP197_TOKEN_OPCODE_INSERT;
		assoclen &= 15;
		token[7].packet_length = assoclen ? 16 - assoclen : 0;

		if (likely(cryptlen)) {
			token[7].instructions = EIP197_TOKEN_INS_TYPE_HASH;

			/* Align crypto data towards hash engine */
			token[11].stat = 0;

			token[12].opcode = EIP197_TOKEN_OPCODE_INSERT;
			cryptlen &= 15;
			token[12].packet_length = cryptlen ? 16 - cryptlen : 0;
			token[12].stat = EIP197_TOKEN_STAT_LAST_HASH;
			token[12].instructions = EIP197_TOKEN_INS_TYPE_HASH;
		} else {
			token[7].stat = EIP197_TOKEN_STAT_LAST_HASH;
			token[7].instructions = EIP197_TOKEN_INS_LAST |
						EIP197_TOKEN_INS_TYPE_HASH;
		}
	}
}
|
|
|
|
|
2018-05-14 20:10:56 +07:00
|
|
|
static int safexcel_skcipher_aes_setkey(struct crypto_skcipher *ctfm,
|
|
|
|
const u8 *key, unsigned int len)
|
2017-05-24 21:10:34 +07:00
|
|
|
{
|
|
|
|
struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
|
|
|
|
struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
|
2017-12-14 21:26:58 +07:00
|
|
|
struct safexcel_crypto_priv *priv = ctx->priv;
|
2017-05-24 21:10:34 +07:00
|
|
|
struct crypto_aes_ctx aes;
|
|
|
|
int ret, i;
|
|
|
|
|
2019-07-03 02:41:27 +07:00
|
|
|
ret = aes_expandkey(&aes, key, len);
|
2017-05-24 21:10:34 +07:00
|
|
|
if (ret) {
|
|
|
|
crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
|
|
|
|
return ret;
|
|
|
|
}
|
|
|
|
|
2018-06-28 22:15:35 +07:00
|
|
|
if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
|
2017-12-14 21:26:47 +07:00
|
|
|
for (i = 0; i < len / sizeof(u32); i++) {
|
2019-10-22 16:01:43 +07:00
|
|
|
if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
|
2017-12-14 21:26:47 +07:00
|
|
|
ctx->base.needs_inv = true;
|
|
|
|
break;
|
|
|
|
}
|
2017-05-24 21:10:34 +07:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
for (i = 0; i < len / sizeof(u32); i++)
|
|
|
|
ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
|
|
|
|
|
|
|
|
ctx->key_len = len;
|
|
|
|
|
|
|
|
memzero_explicit(&aes, sizeof(aes));
|
|
|
|
return 0;
|
|
|
|
}
|
|
|
|
|
2019-07-05 13:49:22 +07:00
|
|
|
/*
 * Set the combined authenc(hmac(...),cipher) key for an AEAD transform.
 * The incoming blob is split into encryption and authentication keys,
 * the encryption key is validated for the selected cipher, and both the
 * key and the precomputed HMAC ipad/opad states are stored in the
 * context. Returns 0 on success or a negative error code.
 */
static int safexcel_aead_setkey(struct crypto_aead *ctfm, const u8 *key,
				unsigned int len)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_ahash_export_state istate, ostate;
	struct safexcel_crypto_priv *priv = ctx->priv;
	struct crypto_authenc_keys keys;
	struct crypto_aes_ctx aes;
	int err = -EINVAL, i;

	if (unlikely(crypto_authenc_extractkeys(&keys, key, len)))
		goto badkey;

	if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD) {
		/* Must have at least space for the nonce here */
		if (unlikely(keys.enckeylen < CTR_RFC3686_NONCE_SIZE))
			goto badkey;
		/* last 4 bytes of key are the nonce! */
		ctx->nonce = *(u32 *)(keys.enckey + keys.enckeylen -
				      CTR_RFC3686_NONCE_SIZE);
		/* exclude the nonce here */
		keys.enckeylen -= CTR_RFC3686_NONCE_SIZE;
	}

	/* Encryption key */
	switch (ctx->alg) {
	case SAFEXCEL_DES:
		err = verify_aead_des_key(ctfm, keys.enckey, keys.enckeylen);
		if (unlikely(err))
			goto badkey_expflags;
		break;
	case SAFEXCEL_3DES:
		err = verify_aead_des3_key(ctfm, keys.enckey, keys.enckeylen);
		if (unlikely(err))
			goto badkey_expflags;
		break;
	case SAFEXCEL_AES:
		/* Expansion only validates the key length here */
		err = aes_expandkey(&aes, keys.enckey, keys.enckeylen);
		if (unlikely(err))
			goto badkey;
		break;
	case SAFEXCEL_SM4:
		if (unlikely(keys.enckeylen != SM4_KEY_SIZE))
			goto badkey;
		break;
	default:
		dev_err(priv->dev, "aead: unsupported cipher algorithm\n");
		goto badkey;
	}

	/* Flag cached context record for invalidation if the key changed */
	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
		for (i = 0; i < keys.enckeylen / sizeof(u32); i++) {
			if (le32_to_cpu(ctx->key[i]) !=
			    ((u32 *)keys.enckey)[i]) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

	/* Auth key: precompute the HMAC inner/outer hash states */
	switch (ctx->hash_alg) {
	case CONTEXT_CONTROL_CRYPTO_ALG_SHA1:
		if (safexcel_hmac_setkey("safexcel-sha1", keys.authkey,
					 keys.authkeylen, &istate, &ostate))
			goto badkey;
		break;
	case CONTEXT_CONTROL_CRYPTO_ALG_SHA224:
		if (safexcel_hmac_setkey("safexcel-sha224", keys.authkey,
					 keys.authkeylen, &istate, &ostate))
			goto badkey;
		break;
	case CONTEXT_CONTROL_CRYPTO_ALG_SHA256:
		if (safexcel_hmac_setkey("safexcel-sha256", keys.authkey,
					 keys.authkeylen, &istate, &ostate))
			goto badkey;
		break;
	case CONTEXT_CONTROL_CRYPTO_ALG_SHA384:
		if (safexcel_hmac_setkey("safexcel-sha384", keys.authkey,
					 keys.authkeylen, &istate, &ostate))
			goto badkey;
		break;
	case CONTEXT_CONTROL_CRYPTO_ALG_SHA512:
		if (safexcel_hmac_setkey("safexcel-sha512", keys.authkey,
					 keys.authkeylen, &istate, &ostate))
			goto badkey;
		break;
	case CONTEXT_CONTROL_CRYPTO_ALG_SM3:
		if (safexcel_hmac_setkey("safexcel-sm3", keys.authkey,
					 keys.authkeylen, &istate, &ostate))
			goto badkey;
		break;
	default:
		dev_err(priv->dev, "aead: unsupported hash algorithm\n");
		goto badkey;
	}

	crypto_aead_set_flags(ctfm, crypto_aead_get_flags(ctfm) &
			      CRYPTO_TFM_RES_MASK);

	/* Auth state change also requires a context invalidation */
	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma &&
	    (memcmp(ctx->ipad, istate.state, ctx->state_sz) ||
	     memcmp(ctx->opad, ostate.state, ctx->state_sz)))
		ctx->base.needs_inv = true;

	/* Now copy the keys into the context */
	for (i = 0; i < keys.enckeylen / sizeof(u32); i++)
		ctx->key[i] = cpu_to_le32(((u32 *)keys.enckey)[i]);
	ctx->key_len = keys.enckeylen;

	memcpy(ctx->ipad, &istate.state, ctx->state_sz);
	memcpy(ctx->opad, &ostate.state, ctx->state_sz);

	memzero_explicit(&keys, sizeof(keys));
	return 0;

badkey:
	crypto_aead_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
badkey_expflags:
	memzero_explicit(&keys, sizeof(keys));
	return err;
}
|
|
|
|
|
2017-05-24 21:10:34 +07:00
|
|
|
/*
 * Fill in the per-request control words (control0/control1) of the first
 * command descriptor. control1 carries the cipher mode; control0 carries the
 * key-enable bit, the crypto/hash algorithm selectors, the operation type
 * (encrypt/decrypt, hash order) and the total context record size in words.
 *
 * Returns 0 on success or -EINVAL for an unsupported AES key length.
 */
static int safexcel_context_control(struct safexcel_cipher_ctx *ctx,
				    struct crypto_async_request *async,
				    struct safexcel_cipher_req *sreq,
				    struct safexcel_command_desc *cdesc)
{
	struct safexcel_crypto_priv *priv = ctx->priv;
	/* Context size starts at the key length, in 32-bit words */
	int ctrl_size = ctx->key_len / sizeof(u32);

	cdesc->control_data.control1 = ctx->mode;

	if (ctx->aead) {
		/* Take in account the ipad+opad digests */
		if (ctx->xcm) {
			/* GCM/CCM style: single inner digest in the context */
			ctrl_size += ctx->state_sz / sizeof(u32);
			cdesc->control_data.control0 =
				CONTEXT_CONTROL_KEY_EN |
				CONTEXT_CONTROL_DIGEST_XCM |
				ctx->hash_alg |
				CONTEXT_CONTROL_SIZE(ctrl_size);
		} else if (ctx->alg == SAFEXCEL_CHACHA20) {
			/* Chacha20-Poly1305 */
			cdesc->control_data.control0 =
				CONTEXT_CONTROL_KEY_EN |
				CONTEXT_CONTROL_CRYPTO_ALG_CHACHA20 |
				(sreq->direction == SAFEXCEL_ENCRYPT ?
					CONTEXT_CONTROL_TYPE_ENCRYPT_HASH_OUT :
					CONTEXT_CONTROL_TYPE_HASH_DECRYPT_IN) |
				ctx->hash_alg |
				CONTEXT_CONTROL_SIZE(ctrl_size);
			/*
			 * Direction and cipher algorithm are already encoded
			 * above, so skip the common selection code below.
			 */
			return 0;
		} else {
			/* HMAC: both ipad and opad digests in the context */
			ctrl_size += ctx->state_sz / sizeof(u32) * 2;
			cdesc->control_data.control0 =
				CONTEXT_CONTROL_KEY_EN |
				CONTEXT_CONTROL_DIGEST_HMAC |
				ctx->hash_alg |
				CONTEXT_CONTROL_SIZE(ctrl_size);
		}

		/*
		 * Select the operation type: CCM (and ESP-GMAC) hash the
		 * plaintext, so they need hash-then-encrypt on the encrypt
		 * path and decrypt-then-hash on the decrypt path; the other
		 * AEAD modes are the reverse.
		 */
		if (sreq->direction == SAFEXCEL_ENCRYPT &&
		    (ctx->xcm == EIP197_XCM_MODE_CCM ||
		     ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP_GMAC))
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_TYPE_HASH_ENCRYPT_OUT;
		else if (sreq->direction == SAFEXCEL_ENCRYPT)
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_TYPE_ENCRYPT_HASH_OUT;
		else if (ctx->xcm == EIP197_XCM_MODE_CCM)
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_TYPE_DECRYPT_HASH_IN;
		else
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_TYPE_HASH_DECRYPT_IN;
	} else {
		/* Plain skcipher: crypto only, no hash engine involved */
		if (sreq->direction == SAFEXCEL_ENCRYPT)
			cdesc->control_data.control0 =
				CONTEXT_CONTROL_TYPE_CRYPTO_OUT |
				CONTEXT_CONTROL_KEY_EN |
				CONTEXT_CONTROL_SIZE(ctrl_size);
		else
			cdesc->control_data.control0 =
				CONTEXT_CONTROL_TYPE_CRYPTO_IN |
				CONTEXT_CONTROL_KEY_EN |
				CONTEXT_CONTROL_SIZE(ctrl_size);
	}

	/* Merge in the cipher algorithm selector */
	if (ctx->alg == SAFEXCEL_DES) {
		cdesc->control_data.control0 |=
			CONTEXT_CONTROL_CRYPTO_ALG_DES;
	} else if (ctx->alg == SAFEXCEL_3DES) {
		cdesc->control_data.control0 |=
			CONTEXT_CONTROL_CRYPTO_ALG_3DES;
	} else if (ctx->alg == SAFEXCEL_AES) {
		/*
		 * For XTS the stored key is twice the AES key, hence the
		 * shift by ctx->xts (0 or 1) to recover the AES key size.
		 */
		switch (ctx->key_len >> ctx->xts) {
		case AES_KEYSIZE_128:
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_CRYPTO_ALG_AES128;
			break;
		case AES_KEYSIZE_192:
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_CRYPTO_ALG_AES192;
			break;
		case AES_KEYSIZE_256:
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_CRYPTO_ALG_AES256;
			break;
		default:
			dev_err(priv->dev, "aes keysize not supported: %u\n",
				ctx->key_len >> ctx->xts);
			return -EINVAL;
		}
	} else if (ctx->alg == SAFEXCEL_CHACHA20) {
		cdesc->control_data.control0 |=
			CONTEXT_CONTROL_CRYPTO_ALG_CHACHA20;
	} else if (ctx->alg == SAFEXCEL_SM4) {
		cdesc->control_data.control0 |=
			CONTEXT_CONTROL_CRYPTO_ALG_SM4;
	}

	return 0;
}
|
|
|
|
|
2017-12-11 18:10:55 +07:00
|
|
|
/*
 * Process the completion of a normal (non-invalidation) cipher request:
 * consume all result descriptors, unmap the DMA scatterlists, and for CBC
 * encrypt copy the last ciphertext block back into the request IV so chained
 * requests see the correct IV.
 *
 * Returns the number of result descriptors consumed; any descriptor-level
 * error is reported through *ret.
 */
static int safexcel_handle_req_result(struct safexcel_crypto_priv *priv, int ring,
				      struct crypto_async_request *async,
				      struct scatterlist *src,
				      struct scatterlist *dst,
				      unsigned int cryptlen,
				      struct safexcel_cipher_req *sreq,
				      bool *should_complete, int *ret)
{
	struct skcipher_request *areq = skcipher_request_cast(async);
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(areq);
	struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(skcipher);
	struct safexcel_result_desc *rdesc;
	int ndesc = 0;

	*ret = 0;

	/* Nothing submitted for this request, nothing to reap */
	if (unlikely(!sreq->rdescs))
		return 0;

	/* Consume exactly the number of result descriptors we submitted */
	while (sreq->rdescs--) {
		rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
		if (IS_ERR(rdesc)) {
			dev_err(priv->dev,
				"cipher: result: could not retrieve the result descriptor\n");
			*ret = PTR_ERR(rdesc);
			break;
		}

		/* Keep the first error encountered, but drain all descs */
		if (likely(!*ret))
			*ret = safexcel_rdesc_check_errors(priv, rdesc);

		ndesc++;
	}

	safexcel_complete(priv, ring);

	/* Undo the mapping done in safexcel_send_req() */
	if (src == dst) {
		dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL);
	} else {
		dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE);
		dma_unmap_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE);
	}

	/*
	 * Update IV in req from last crypto output word for CBC modes
	 */
	if ((!ctx->aead) && (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) &&
	    (sreq->direction == SAFEXCEL_ENCRYPT)) {
		/* For encrypt take the last output word */
		sg_pcopy_to_buffer(dst, sreq->nr_dst, areq->iv,
				   crypto_skcipher_ivsize(skcipher),
				   (cryptlen -
				    crypto_skcipher_ivsize(skcipher)));
	}

	*should_complete = true;

	return ndesc;
}
|
|
|
|
|
2018-06-28 22:21:55 +07:00
|
|
|
/*
 * Build and submit the command and result descriptor chains for one cipher
 * or AEAD request on the given ring:
 *  - compute source/destination lengths (AEAD appends/strips the auth tag),
 *  - save the input IV for CBC decrypt before it can be overwritten in-place,
 *  - DMA-map the scatterlists,
 *  - copy key (and ipad/opad for AEAD) into the context record,
 *  - emit one command descriptor per source segment and one result
 *    descriptor per (non-AAD) destination segment,
 *  - attach the context control words and the token to the first command
 *    descriptor.
 *
 * On failure all descriptors written so far are rolled back and the
 * scatterlists are unmapped. Returns 0 on success, negative errno otherwise;
 * *commands/*results report the number of descriptors used.
 */
static int safexcel_send_req(struct crypto_async_request *base, int ring,
			     struct safexcel_cipher_req *sreq,
			     struct scatterlist *src, struct scatterlist *dst,
			     unsigned int cryptlen, unsigned int assoclen,
			     unsigned int digestsize, u8 *iv, int *commands,
			     int *results)
{
	/*
	 * NOTE(review): areq/skcipher are only dereferenced on the skcipher
	 * (!ctx->aead) paths below, so the cast is safe for AEAD requests.
	 */
	struct skcipher_request *areq = skcipher_request_cast(base);
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(areq);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	struct safexcel_command_desc *cdesc;
	struct safexcel_command_desc *first_cdesc = NULL;
	struct safexcel_result_desc *rdesc, *first_rdesc = NULL;
	struct scatterlist *sg;
	unsigned int totlen;
	unsigned int totlen_src = cryptlen + assoclen;
	unsigned int totlen_dst = totlen_src;
	int n_cdesc = 0, n_rdesc = 0;
	int queued, i, ret = 0;
	bool first = true;

	sreq->nr_src = sg_nents_for_len(src, totlen_src);

	if (ctx->aead) {
		/*
		 * AEAD has auth tag appended to output for encrypt and
		 * removed from the output for decrypt!
		 */
		if (sreq->direction == SAFEXCEL_DECRYPT)
			totlen_dst -= digestsize;
		else
			totlen_dst += digestsize;

		/* ipad (and opad for non-XCM modes) follow the key in ctxr */
		memcpy(ctx->base.ctxr->data + ctx->key_len / sizeof(u32),
		       ctx->ipad, ctx->state_sz);
		if (!ctx->xcm)
			memcpy(ctx->base.ctxr->data + (ctx->key_len +
			       ctx->state_sz) / sizeof(u32), ctx->opad,
			       ctx->state_sz);
	} else if ((ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) &&
		   (sreq->direction == SAFEXCEL_DECRYPT)) {
		/*
		 * Save IV from last crypto input word for CBC modes in decrypt
		 * direction. Need to do this first in case of inplace operation
		 * as it will be overwritten.
		 */
		sg_pcopy_to_buffer(src, sreq->nr_src, areq->iv,
				   crypto_skcipher_ivsize(skcipher),
				   (totlen_src -
				    crypto_skcipher_ivsize(skcipher)));
	}

	sreq->nr_dst = sg_nents_for_len(dst, totlen_dst);

	/*
	 * Remember actual input length, source buffer length may be
	 * updated in case of inline operation below.
	 */
	totlen = totlen_src;
	queued = totlen_src;

	if (src == dst) {
		/* In-place: one bidirectional mapping sized for both uses */
		sreq->nr_src = max(sreq->nr_src, sreq->nr_dst);
		sreq->nr_dst = sreq->nr_src;
		if (unlikely((totlen_src || totlen_dst) &&
			     (sreq->nr_src <= 0))) {
			dev_err(priv->dev, "In-place buffer not large enough (need %d bytes)!",
				max(totlen_src, totlen_dst));
			return -EINVAL;
		}
		dma_map_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL);
	} else {
		if (unlikely(totlen_src && (sreq->nr_src <= 0))) {
			dev_err(priv->dev, "Source buffer not large enough (need %d bytes)!",
				totlen_src);
			return -EINVAL;
		}
		dma_map_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE);

		if (unlikely(totlen_dst && (sreq->nr_dst <= 0))) {
			dev_err(priv->dev, "Dest buffer not large enough (need %d bytes)!",
				totlen_dst);
			dma_unmap_sg(priv->dev, src, sreq->nr_src,
				     DMA_TO_DEVICE);
			return -EINVAL;
		}
		dma_map_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE);
	}

	memcpy(ctx->base.ctxr->data, ctx->key, ctx->key_len);

	/* The EIP cannot deal with zero length input packets! */
	if (totlen == 0)
		totlen = 1;

	/* command descriptors */
	for_each_sg(src, sg, sreq->nr_src, i) {
		int len = sg_dma_len(sg);

		/* Do not overflow the request */
		if (queued - len < 0)
			len = queued;

		cdesc = safexcel_add_cdesc(priv, ring, !n_cdesc,
					   !(queued - len),
					   sg_dma_address(sg), len, totlen,
					   ctx->base.ctxr_dma);
		if (IS_ERR(cdesc)) {
			/* No space left in the command descriptor ring */
			ret = PTR_ERR(cdesc);
			goto cdesc_rollback;
		}
		n_cdesc++;

		if (n_cdesc == 1) {
			first_cdesc = cdesc;
		}

		queued -= len;
		if (!queued)
			break;
	}

	if (unlikely(!n_cdesc)) {
		/*
		 * Special case: zero length input buffer.
		 * The engine always needs the 1st command descriptor, however!
		 */
		/*
		 * NOTE(review): first_cdesc is not checked with IS_ERR here,
		 * unlike the loop above — if the ring is full this would be
		 * dereferenced below. Confirm whether this path can race with
		 * a full ring and needs the same rollback handling.
		 */
		first_cdesc = safexcel_add_cdesc(priv, ring, 1, 1, 0, 0, totlen,
						 ctx->base.ctxr_dma);
		n_cdesc = 1;
	}

	/* Add context control words and token to first command descriptor */
	safexcel_context_control(ctx, base, sreq, first_cdesc);
	if (ctx->aead)
		safexcel_aead_token(ctx, iv, first_cdesc,
				    sreq->direction, cryptlen,
				    assoclen, digestsize);
	else
		safexcel_skcipher_token(ctx, iv, first_cdesc,
					cryptlen);

	/* result descriptors */
	for_each_sg(dst, sg, sreq->nr_dst, i) {
		bool last = (i == sreq->nr_dst - 1);
		u32 len = sg_dma_len(sg);

		/* only allow the part of the buffer we know we need */
		if (len > totlen_dst)
			len = totlen_dst;
		if (unlikely(!len))
			break;
		totlen_dst -= len;

		/* skip over AAD space in buffer - not written */
		if (assoclen) {
			if (assoclen >= len) {
				assoclen -= len;
				continue;
			}
			rdesc = safexcel_add_rdesc(priv, ring, first, last,
						   sg_dma_address(sg) +
						   assoclen,
						   len - assoclen);
			assoclen = 0;
		} else {
			rdesc = safexcel_add_rdesc(priv, ring, first, last,
						   sg_dma_address(sg),
						   len);
		}
		if (IS_ERR(rdesc)) {
			/* No space left in the result descriptor ring */
			ret = PTR_ERR(rdesc);
			goto rdesc_rollback;
		}
		if (first) {
			first_rdesc = rdesc;
			first = false;
		}
		n_rdesc++;
	}

	if (unlikely(first)) {
		/*
		 * Special case: AEAD decrypt with only AAD data.
		 * In this case there is NO output data from the engine,
		 * but the engine still needs a result descriptor!
		 * Create a dummy one just for catching the result token.
		 */
		rdesc = safexcel_add_rdesc(priv, ring, true, true, 0, 0);
		if (IS_ERR(rdesc)) {
			/* No space left in the result descriptor ring */
			ret = PTR_ERR(rdesc);
			goto rdesc_rollback;
		}
		first_rdesc = rdesc;
		n_rdesc = 1;
	}

	safexcel_rdr_req_set(priv, ring, first_rdesc, base);

	*commands = n_cdesc;
	*results = n_rdesc;
	return 0;

rdesc_rollback:
	for (i = 0; i < n_rdesc; i++)
		safexcel_ring_rollback_wptr(priv, &priv->ring[ring].rdr);
cdesc_rollback:
	for (i = 0; i < n_cdesc; i++)
		safexcel_ring_rollback_wptr(priv, &priv->ring[ring].cdr);

	if (src == dst) {
		dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL);
	} else {
		dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE);
		dma_unmap_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE);
	}

	return ret;
}
|
|
|
|
|
|
|
|
/*
 * Process the completion of a context-record invalidation request.
 *
 * If the transform is being torn down (ctx->base.exit_inv), free the context
 * record and complete the request. Otherwise the invalidation was triggered
 * by a rekey: pick a (possibly new) ring and requeue the original request so
 * it is resubmitted with the fresh context.
 *
 * Returns the number of result descriptors consumed.
 */
static int safexcel_handle_inv_result(struct safexcel_crypto_priv *priv,
				      int ring,
				      struct crypto_async_request *base,
				      struct safexcel_cipher_req *sreq,
				      bool *should_complete, int *ret)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
	struct safexcel_result_desc *rdesc;
	int ndesc = 0, enq_ret;

	*ret = 0;

	/* Nothing submitted for this request, nothing to reap */
	if (unlikely(!sreq->rdescs))
		return 0;

	while (sreq->rdescs--) {
		rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
		if (IS_ERR(rdesc)) {
			dev_err(priv->dev,
				"cipher: invalidate: could not retrieve the result descriptor\n");
			*ret = PTR_ERR(rdesc);
			break;
		}

		/* Keep the first error encountered, but drain all descs */
		if (likely(!*ret))
			*ret = safexcel_rdesc_check_errors(priv, rdesc);

		ndesc++;
	}

	safexcel_complete(priv, ring);

	if (ctx->base.exit_inv) {
		/* tfm is going away: release its context record for good */
		dma_pool_free(priv->context_pool, ctx->base.ctxr,
			      ctx->base.ctxr_dma);

		*should_complete = true;

		return ndesc;
	}

	/* Rekey path: rebalance onto a fresh ring and resubmit the request */
	ring = safexcel_select_ring(priv);
	ctx->base.ring = ring;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	enq_ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	if (enq_ret != -EINPROGRESS)
		*ret = enq_ret;

	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	/* The original request is not done yet; it was just requeued */
	*should_complete = false;

	return ndesc;
}
|
|
|
|
|
2018-05-14 20:10:56 +07:00
|
|
|
static int safexcel_skcipher_handle_result(struct safexcel_crypto_priv *priv,
|
|
|
|
int ring,
|
|
|
|
struct crypto_async_request *async,
|
|
|
|
bool *should_complete, int *ret)
|
2017-12-11 18:10:55 +07:00
|
|
|
{
|
|
|
|
struct skcipher_request *req = skcipher_request_cast(async);
|
|
|
|
struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
|
|
|
|
int err;
|
|
|
|
|
|
|
|
if (sreq->needs_inv) {
|
|
|
|
sreq->needs_inv = false;
|
2019-05-27 21:51:06 +07:00
|
|
|
err = safexcel_handle_inv_result(priv, ring, async, sreq,
|
2017-12-11 18:10:55 +07:00
|
|
|
should_complete, ret);
|
|
|
|
} else {
|
2018-05-14 20:10:56 +07:00
|
|
|
err = safexcel_handle_req_result(priv, ring, async, req->src,
|
|
|
|
req->dst, req->cryptlen, sreq,
|
2017-12-11 18:10:55 +07:00
|
|
|
should_complete, ret);
|
|
|
|
}
|
|
|
|
|
|
|
|
return err;
|
|
|
|
}
|
|
|
|
|
2018-05-14 20:11:02 +07:00
|
|
|
static int safexcel_aead_handle_result(struct safexcel_crypto_priv *priv,
|
|
|
|
int ring,
|
|
|
|
struct crypto_async_request *async,
|
|
|
|
bool *should_complete, int *ret)
|
|
|
|
{
|
|
|
|
struct aead_request *req = aead_request_cast(async);
|
|
|
|
struct crypto_aead *tfm = crypto_aead_reqtfm(req);
|
|
|
|
struct safexcel_cipher_req *sreq = aead_request_ctx(req);
|
|
|
|
int err;
|
|
|
|
|
|
|
|
if (sreq->needs_inv) {
|
|
|
|
sreq->needs_inv = false;
|
2019-05-27 21:51:06 +07:00
|
|
|
err = safexcel_handle_inv_result(priv, ring, async, sreq,
|
2018-05-14 20:11:02 +07:00
|
|
|
should_complete, ret);
|
|
|
|
} else {
|
|
|
|
err = safexcel_handle_req_result(priv, ring, async, req->src,
|
|
|
|
req->dst,
|
|
|
|
req->cryptlen + crypto_aead_authsize(tfm),
|
|
|
|
sreq, should_complete, ret);
|
|
|
|
}
|
|
|
|
|
|
|
|
return err;
|
|
|
|
}
|
|
|
|
|
2018-05-14 20:10:56 +07:00
|
|
|
static int safexcel_cipher_send_inv(struct crypto_async_request *base,
|
2018-06-28 22:21:57 +07:00
|
|
|
int ring, int *commands, int *results)
|
2017-05-24 21:10:34 +07:00
|
|
|
{
|
2018-05-14 20:10:56 +07:00
|
|
|
struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
|
2017-05-24 21:10:34 +07:00
|
|
|
struct safexcel_crypto_priv *priv = ctx->priv;
|
|
|
|
int ret;
|
|
|
|
|
2018-06-28 22:21:57 +07:00
|
|
|
ret = safexcel_invalidate_cache(base, priv, ctx->base.ctxr_dma, ring);
|
2017-05-24 21:10:34 +07:00
|
|
|
if (unlikely(ret))
|
|
|
|
return ret;
|
|
|
|
|
|
|
|
*commands = 1;
|
|
|
|
*results = 1;
|
|
|
|
|
|
|
|
return 0;
|
|
|
|
}
|
|
|
|
|
2018-05-14 20:10:56 +07:00
|
|
|
static int safexcel_skcipher_send(struct crypto_async_request *async, int ring,
|
|
|
|
int *commands, int *results)
|
2017-12-11 18:10:55 +07:00
|
|
|
{
|
|
|
|
struct skcipher_request *req = skcipher_request_cast(async);
|
2017-12-14 21:26:58 +07:00
|
|
|
struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
|
2017-12-11 18:10:55 +07:00
|
|
|
struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
|
2017-12-14 21:26:58 +07:00
|
|
|
struct safexcel_crypto_priv *priv = ctx->priv;
|
2017-12-11 18:10:55 +07:00
|
|
|
int ret;
|
|
|
|
|
2018-06-28 22:15:35 +07:00
|
|
|
BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);
|
2017-12-14 21:26:58 +07:00
|
|
|
|
2019-07-02 21:39:54 +07:00
|
|
|
if (sreq->needs_inv) {
|
2018-06-28 22:21:57 +07:00
|
|
|
ret = safexcel_cipher_send_inv(async, ring, commands, results);
|
2019-07-02 21:39:54 +07:00
|
|
|
} else {
|
|
|
|
struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
|
|
|
|
u8 input_iv[AES_BLOCK_SIZE];
|
|
|
|
|
|
|
|
/*
|
|
|
|
* Save input IV in case of CBC decrypt mode
|
|
|
|
* Will be overwritten with output IV prior to use!
|
|
|
|
*/
|
|
|
|
memcpy(input_iv, req->iv, crypto_skcipher_ivsize(skcipher));
|
|
|
|
|
2018-06-28 22:21:57 +07:00
|
|
|
ret = safexcel_send_req(async, ring, sreq, req->src,
|
2019-07-02 21:39:54 +07:00
|
|
|
req->dst, req->cryptlen, 0, 0, input_iv,
|
2018-05-14 20:11:02 +07:00
|
|
|
commands, results);
|
2019-07-02 21:39:54 +07:00
|
|
|
}
|
2019-05-27 21:51:06 +07:00
|
|
|
|
|
|
|
sreq->rdescs = *results;
|
2018-05-14 20:11:02 +07:00
|
|
|
return ret;
|
|
|
|
}
|
|
|
|
|
|
|
|
static int safexcel_aead_send(struct crypto_async_request *async, int ring,
|
2018-06-28 22:21:57 +07:00
|
|
|
int *commands, int *results)
|
2018-05-14 20:11:02 +07:00
|
|
|
{
|
|
|
|
struct aead_request *req = aead_request_cast(async);
|
|
|
|
struct crypto_aead *tfm = crypto_aead_reqtfm(req);
|
|
|
|
struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
|
|
|
|
struct safexcel_cipher_req *sreq = aead_request_ctx(req);
|
|
|
|
struct safexcel_crypto_priv *priv = ctx->priv;
|
|
|
|
int ret;
|
|
|
|
|
2018-06-28 22:15:35 +07:00
|
|
|
BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);
|
2018-05-14 20:11:02 +07:00
|
|
|
|
|
|
|
if (sreq->needs_inv)
|
2018-06-28 22:21:57 +07:00
|
|
|
ret = safexcel_cipher_send_inv(async, ring, commands, results);
|
2018-05-14 20:11:02 +07:00
|
|
|
else
|
2018-06-28 22:21:57 +07:00
|
|
|
ret = safexcel_send_req(async, ring, sreq, req->src, req->dst,
|
|
|
|
req->cryptlen, req->assoclen,
|
2018-05-14 20:11:02 +07:00
|
|
|
crypto_aead_authsize(tfm), req->iv,
|
2017-12-11 18:10:55 +07:00
|
|
|
commands, results);
|
2019-05-27 21:51:06 +07:00
|
|
|
sreq->rdescs = *results;
|
2017-12-11 18:10:55 +07:00
|
|
|
return ret;
|
|
|
|
}
|
|
|
|
|
2018-05-14 20:10:56 +07:00
|
|
|
/*
 * Synchronously invalidate the context record of a transform that is being
 * destroyed: mark the request as an exit-invalidation, queue it on the
 * transform's ring, kick the ring worker and block until the completion
 * callback fires.
 *
 * Returns 0 on success or the error reported by the completion.
 */
static int safexcel_cipher_exit_inv(struct crypto_tfm *tfm,
				    struct crypto_async_request *base,
				    struct safexcel_cipher_req *sreq,
				    struct safexcel_inv_result *result)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ring = ctx->base.ring;

	init_completion(&result->completion);

	ctx = crypto_tfm_ctx(base->tfm);
	/* exit_inv tells the completion handler to free the context record */
	ctx->base.exit_inv = true;
	sreq->needs_inv = true;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	crypto_enqueue_request(&priv->ring[ring].queue, base);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	/* Block until the invalidation has gone through the hardware */
	wait_for_completion(&result->completion);

	if (result->error) {
		dev_warn(priv->dev,
			"cipher: sync: invalidate: completion error %d\n",
			 result->error);
		return result->error;
	}

	return 0;
}
|
|
|
|
|
2018-05-14 20:10:56 +07:00
|
|
|
static int safexcel_skcipher_exit_inv(struct crypto_tfm *tfm)
|
2017-05-24 21:10:34 +07:00
|
|
|
{
|
2018-05-14 20:10:56 +07:00
|
|
|
EIP197_REQUEST_ON_STACK(req, skcipher, EIP197_SKCIPHER_REQ_SIZE);
|
2017-12-11 18:10:55 +07:00
|
|
|
struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
|
2018-05-14 20:10:56 +07:00
|
|
|
struct safexcel_inv_result result = {};
|
|
|
|
|
|
|
|
memset(req, 0, sizeof(struct skcipher_request));
|
|
|
|
|
|
|
|
skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
|
|
|
|
safexcel_inv_complete, &result);
|
|
|
|
skcipher_request_set_tfm(req, __crypto_skcipher_cast(tfm));
|
|
|
|
|
|
|
|
return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
|
|
|
|
}
|
|
|
|
|
2018-05-14 20:11:02 +07:00
|
|
|
static int safexcel_aead_exit_inv(struct crypto_tfm *tfm)
|
|
|
|
{
|
|
|
|
EIP197_REQUEST_ON_STACK(req, aead, EIP197_AEAD_REQ_SIZE);
|
|
|
|
struct safexcel_cipher_req *sreq = aead_request_ctx(req);
|
|
|
|
struct safexcel_inv_result result = {};
|
|
|
|
|
|
|
|
memset(req, 0, sizeof(struct aead_request));
|
|
|
|
|
|
|
|
aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
|
|
|
|
safexcel_inv_complete, &result);
|
|
|
|
aead_request_set_tfm(req, __crypto_aead_cast(tfm));
|
|
|
|
|
|
|
|
return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
|
|
|
|
}
|
|
|
|
|
2018-06-28 22:21:55 +07:00
|
|
|
static int safexcel_queue_req(struct crypto_async_request *base,
|
2018-05-14 20:10:56 +07:00
|
|
|
struct safexcel_cipher_req *sreq,
|
2019-08-30 14:40:52 +07:00
|
|
|
enum safexcel_cipher_direction dir)
|
2018-05-14 20:10:56 +07:00
|
|
|
{
|
|
|
|
struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
|
2017-05-24 21:10:34 +07:00
|
|
|
struct safexcel_crypto_priv *priv = ctx->priv;
|
2017-06-15 14:56:24 +07:00
|
|
|
int ret, ring;
|
2017-05-24 21:10:34 +07:00
|
|
|
|
2017-12-11 18:10:55 +07:00
|
|
|
sreq->needs_inv = false;
|
2017-12-14 21:26:49 +07:00
|
|
|
sreq->direction = dir;
|
2017-05-24 21:10:34 +07:00
|
|
|
|
|
|
|
if (ctx->base.ctxr) {
|
2018-06-28 22:15:35 +07:00
|
|
|
if (priv->flags & EIP197_TRC_CACHE && ctx->base.needs_inv) {
|
2017-12-11 18:10:55 +07:00
|
|
|
sreq->needs_inv = true;
|
|
|
|
ctx->base.needs_inv = false;
|
|
|
|
}
|
2017-05-24 21:10:34 +07:00
|
|
|
} else {
|
|
|
|
ctx->base.ring = safexcel_select_ring(priv);
|
|
|
|
ctx->base.ctxr = dma_pool_zalloc(priv->context_pool,
|
2018-05-14 20:10:56 +07:00
|
|
|
EIP197_GFP_FLAGS(*base),
|
2017-05-24 21:10:34 +07:00
|
|
|
&ctx->base.ctxr_dma);
|
|
|
|
if (!ctx->base.ctxr)
|
|
|
|
return -ENOMEM;
|
|
|
|
}
|
|
|
|
|
2017-06-15 14:56:24 +07:00
|
|
|
ring = ctx->base.ring;
|
|
|
|
|
|
|
|
spin_lock_bh(&priv->ring[ring].queue_lock);
|
2018-05-14 20:10:56 +07:00
|
|
|
ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
|
2017-06-15 14:56:24 +07:00
|
|
|
spin_unlock_bh(&priv->ring[ring].queue_lock);
|
2017-05-24 21:10:34 +07:00
|
|
|
|
2017-12-14 21:26:51 +07:00
|
|
|
queue_work(priv->ring[ring].workqueue,
|
|
|
|
&priv->ring[ring].work_data.work);
|
2017-05-24 21:10:34 +07:00
|
|
|
|
|
|
|
return ret;
|
|
|
|
}
|
|
|
|
|
2019-08-30 14:40:52 +07:00
|
|
|
static int safexcel_encrypt(struct skcipher_request *req)
|
2017-05-24 21:10:34 +07:00
|
|
|
{
|
2018-06-28 22:21:55 +07:00
|
|
|
return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
|
2019-08-30 14:40:52 +07:00
|
|
|
SAFEXCEL_ENCRYPT);
|
2017-05-24 21:10:34 +07:00
|
|
|
}
|
|
|
|
|
2019-08-30 14:40:52 +07:00
|
|
|
static int safexcel_decrypt(struct skcipher_request *req)
|
2017-05-24 21:10:34 +07:00
|
|
|
{
|
2018-06-28 22:21:55 +07:00
|
|
|
return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
|
2019-08-30 14:40:52 +07:00
|
|
|
SAFEXCEL_DECRYPT);
|
2017-05-24 21:10:34 +07:00
|
|
|
}
|
|
|
|
|
|
|
|
static int safexcel_skcipher_cra_init(struct crypto_tfm *tfm)
|
|
|
|
{
|
|
|
|
struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
|
|
|
|
struct safexcel_alg_template *tmpl =
|
|
|
|
container_of(tfm->__crt_alg, struct safexcel_alg_template,
|
|
|
|
alg.skcipher.base);
|
|
|
|
|
2017-12-11 18:10:55 +07:00
|
|
|
crypto_skcipher_set_reqsize(__crypto_skcipher_cast(tfm),
|
|
|
|
sizeof(struct safexcel_cipher_req));
|
2017-05-24 21:10:34 +07:00
|
|
|
|
2018-05-14 20:10:56 +07:00
|
|
|
ctx->priv = tmpl->priv;
|
|
|
|
|
|
|
|
ctx->base.send = safexcel_skcipher_send;
|
|
|
|
ctx->base.handle_result = safexcel_skcipher_handle_result;
|
2017-05-24 21:10:34 +07:00
|
|
|
return 0;
|
|
|
|
}
|
|
|
|
|
2018-05-14 20:10:56 +07:00
|
|
|
static int safexcel_cipher_cra_exit(struct crypto_tfm *tfm)
|
2017-05-24 21:10:34 +07:00
|
|
|
{
|
|
|
|
struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
|
|
|
|
|
2018-05-14 20:10:59 +07:00
|
|
|
memzero_explicit(ctx->key, sizeof(ctx->key));
|
2017-05-24 21:10:34 +07:00
|
|
|
|
|
|
|
/* context not allocated, skip invalidation */
|
|
|
|
if (!ctx->base.ctxr)
|
2018-05-14 20:10:56 +07:00
|
|
|
return -ENOMEM;
|
2017-05-24 21:10:34 +07:00
|
|
|
|
2018-05-14 20:10:59 +07:00
|
|
|
memzero_explicit(ctx->base.ctxr->data, sizeof(ctx->base.ctxr->data));
|
2018-05-14 20:10:56 +07:00
|
|
|
return 0;
|
|
|
|
}
|
|
|
|
|
|
|
|
static void safexcel_skcipher_cra_exit(struct crypto_tfm *tfm)
|
|
|
|
{
|
|
|
|
struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
|
|
|
|
struct safexcel_crypto_priv *priv = ctx->priv;
|
|
|
|
int ret;
|
|
|
|
|
|
|
|
if (safexcel_cipher_cra_exit(tfm))
|
|
|
|
return;
|
2017-05-24 21:10:34 +07:00
|
|
|
|
2018-06-28 22:15:35 +07:00
|
|
|
if (priv->flags & EIP197_TRC_CACHE) {
|
2018-05-14 20:10:56 +07:00
|
|
|
ret = safexcel_skcipher_exit_inv(tfm);
|
2017-12-14 21:26:58 +07:00
|
|
|
if (ret)
|
2018-05-14 20:10:56 +07:00
|
|
|
dev_warn(priv->dev, "skcipher: invalidation error %d\n",
|
|
|
|
ret);
|
2017-12-14 21:26:58 +07:00
|
|
|
} else {
|
|
|
|
dma_pool_free(priv->context_pool, ctx->base.ctxr,
|
|
|
|
ctx->base.ctxr_dma);
|
|
|
|
}
|
2017-05-24 21:10:34 +07:00
|
|
|
}
|
|
|
|
|
2018-05-14 20:11:02 +07:00
|
|
|
static void safexcel_aead_cra_exit(struct crypto_tfm *tfm)
|
|
|
|
{
|
|
|
|
struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
|
|
|
|
struct safexcel_crypto_priv *priv = ctx->priv;
|
|
|
|
int ret;
|
|
|
|
|
|
|
|
if (safexcel_cipher_cra_exit(tfm))
|
|
|
|
return;
|
|
|
|
|
2018-06-28 22:15:35 +07:00
|
|
|
if (priv->flags & EIP197_TRC_CACHE) {
|
2018-05-14 20:11:02 +07:00
|
|
|
ret = safexcel_aead_exit_inv(tfm);
|
|
|
|
if (ret)
|
|
|
|
dev_warn(priv->dev, "aead: invalidation error %d\n",
|
|
|
|
ret);
|
|
|
|
} else {
|
|
|
|
dma_pool_free(priv->context_pool, ctx->base.ctxr,
|
|
|
|
ctx->base.ctxr_dma);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-08-30 14:40:52 +07:00
|
|
|
static int safexcel_skcipher_aes_ecb_cra_init(struct crypto_tfm *tfm)
|
|
|
|
{
|
|
|
|
struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
|
|
|
|
|
|
|
|
safexcel_skcipher_cra_init(tfm);
|
|
|
|
ctx->alg = SAFEXCEL_AES;
|
|
|
|
ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
|
|
|
|
return 0;
|
|
|
|
}
|
|
|
|
|
2017-05-24 21:10:34 +07:00
|
|
|
/* Template registering the ecb(aes) skcipher with the crypto API. */
struct safexcel_alg_template safexcel_alg_ecb_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_AES,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aes_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.base = {
			.cra_name = "ecb(aes)",
			.cra_driver_name = "safexcel-ecb-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_aes_ecb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
|
|
|
|
|
2019-08-30 14:40:52 +07:00
|
|
|
static int safexcel_skcipher_aes_cbc_cra_init(struct crypto_tfm *tfm)
|
2017-05-24 21:10:34 +07:00
|
|
|
{
|
2019-08-30 14:40:52 +07:00
|
|
|
struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
|
2017-05-24 21:10:34 +07:00
|
|
|
|
2019-08-30 14:40:52 +07:00
|
|
|
safexcel_skcipher_cra_init(tfm);
|
|
|
|
ctx->alg = SAFEXCEL_AES;
|
|
|
|
ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
|
|
|
|
return 0;
|
2017-05-24 21:10:34 +07:00
|
|
|
}
|
|
|
|
|
|
|
|
/* Template registering the cbc(aes) skcipher with the crypto API. */
struct safexcel_alg_template safexcel_alg_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_AES,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aes_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.ivsize = AES_BLOCK_SIZE,
		.base = {
			.cra_name = "cbc(aes)",
			.cra_driver_name = "safexcel-cbc-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_aes_cbc_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
|
|
|
|
|
|
|
|
static int safexcel_skcipher_aes_cfb_cra_init(struct crypto_tfm *tfm)
|
|
|
|
{
|
|
|
|
struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
|
|
|
|
|
|
|
|
safexcel_skcipher_cra_init(tfm);
|
|
|
|
ctx->alg = SAFEXCEL_AES;
|
|
|
|
ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CFB;
|
|
|
|
return 0;
|
|
|
|
}
|
|
|
|
|
|
|
|
/*
 * Template registering the cfb(aes) skcipher with the crypto API.
 * Stream mode: cra_blocksize is 1, the AES block size only sizes the IV.
 */
struct safexcel_alg_template safexcel_alg_cfb_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_AES_XFB,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aes_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.ivsize = AES_BLOCK_SIZE,
		.base = {
			.cra_name = "cfb(aes)",
			.cra_driver_name = "safexcel-cfb-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_aes_cfb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
|
|
|
|
|
|
|
|
static int safexcel_skcipher_aes_ofb_cra_init(struct crypto_tfm *tfm)
|
|
|
|
{
|
|
|
|
struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
|
|
|
|
|
|
|
|
safexcel_skcipher_cra_init(tfm);
|
|
|
|
ctx->alg = SAFEXCEL_AES;
|
|
|
|
ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_OFB;
|
|
|
|
return 0;
|
|
|
|
}
|
|
|
|
|
|
|
|
/*
 * Template registering the ofb(aes) skcipher with the crypto API.
 * Stream mode: cra_blocksize is 1, the AES block size only sizes the IV.
 */
struct safexcel_alg_template safexcel_alg_ofb_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_AES_XFB,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aes_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.ivsize = AES_BLOCK_SIZE,
		.base = {
			.cra_name = "ofb(aes)",
			.cra_driver_name = "safexcel-ofb-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_aes_ofb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
|
2018-05-14 20:11:02 +07:00
|
|
|
|
2019-07-05 13:49:23 +07:00
|
|
|
static int safexcel_skcipher_aesctr_setkey(struct crypto_skcipher *ctfm,
|
|
|
|
const u8 *key, unsigned int len)
|
|
|
|
{
|
|
|
|
struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
|
|
|
|
struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
|
|
|
|
struct safexcel_crypto_priv *priv = ctx->priv;
|
|
|
|
struct crypto_aes_ctx aes;
|
|
|
|
int ret, i;
|
|
|
|
unsigned int keylen;
|
|
|
|
|
|
|
|
/* last 4 bytes of key are the nonce! */
|
2019-07-30 20:27:11 +07:00
|
|
|
ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
|
2019-07-05 13:49:23 +07:00
|
|
|
/* exclude the nonce here */
|
2019-07-30 20:27:11 +07:00
|
|
|
keylen = len - CTR_RFC3686_NONCE_SIZE;
|
2019-07-05 13:49:23 +07:00
|
|
|
ret = aes_expandkey(&aes, key, keylen);
|
|
|
|
if (ret) {
|
|
|
|
crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
|
|
|
|
return ret;
|
|
|
|
}
|
|
|
|
|
|
|
|
if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
|
|
|
|
for (i = 0; i < keylen / sizeof(u32); i++) {
|
2019-10-22 16:01:43 +07:00
|
|
|
if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
|
2019-07-05 13:49:23 +07:00
|
|
|
ctx->base.needs_inv = true;
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
for (i = 0; i < keylen / sizeof(u32); i++)
|
|
|
|
ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
|
|
|
|
|
|
|
|
ctx->key_len = keylen;
|
|
|
|
|
|
|
|
memzero_explicit(&aes, sizeof(aes));
|
|
|
|
return 0;
|
|
|
|
}
|
|
|
|
|
2019-08-30 14:40:52 +07:00
|
|
|
static int safexcel_skcipher_aes_ctr_cra_init(struct crypto_tfm *tfm)
|
|
|
|
{
|
|
|
|
struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
|
|
|
|
|
|
|
|
safexcel_skcipher_cra_init(tfm);
|
|
|
|
ctx->alg = SAFEXCEL_AES;
|
|
|
|
ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
|
|
|
|
return 0;
|
|
|
|
}
|
|
|
|
|
2019-07-05 13:49:23 +07:00
|
|
|
/*
 * Template registering the rfc3686(ctr(aes)) skcipher with the crypto API.
 * The key sizes include the 4-byte RFC3686 nonce appended to the AES key.
 */
struct safexcel_alg_template safexcel_alg_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_AES,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aesctr_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		/* Add nonce size */
		.min_keysize = AES_MIN_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.base = {
			.cra_name = "rfc3686(ctr(aes))",
			.cra_driver_name = "safexcel-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_aes_ctr_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
|
|
|
|
|
2018-06-28 22:21:55 +07:00
|
|
|
/*
 * Set a single-DES key.  The key is first checked by the crypto API DES
 * key verification helper; the raw key bytes are then stored in the
 * context for the engine.
 */
static int safexcel_des_setkey(struct crypto_skcipher *ctfm, const u8 *key,
			       unsigned int len)
{
	struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
	int ret;

	ret = verify_skcipher_des_key(ctfm, key);
	if (ret)
		return ret;

	/* if context exists and key changed, need to invalidate it */
	if (ctx->base.ctxr_dma)
		if (memcmp(ctx->key, key, len))
			ctx->base.needs_inv = true;

	memcpy(ctx->key, key, len);
	ctx->key_len = len;

	return 0;
}
|
|
|
|
|
2019-08-30 14:40:52 +07:00
|
|
|
static int safexcel_skcipher_des_cbc_cra_init(struct crypto_tfm *tfm)
|
|
|
|
{
|
|
|
|
struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
|
|
|
|
|
|
|
|
safexcel_skcipher_cra_init(tfm);
|
|
|
|
ctx->alg = SAFEXCEL_DES;
|
|
|
|
ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
|
|
|
|
return 0;
|
|
|
|
}
|
|
|
|
|
2018-06-28 22:21:55 +07:00
|
|
|
/* Template registering the cbc(des) skcipher with the crypto API. */
struct safexcel_alg_template safexcel_alg_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_DES,
	.alg.skcipher = {
		.setkey = safexcel_des_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = DES_KEY_SIZE,
		.max_keysize = DES_KEY_SIZE,
		.ivsize = DES_BLOCK_SIZE,
		.base = {
			.cra_name = "cbc(des)",
			.cra_driver_name = "safexcel-cbc-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_des_cbc_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
|
|
|
|
|
2019-08-30 14:40:52 +07:00
|
|
|
static int safexcel_skcipher_des_ecb_cra_init(struct crypto_tfm *tfm)
|
2018-06-28 22:21:55 +07:00
|
|
|
{
|
2019-08-30 14:40:52 +07:00
|
|
|
struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
|
2018-06-28 22:21:55 +07:00
|
|
|
|
2019-08-30 14:40:52 +07:00
|
|
|
safexcel_skcipher_cra_init(tfm);
|
|
|
|
ctx->alg = SAFEXCEL_DES;
|
|
|
|
ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
|
|
|
|
return 0;
|
2018-06-28 22:21:55 +07:00
|
|
|
}
|
|
|
|
|
|
|
|
/* Template registering the ecb(des) skcipher with the crypto API. */
struct safexcel_alg_template safexcel_alg_ecb_des = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_DES,
	.alg.skcipher = {
		.setkey = safexcel_des_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = DES_KEY_SIZE,
		.max_keysize = DES_KEY_SIZE,
		.base = {
			.cra_name = "ecb(des)",
			.cra_driver_name = "safexcel-ecb-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_des_ecb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
|
2018-06-28 22:21:56 +07:00
|
|
|
|
|
|
|
/*
 * Set a triple-DES (EDE) key.  The key is first checked by the crypto API
 * 3DES key verification helper; the raw key bytes are then stored in the
 * context for the engine.
 */
static int safexcel_des3_ede_setkey(struct crypto_skcipher *ctfm,
				   const u8 *key, unsigned int len)
{
	struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
	int err;

	err = verify_skcipher_des3_key(ctfm, key);
	if (err)
		return err;

	/* if context exists and key changed, need to invalidate it */
	if (ctx->base.ctxr_dma)
		if (memcmp(ctx->key, key, len))
			ctx->base.needs_inv = true;

	memcpy(ctx->key, key, len);
	ctx->key_len = len;

	return 0;
}
|
|
|
|
|
2019-08-30 14:40:52 +07:00
|
|
|
static int safexcel_skcipher_des3_cbc_cra_init(struct crypto_tfm *tfm)
|
|
|
|
{
|
|
|
|
struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
|
|
|
|
|
|
|
|
safexcel_skcipher_cra_init(tfm);
|
|
|
|
ctx->alg = SAFEXCEL_3DES;
|
|
|
|
ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
|
|
|
|
return 0;
|
|
|
|
}
|
|
|
|
|
2018-06-28 22:21:56 +07:00
|
|
|
/* Template registering the cbc(des3_ede) skcipher with the crypto API. */
struct safexcel_alg_template safexcel_alg_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_DES,
	.alg.skcipher = {
		.setkey = safexcel_des3_ede_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = DES3_EDE_KEY_SIZE,
		.max_keysize = DES3_EDE_KEY_SIZE,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.base = {
			.cra_name = "cbc(des3_ede)",
			.cra_driver_name = "safexcel-cbc-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_des3_cbc_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
|
|
|
|
|
2019-08-30 14:40:52 +07:00
|
|
|
static int safexcel_skcipher_des3_ecb_cra_init(struct crypto_tfm *tfm)
|
2018-06-28 22:21:56 +07:00
|
|
|
{
|
2019-08-30 14:40:52 +07:00
|
|
|
struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
|
2018-06-28 22:21:56 +07:00
|
|
|
|
2019-08-30 14:40:52 +07:00
|
|
|
safexcel_skcipher_cra_init(tfm);
|
|
|
|
ctx->alg = SAFEXCEL_3DES;
|
|
|
|
ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
|
|
|
|
return 0;
|
2018-06-28 22:21:56 +07:00
|
|
|
}
|
|
|
|
|
|
|
|
/* Template registering the ecb(des3_ede) skcipher with the crypto API. */
struct safexcel_alg_template safexcel_alg_ecb_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_DES,
	.alg.skcipher = {
		.setkey = safexcel_des3_ede_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = DES3_EDE_KEY_SIZE,
		.max_keysize = DES3_EDE_KEY_SIZE,
		.base = {
			.cra_name = "ecb(des3_ede)",
			.cra_driver_name = "safexcel-ecb-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_des3_ecb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
|
|
|
|
|
2019-08-30 14:40:52 +07:00
|
|
|
static int safexcel_aead_encrypt(struct aead_request *req)
|
2018-05-14 20:11:02 +07:00
|
|
|
{
|
|
|
|
struct safexcel_cipher_req *creq = aead_request_ctx(req);
|
|
|
|
|
2019-08-30 14:40:52 +07:00
|
|
|
return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
|
2018-05-14 20:11:02 +07:00
|
|
|
}
|
|
|
|
|
2019-08-30 14:40:52 +07:00
|
|
|
static int safexcel_aead_decrypt(struct aead_request *req)
|
2018-05-14 20:11:02 +07:00
|
|
|
{
|
|
|
|
struct safexcel_cipher_req *creq = aead_request_ctx(req);
|
|
|
|
|
2019-08-30 14:40:52 +07:00
|
|
|
return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
|
2018-05-14 20:11:02 +07:00
|
|
|
}
|
|
|
|
|
|
|
|
/*
 * Common AEAD transform constructor: hooks the per-request context size,
 * records the device instance from the registration template and installs
 * the AEAD send/result handlers.  Cipher defaults (AES-CBC) may be
 * overridden by the per-algorithm constructors that call this.
 */
static int safexcel_aead_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_alg_template *tmpl =
		container_of(tfm->__crt_alg, struct safexcel_alg_template,
			     alg.aead.base);

	crypto_aead_set_reqsize(__crypto_aead_cast(tfm),
				sizeof(struct safexcel_cipher_req));

	ctx->priv = tmpl->priv;

	ctx->alg = SAFEXCEL_AES; /* default */
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC; /* default */
	ctx->aead = true;
	ctx->base.send = safexcel_aead_send;
	ctx->base.handle_result = safexcel_aead_handle_result;
	return 0;
}
|
|
|
|
|
2018-05-14 20:11:04 +07:00
|
|
|
static int safexcel_aead_sha1_cra_init(struct crypto_tfm *tfm)
|
|
|
|
{
|
|
|
|
struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
|
|
|
|
|
|
|
|
safexcel_aead_cra_init(tfm);
|
2018-06-28 22:21:55 +07:00
|
|
|
ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
|
2018-05-14 20:11:04 +07:00
|
|
|
ctx->state_sz = SHA1_DIGEST_SIZE;
|
|
|
|
return 0;
|
|
|
|
}
|
|
|
|
|
|
|
|
/* Template registering authenc(hmac(sha1),cbc(aes)) with the crypto API. */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha1_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
|
|
|
|
|
2018-05-14 20:11:02 +07:00
|
|
|
static int safexcel_aead_sha256_cra_init(struct crypto_tfm *tfm)
|
|
|
|
{
|
|
|
|
struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
|
|
|
|
|
|
|
|
safexcel_aead_cra_init(tfm);
|
2018-06-28 22:21:55 +07:00
|
|
|
ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
|
2018-05-14 20:11:02 +07:00
|
|
|
ctx->state_sz = SHA256_DIGEST_SIZE;
|
|
|
|
return 0;
|
|
|
|
}
|
|
|
|
|
|
|
|
/* Template registering authenc(hmac(sha256),cbc(aes)) with the crypto API. */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA256_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha256),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha256_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
|
2018-05-14 20:11:03 +07:00
|
|
|
|
|
|
|
static int safexcel_aead_sha224_cra_init(struct crypto_tfm *tfm)
|
|
|
|
{
|
|
|
|
struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
|
|
|
|
|
|
|
|
safexcel_aead_cra_init(tfm);
|
2018-06-28 22:21:55 +07:00
|
|
|
ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
|
2018-05-14 20:11:03 +07:00
|
|
|
ctx->state_sz = SHA256_DIGEST_SIZE;
|
|
|
|
return 0;
|
|
|
|
}
|
|
|
|
|
|
|
|
/* Template registering authenc(hmac(sha224),cbc(aes)) with the crypto API. */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA224_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha224),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha224_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
|
2018-05-29 19:13:48 +07:00
|
|
|
|
|
|
|
static int safexcel_aead_sha512_cra_init(struct crypto_tfm *tfm)
|
|
|
|
{
|
|
|
|
struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
|
|
|
|
|
|
|
|
safexcel_aead_cra_init(tfm);
|
2018-06-28 22:21:55 +07:00
|
|
|
ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
|
2018-05-29 19:13:48 +07:00
|
|
|
ctx->state_sz = SHA512_DIGEST_SIZE;
|
|
|
|
return 0;
|
|
|
|
}
|
|
|
|
|
|
|
|
/* Template registering authenc(hmac(sha512),cbc(aes)) with the crypto API. */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA512_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha512),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha512_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
|
2018-05-29 19:13:52 +07:00
|
|
|
|
|
|
|
static int safexcel_aead_sha384_cra_init(struct crypto_tfm *tfm)
|
|
|
|
{
|
|
|
|
struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
|
|
|
|
|
|
|
|
safexcel_aead_cra_init(tfm);
|
2018-06-28 22:21:55 +07:00
|
|
|
ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
|
2018-05-29 19:13:52 +07:00
|
|
|
ctx->state_sz = SHA512_DIGEST_SIZE;
|
|
|
|
return 0;
|
|
|
|
}
|
|
|
|
|
|
|
|
/* Template registering authenc(hmac(sha384),cbc(aes)) with the crypto API. */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA384_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha384),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha384_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
|
2019-07-05 13:49:22 +07:00
|
|
|
|
2019-07-05 13:49:24 +07:00
|
|
|
/* HMAC-SHA1 AEAD constructor with the cipher switched to 3DES. */
static int safexcel_aead_sha1_des3_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha1_cra_init(tfm);
	ctx->alg = SAFEXCEL_3DES; /* override default */
	return 0;
}
|
|
|
|
|
2019-07-05 13:49:22 +07:00
|
|
|
/* Template registering authenc(hmac(sha1),cbc(des3_ede)). */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),cbc(des3_ede))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha1_des3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
|
|
|
|
|
2019-09-14 03:04:45 +07:00
|
|
|
/* HMAC-SHA256 AEAD constructor with the cipher switched to 3DES. */
static int safexcel_aead_sha256_des3_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha256_cra_init(tfm);
	ctx->alg = SAFEXCEL_3DES; /* override default */
	return 0;
}
|
|
|
|
|
|
|
|
/* Template registering authenc(hmac(sha256),cbc(des3_ede)). */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.maxauthsize = SHA256_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha256),cbc(des3_ede))",
			.cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha256_des3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
|
|
|
|
|
|
|
|
/* HMAC-SHA224 AEAD constructor with the cipher switched to 3DES. */
static int safexcel_aead_sha224_des3_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha224_cra_init(tfm);
	ctx->alg = SAFEXCEL_3DES; /* override default */
	return 0;
}
|
|
|
|
|
|
|
|
/* Template registering authenc(hmac(sha224),cbc(des3_ede)). */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.maxauthsize = SHA224_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha224),cbc(des3_ede))",
			.cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha224_des3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
|
|
|
|
|
|
|
|
/* HMAC-SHA512 AEAD constructor with the cipher switched to 3DES. */
static int safexcel_aead_sha512_des3_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha512_cra_init(tfm);
	ctx->alg = SAFEXCEL_3DES; /* override default */
	return 0;
}
|
|
|
|
|
|
|
|
/* Template registering authenc(hmac(sha512),cbc(des3_ede)). */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.maxauthsize = SHA512_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha512),cbc(des3_ede))",
			.cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha512_des3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
|
|
|
|
|
|
|
|
/* HMAC-SHA384 AEAD constructor with the cipher switched to 3DES. */
static int safexcel_aead_sha384_des3_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha384_cra_init(tfm);
	ctx->alg = SAFEXCEL_3DES; /* override default */
	return 0;
}
|
|
|
|
|
|
|
|
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_des3_ede = {
|
|
|
|
.type = SAFEXCEL_ALG_TYPE_AEAD,
|
|
|
|
.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
|
|
|
|
.alg.aead = {
|
|
|
|
.setkey = safexcel_aead_setkey,
|
|
|
|
.encrypt = safexcel_aead_encrypt,
|
|
|
|
.decrypt = safexcel_aead_decrypt,
|
|
|
|
.ivsize = DES3_EDE_BLOCK_SIZE,
|
|
|
|
.maxauthsize = SHA384_DIGEST_SIZE,
|
|
|
|
.base = {
|
|
|
|
.cra_name = "authenc(hmac(sha384),cbc(des3_ede))",
|
|
|
|
.cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-des3_ede",
|
|
|
|
.cra_priority = SAFEXCEL_CRA_PRIORITY,
|
|
|
|
.cra_flags = CRYPTO_ALG_ASYNC |
|
|
|
|
CRYPTO_ALG_KERN_DRIVER_ONLY,
|
|
|
|
.cra_blocksize = DES3_EDE_BLOCK_SIZE,
|
|
|
|
.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
|
|
|
|
.cra_alignmask = 0,
|
|
|
|
.cra_init = safexcel_aead_sha384_des3_cra_init,
|
|
|
|
.cra_exit = safexcel_aead_cra_exit,
|
|
|
|
.cra_module = THIS_MODULE,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
};
|
|
|
|
|
2019-09-14 03:04:44 +07:00
|
|
|
/*
 * authenc(hmac(sha1),cbc(des)): reuse the generic SHA1 AEAD init and only
 * override the cipher algorithm to single DES.
 */
static int safexcel_aead_sha1_des_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha1_cra_init(tfm);
	ctx->alg = SAFEXCEL_DES; /* override default */
	return 0;
}

/* Algorithm registration template for HMAC-SHA1 + CBC-DES AEAD */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES_BLOCK_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),cbc(des))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha1_des_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
|
|
|
|
|
2019-09-14 03:04:46 +07:00
|
|
|
/*
 * authenc(hmac(sha256),cbc(des)): reuse the generic SHA256 AEAD init and
 * only override the cipher algorithm to single DES.
 */
static int safexcel_aead_sha256_des_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha256_cra_init(tfm);
	ctx->alg = SAFEXCEL_DES; /* override default */
	return 0;
}

/* Algorithm registration template for HMAC-SHA256 + CBC-DES AEAD */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES_BLOCK_SIZE,
		.maxauthsize = SHA256_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha256),cbc(des))",
			.cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha256_des_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
|
|
|
|
|
|
|
|
/*
 * authenc(hmac(sha224),cbc(des)): reuse the generic SHA224 AEAD init and
 * only override the cipher algorithm to single DES.
 */
static int safexcel_aead_sha224_des_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha224_cra_init(tfm);
	ctx->alg = SAFEXCEL_DES; /* override default */
	return 0;
}

/*
 * Algorithm registration template for HMAC-SHA224 + CBC-DES AEAD.
 * The SHA2_256 capability bit is used because SHA-224 runs on the
 * SHA-256 hash engine.
 */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES_BLOCK_SIZE,
		.maxauthsize = SHA224_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha224),cbc(des))",
			.cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha224_des_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
|
|
|
|
|
|
|
|
/*
 * authenc(hmac(sha512),cbc(des)): reuse the generic SHA512 AEAD init and
 * only override the cipher algorithm to single DES.
 */
static int safexcel_aead_sha512_des_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha512_cra_init(tfm);
	ctx->alg = SAFEXCEL_DES; /* override default */
	return 0;
}

/* Algorithm registration template for HMAC-SHA512 + CBC-DES AEAD */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES_BLOCK_SIZE,
		.maxauthsize = SHA512_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha512),cbc(des))",
			.cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha512_des_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
|
|
|
|
|
|
|
|
/*
 * authenc(hmac(sha384),cbc(des)): reuse the generic SHA384 AEAD init and
 * only override the cipher algorithm to single DES.
 */
static int safexcel_aead_sha384_des_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha384_cra_init(tfm);
	ctx->alg = SAFEXCEL_DES; /* override default */
	return 0;
}

/*
 * Algorithm registration template for HMAC-SHA384 + CBC-DES AEAD.
 * The SHA2_512 capability bit is used because SHA-384 runs on the
 * SHA-512 hash engine.
 */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES_BLOCK_SIZE,
		.maxauthsize = SHA384_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha384),cbc(des))",
			.cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha384_des_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
|
|
|
|
|
2019-07-05 13:49:24 +07:00
|
|
|
/*
 * authenc(hmac(sha1),rfc3686(ctr(aes))): reuse the generic SHA1 AEAD init
 * and only switch the cipher mode to CTR with nonce/IV loading (RFC3686).
 */
static int safexcel_aead_sha1_ctr_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha1_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
	return 0;
}

/* Algorithm registration template for HMAC-SHA1 + RFC3686 CTR-AES AEAD */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			/* CTR is a stream mode: blocksize 1 */
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha1_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
|
|
|
|
|
|
|
|
/*
 * authenc(hmac(sha256),rfc3686(ctr(aes))): reuse the generic SHA256 AEAD
 * init and only switch the cipher mode to CTR with nonce/IV loading.
 */
static int safexcel_aead_sha256_ctr_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha256_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
	return 0;
}

/* Algorithm registration template for HMAC-SHA256 + RFC3686 CTR-AES AEAD */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA256_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha256),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha256-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			/* CTR is a stream mode: blocksize 1 */
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha256_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
|
|
|
|
|
|
|
|
/*
 * authenc(hmac(sha224),rfc3686(ctr(aes))): reuse the generic SHA224 AEAD
 * init and only switch the cipher mode to CTR with nonce/IV loading.
 */
static int safexcel_aead_sha224_ctr_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha224_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
	return 0;
}

/*
 * Algorithm registration template for HMAC-SHA224 + RFC3686 CTR-AES AEAD.
 * SHA-224 runs on the SHA-256 hash engine, hence the SHA2_256 mask bit.
 */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA224_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha224),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha224-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			/* CTR is a stream mode: blocksize 1 */
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha224_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
|
|
|
|
|
|
|
|
/*
 * authenc(hmac(sha512),rfc3686(ctr(aes))): reuse the generic SHA512 AEAD
 * init and only switch the cipher mode to CTR with nonce/IV loading.
 */
static int safexcel_aead_sha512_ctr_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha512_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
	return 0;
}

/* Algorithm registration template for HMAC-SHA512 + RFC3686 CTR-AES AEAD */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA512_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha512),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha512-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			/* CTR is a stream mode: blocksize 1 */
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha512_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
|
|
|
|
|
|
|
|
/*
 * authenc(hmac(sha384),rfc3686(ctr(aes))): reuse the generic SHA384 AEAD
 * init and only switch the cipher mode to CTR with nonce/IV loading.
 */
static int safexcel_aead_sha384_ctr_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha384_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
	return 0;
}

/*
 * Algorithm registration template for HMAC-SHA384 + RFC3686 CTR-AES AEAD.
 * SHA-384 runs on the SHA-512 hash engine, hence the SHA2_512 mask bit.
 */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA384_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha384),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha384-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			/* CTR is a stream mode: blocksize 1 */
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha384_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
|
2019-08-30 14:40:53 +07:00
|
|
|
|
|
|
|
static int safexcel_skcipher_aesxts_setkey(struct crypto_skcipher *ctfm,
|
|
|
|
const u8 *key, unsigned int len)
|
|
|
|
{
|
|
|
|
struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
|
|
|
|
struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
|
|
|
|
struct safexcel_crypto_priv *priv = ctx->priv;
|
|
|
|
struct crypto_aes_ctx aes;
|
|
|
|
int ret, i;
|
|
|
|
unsigned int keylen;
|
|
|
|
|
|
|
|
/* Check for illegal XTS keys */
|
|
|
|
ret = xts_verify_key(ctfm, key, len);
|
|
|
|
if (ret)
|
|
|
|
return ret;
|
|
|
|
|
|
|
|
/* Only half of the key data is cipher key */
|
|
|
|
keylen = (len >> 1);
|
|
|
|
ret = aes_expandkey(&aes, key, keylen);
|
|
|
|
if (ret) {
|
|
|
|
crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
|
|
|
|
return ret;
|
|
|
|
}
|
|
|
|
|
|
|
|
if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
|
|
|
|
for (i = 0; i < keylen / sizeof(u32); i++) {
|
2019-10-22 16:01:43 +07:00
|
|
|
if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
|
2019-08-30 14:40:53 +07:00
|
|
|
ctx->base.needs_inv = true;
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
for (i = 0; i < keylen / sizeof(u32); i++)
|
|
|
|
ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
|
|
|
|
|
|
|
|
/* The other half is the tweak key */
|
|
|
|
ret = aes_expandkey(&aes, (u8 *)(key + keylen), keylen);
|
|
|
|
if (ret) {
|
|
|
|
crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
|
|
|
|
return ret;
|
|
|
|
}
|
|
|
|
|
|
|
|
if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
|
|
|
|
for (i = 0; i < keylen / sizeof(u32); i++) {
|
2019-10-22 16:01:43 +07:00
|
|
|
if (le32_to_cpu(ctx->key[i + keylen / sizeof(u32)]) !=
|
|
|
|
aes.key_enc[i]) {
|
2019-08-30 14:40:53 +07:00
|
|
|
ctx->base.needs_inv = true;
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
for (i = 0; i < keylen / sizeof(u32); i++)
|
|
|
|
ctx->key[i + keylen / sizeof(u32)] =
|
|
|
|
cpu_to_le32(aes.key_enc[i]);
|
|
|
|
|
|
|
|
ctx->key_len = keylen << 1;
|
|
|
|
|
|
|
|
memzero_explicit(&aes, sizeof(aes));
|
|
|
|
return 0;
|
|
|
|
}
|
|
|
|
|
|
|
|
/* Initialize an AES-XTS skcipher context: AES cipher, XTS mode, xts flag */
static int safexcel_skcipher_aes_xts_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg = SAFEXCEL_AES;
	ctx->xts = 1;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XTS;
	return 0;
}
|
|
|
|
|
|
|
|
static int safexcel_encrypt_xts(struct skcipher_request *req)
|
|
|
|
{
|
|
|
|
if (req->cryptlen < XTS_BLOCK_SIZE)
|
|
|
|
return -EINVAL;
|
|
|
|
return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
|
|
|
|
SAFEXCEL_ENCRYPT);
|
|
|
|
}
|
|
|
|
|
|
|
|
static int safexcel_decrypt_xts(struct skcipher_request *req)
|
|
|
|
{
|
|
|
|
if (req->cryptlen < XTS_BLOCK_SIZE)
|
|
|
|
return -EINVAL;
|
|
|
|
return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
|
|
|
|
SAFEXCEL_DECRYPT);
|
|
|
|
}
|
|
|
|
|
|
|
|
/* Algorithm registration template for the xts(aes) skcipher */
struct safexcel_alg_template safexcel_alg_xts_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_AES_XTS,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aesxts_setkey,
		.encrypt = safexcel_encrypt_xts,
		.decrypt = safexcel_decrypt_xts,
		/* XTS actually uses 2 AES keys glued together */
		.min_keysize = AES_MIN_KEY_SIZE * 2,
		.max_keysize = AES_MAX_KEY_SIZE * 2,
		.ivsize = XTS_BLOCK_SIZE,
		.base = {
			.cra_name = "xts(aes)",
			.cra_driver_name = "safexcel-xts-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = XTS_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_aes_xts_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
|
2019-08-30 14:52:30 +07:00
|
|
|
|
|
|
|
static int safexcel_aead_gcm_setkey(struct crypto_aead *ctfm, const u8 *key,
|
|
|
|
unsigned int len)
|
|
|
|
{
|
|
|
|
struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
|
|
|
|
struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
|
|
|
|
struct safexcel_crypto_priv *priv = ctx->priv;
|
|
|
|
struct crypto_aes_ctx aes;
|
|
|
|
u32 hashkey[AES_BLOCK_SIZE >> 2];
|
|
|
|
int ret, i;
|
|
|
|
|
|
|
|
ret = aes_expandkey(&aes, key, len);
|
|
|
|
if (ret) {
|
|
|
|
crypto_aead_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
|
|
|
|
memzero_explicit(&aes, sizeof(aes));
|
|
|
|
return ret;
|
|
|
|
}
|
|
|
|
|
|
|
|
if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
|
|
|
|
for (i = 0; i < len / sizeof(u32); i++) {
|
2019-10-22 16:01:43 +07:00
|
|
|
if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
|
2019-08-30 14:52:30 +07:00
|
|
|
ctx->base.needs_inv = true;
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
for (i = 0; i < len / sizeof(u32); i++)
|
|
|
|
ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
|
|
|
|
|
|
|
|
ctx->key_len = len;
|
|
|
|
|
|
|
|
/* Compute hash key by encrypting zeroes with cipher key */
|
|
|
|
crypto_cipher_clear_flags(ctx->hkaes, CRYPTO_TFM_REQ_MASK);
|
|
|
|
crypto_cipher_set_flags(ctx->hkaes, crypto_aead_get_flags(ctfm) &
|
|
|
|
CRYPTO_TFM_REQ_MASK);
|
|
|
|
ret = crypto_cipher_setkey(ctx->hkaes, key, len);
|
|
|
|
crypto_aead_set_flags(ctfm, crypto_cipher_get_flags(ctx->hkaes) &
|
|
|
|
CRYPTO_TFM_RES_MASK);
|
|
|
|
if (ret)
|
|
|
|
return ret;
|
|
|
|
|
|
|
|
memset(hashkey, 0, AES_BLOCK_SIZE);
|
|
|
|
crypto_cipher_encrypt_one(ctx->hkaes, (u8 *)hashkey, (u8 *)hashkey);
|
|
|
|
|
|
|
|
if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
|
|
|
|
for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++) {
|
2019-10-22 16:01:43 +07:00
|
|
|
if (be32_to_cpu(ctx->ipad[i]) != hashkey[i]) {
|
2019-08-30 14:52:30 +07:00
|
|
|
ctx->base.needs_inv = true;
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++)
|
|
|
|
ctx->ipad[i] = cpu_to_be32(hashkey[i]);
|
|
|
|
|
|
|
|
memzero_explicit(hashkey, AES_BLOCK_SIZE);
|
|
|
|
memzero_explicit(&aes, sizeof(aes));
|
|
|
|
return 0;
|
|
|
|
}
|
|
|
|
|
|
|
|
/*
 * Initialize a GCM AEAD context: GHASH authentication, XCM (GCM) cipher
 * mode, plus a software AES cipher (hkaes) used by setkey to derive the
 * GHASH hash key.
 */
static int safexcel_aead_gcm_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_GHASH;
	ctx->state_sz = GHASH_BLOCK_SIZE;
	ctx->xcm = EIP197_XCM_MODE_GCM;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XCM; /* override default */

	ctx->hkaes = crypto_alloc_cipher("aes", 0, 0);
	if (IS_ERR(ctx->hkaes))
		return PTR_ERR(ctx->hkaes);

	return 0;
}
|
|
|
|
|
|
|
|
/* Tear down a GCM context: free the hash-key AES helper, then common exit */
static void safexcel_aead_gcm_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_cipher(ctx->hkaes);
	safexcel_aead_cra_exit(tfm);
}
|
|
|
|
|
|
|
|
/* Validate the requested GCM tag length via the generic helper */
static int safexcel_aead_gcm_setauthsize(struct crypto_aead *tfm,
					 unsigned int authsize)
{
	return crypto_gcm_check_authsize(authsize);
}
|
|
|
|
|
|
|
|
/* Algorithm registration template for the gcm(aes) AEAD */
struct safexcel_alg_template safexcel_alg_gcm = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
	.alg.aead = {
		.setkey = safexcel_aead_gcm_setkey,
		.setauthsize = safexcel_aead_gcm_setauthsize,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = GCM_AES_IV_SIZE,
		.maxauthsize = GHASH_DIGEST_SIZE,
		.base = {
			.cra_name = "gcm(aes)",
			.cra_driver_name = "safexcel-gcm-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			/* GCM is a stream mode: blocksize 1 */
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_gcm_cra_init,
			.cra_exit = safexcel_aead_gcm_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
|
2019-08-30 14:52:33 +07:00
|
|
|
|
|
|
|
/*
 * Set the AES-CCM key. The key serves double duty: it is stored
 * little-endian as the cipher key in ctx->key AND big-endian as the
 * CBC-MAC (XCBC) key in ctx->ipad, after two AES blocks' worth of state.
 * The XCBC hash-engine variant is selected from the key size.
 * With a caching engine (EIP197_TRC_CACHE), a changed key flags the
 * cached context record for invalidation.
 */
static int safexcel_aead_ccm_setkey(struct crypto_aead *ctfm, const u8 *key,
				    unsigned int len)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	struct crypto_aes_ctx aes;
	int ret, i;

	ret = aes_expandkey(&aes, key, len);
	if (ret) {
		crypto_aead_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		memzero_explicit(&aes, sizeof(aes));
		return ret;
	}

	/* detect key change so the cached context record gets invalidated */
	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
		for (i = 0; i < len / sizeof(u32); i++) {
			if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

	/* cipher key (LE) and CBC-MAC key (BE, offset 2 AES blocks) */
	for (i = 0; i < len / sizeof(u32); i++) {
		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
		ctx->ipad[i + 2 * AES_BLOCK_SIZE / sizeof(u32)] =
			cpu_to_be32(aes.key_enc[i]);
	}

	ctx->key_len = len;
	/* hash state: 2 AES blocks of MAC state plus the key itself */
	ctx->state_sz = 2 * AES_BLOCK_SIZE + len;

	if (len == AES_KEYSIZE_192)
		ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192;
	else if (len == AES_KEYSIZE_256)
		ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC256;
	else
		ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;

	memzero_explicit(&aes, sizeof(aes));
	return 0;
}
|
|
|
|
|
|
|
|
/*
 * Initialize a CCM AEAD context: XCBC (CBC-MAC) authentication and
 * XCM (CCM) cipher mode; setkey later refines hash_alg and state_sz
 * from the actual key size.
 */
static int safexcel_aead_ccm_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
	ctx->state_sz = 3 * AES_BLOCK_SIZE;
	ctx->xcm = EIP197_XCM_MODE_CCM;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XCM; /* override default */
	return 0;
}
|
|
|
|
|
|
|
|
/*
 * Validate the requested CCM tag length. The valid set (borrowed from
 * crypto/ccm.c) is exactly the even sizes from 4 to 16 bytes.
 */
static int safexcel_aead_ccm_setauthsize(struct crypto_aead *tfm,
					 unsigned int authsize)
{
	if (authsize < 4 || authsize > 16 || (authsize & 1))
		return -EINVAL;

	return 0;
}
|
|
|
|
|
|
|
|
static int safexcel_ccm_encrypt(struct aead_request *req)
|
|
|
|
{
|
|
|
|
struct safexcel_cipher_req *creq = aead_request_ctx(req);
|
|
|
|
|
|
|
|
if (req->iv[0] < 1 || req->iv[0] > 7)
|
|
|
|
return -EINVAL;
|
|
|
|
|
|
|
|
return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
|
|
|
|
}
|
|
|
|
|
|
|
|
static int safexcel_ccm_decrypt(struct aead_request *req)
|
|
|
|
{
|
|
|
|
struct safexcel_cipher_req *creq = aead_request_ctx(req);
|
|
|
|
|
|
|
|
if (req->iv[0] < 1 || req->iv[0] > 7)
|
|
|
|
return -EINVAL;
|
|
|
|
|
|
|
|
return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
|
|
|
|
}
|
|
|
|
|
|
|
|
/* Algorithm registration template for the ccm(aes) AEAD */
struct safexcel_alg_template safexcel_alg_ccm = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_CBC_MAC_ALL,
	.alg.aead = {
		.setkey = safexcel_aead_ccm_setkey,
		.setauthsize = safexcel_aead_ccm_setauthsize,
		.encrypt = safexcel_ccm_encrypt,
		.decrypt = safexcel_ccm_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = AES_BLOCK_SIZE,
		.base = {
			.cra_name = "ccm(aes)",
			.cra_driver_name = "safexcel-ccm-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			/* CCM is a stream mode: blocksize 1 */
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_ccm_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
|
2019-09-19 04:25:56 +07:00
|
|
|
|
2019-09-19 04:25:57 +07:00
|
|
|
static void safexcel_chacha20_setkey(struct safexcel_cipher_ctx *ctx,
|
|
|
|
const u8 *key)
|
2019-09-19 04:25:56 +07:00
|
|
|
{
|
|
|
|
struct safexcel_crypto_priv *priv = ctx->priv;
|
|
|
|
|
2019-10-22 16:01:43 +07:00
|
|
|
if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
|
|
|
|
if (memcmp(ctx->key, key, CHACHA_KEY_SIZE))
|
|
|
|
ctx->base.needs_inv = true;
|
2019-09-19 04:25:56 +07:00
|
|
|
|
2019-10-22 16:01:43 +07:00
|
|
|
memcpy(ctx->key, key, CHACHA_KEY_SIZE);
|
2019-09-19 04:25:56 +07:00
|
|
|
ctx->key_len = CHACHA_KEY_SIZE;
|
2019-09-19 04:25:57 +07:00
|
|
|
}
|
|
|
|
|
|
|
|
static int safexcel_skcipher_chacha20_setkey(struct crypto_skcipher *ctfm,
|
|
|
|
const u8 *key, unsigned int len)
|
|
|
|
{
|
|
|
|
struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
|
|
|
|
|
|
|
|
if (len != CHACHA_KEY_SIZE) {
|
|
|
|
crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
|
|
|
|
return -EINVAL;
|
|
|
|
}
|
|
|
|
safexcel_chacha20_setkey(ctx, key);
|
2019-09-19 04:25:56 +07:00
|
|
|
|
|
|
|
return 0;
|
|
|
|
}
|
|
|
|
|
|
|
|
/* Initialize a ChaCha20 skcipher context (256-bit key, 32-bit counter) */
static int safexcel_skcipher_chacha20_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg = SAFEXCEL_CHACHA20;
	ctx->mode = CONTEXT_CONTROL_CHACHA20_MODE_256_32;
	return 0;
}
|
|
|
|
|
|
|
|
/* Algorithm registration template for the chacha20 skcipher */
struct safexcel_alg_template safexcel_alg_chacha20 = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_CHACHA20,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_chacha20_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = CHACHA_KEY_SIZE,
		.max_keysize = CHACHA_KEY_SIZE,
		.ivsize = CHACHA_IV_SIZE,
		.base = {
			.cra_name = "chacha20",
			.cra_driver_name = "safexcel-chacha20",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			/* ChaCha20 is a stream cipher: blocksize 1 */
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_chacha20_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
|
2019-09-19 04:25:57 +07:00
|
|
|
|
|
|
|
/*
 * AEAD setkey for chacha20-poly1305. For the IPsec ESP variant the
 * 32-bit salt/nonce is appended to the key material; it is stripped off
 * and stored in ctx->nonce before the remaining length is validated
 * against the 256-bit ChaCha20 key size.
 */
static int safexcel_aead_chachapoly_setkey(struct crypto_aead *ctfm,
					   const u8 *key, unsigned int len)
{
	struct safexcel_cipher_ctx *ctx = crypto_aead_ctx(ctfm);

	if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP &&
	    len > EIP197_AEAD_IPSEC_NONCE_SIZE) {
		/* ESP variant has nonce appended to key */
		len -= EIP197_AEAD_IPSEC_NONCE_SIZE;
		/*
		 * NOTE(review): direct u32 load; key + len may not be
		 * 32-bit aligned - consider get_unaligned() (the file
		 * already includes asm/unaligned.h). Confirm whether
		 * callers guarantee alignment.
		 */
		ctx->nonce = *(u32 *)(key + len);
	}
	if (len != CHACHA_KEY_SIZE) {
		crypto_aead_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}
	safexcel_chacha20_setkey(ctx, key);

	return 0;
}
|
|
|
|
|
|
|
|
static int safexcel_aead_chachapoly_setauthsize(struct crypto_aead *tfm,
|
|
|
|
unsigned int authsize)
|
|
|
|
{
|
|
|
|
if (authsize != POLY1305_DIGEST_SIZE)
|
|
|
|
return -EINVAL;
|
|
|
|
return 0;
|
|
|
|
}
|
|
|
|
|
|
|
|
/*
 * Dispatch a chacha20-poly1305 request either to the hardware queue or,
 * for requests the engine cannot handle (tiny payloads, short ESP AAD),
 * to the software fallback AEAD (ctx->fback).
 *
 * Note: creq and subreq deliberately alias the same request context -
 * the fallback's aead_request is stored in this driver's per-request
 * context area.
 */
static int safexcel_aead_chachapoly_crypt(struct aead_request *req,
					  enum safexcel_cipher_direction dir)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_tfm *tfm = crypto_aead_tfm(aead);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct aead_request *subreq = aead_request_ctx(req);
	/* one extra word to append the ESP nonce to the fallback key */
	u32 key[CHACHA_KEY_SIZE / sizeof(u32) + 1];
	int ret = 0;

	/*
	 * Instead of wasting time detecting umpteen silly corner cases,
	 * just dump all "small" requests to the fallback implementation.
	 * HW would not be faster on such small requests anyway.
	 */
	if (likely((ctx->aead != EIP197_AEAD_TYPE_IPSEC_ESP ||
		    req->assoclen >= EIP197_AEAD_IPSEC_IV_SIZE) &&
		   req->cryptlen > POLY1305_DIGEST_SIZE)) {
		return safexcel_queue_req(&req->base, creq, dir);
	}

	/* HW cannot do full (AAD+payload) zero length, use fallback */
	memcpy(key, ctx->key, CHACHA_KEY_SIZE);
	if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
		/* ESP variant has nonce appended to the key */
		key[CHACHA_KEY_SIZE / sizeof(u32)] = ctx->nonce;
		ret = crypto_aead_setkey(ctx->fback, (u8 *)key,
					 CHACHA_KEY_SIZE +
					 EIP197_AEAD_IPSEC_NONCE_SIZE);
	} else {
		ret = crypto_aead_setkey(ctx->fback, (u8 *)key,
					 CHACHA_KEY_SIZE);
	}
	if (ret) {
		/* propagate the fallback's request flags back to our tfm */
		crypto_aead_clear_flags(aead, CRYPTO_TFM_REQ_MASK);
		crypto_aead_set_flags(aead, crypto_aead_get_flags(ctx->fback) &
					    CRYPTO_TFM_REQ_MASK);
		return ret;
	}

	aead_request_set_tfm(subreq, ctx->fback);
	aead_request_set_callback(subreq, req->base.flags, req->base.complete,
				  req->base.data);
	aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
			       req->iv);
	aead_request_set_ad(subreq, req->assoclen);

	return (dir ==  SAFEXCEL_ENCRYPT) ?
		crypto_aead_encrypt(subreq) :
		crypto_aead_decrypt(subreq);
}
|
|
|
|
|
|
|
|
/* Encrypt wrapper: dispatch to the common chacha20-poly1305 path. */
static int safexcel_aead_chachapoly_encrypt(struct aead_request *req)
{
	return safexcel_aead_chachapoly_crypt(req, SAFEXCEL_ENCRYPT);
}
|
|
|
|
|
|
|
|
/* Decrypt wrapper: dispatch to the common chacha20-poly1305 path. */
static int safexcel_aead_chachapoly_decrypt(struct aead_request *req)
{
	return safexcel_aead_chachapoly_crypt(req, SAFEXCEL_DECRYPT);
}
|
|
|
|
|
crypto: inside-secure - Add SM4 based authenc AEAD ciphersuites
This patch adds support for the authenc(hmac(sha1),cbc(sm4)),
authenc(hmac(sm3),cbc(sm4)), authenc(hmac(sha1),rfc3686(ctr(sm4))),
and authenc(hmac(sm3),rfc3686(ctr(sm4))) aead ciphersuites.
These are necessary to support IPsec according to the Chinese standard
GM/T 022-1014 - IPsec VPN specification.
Note that there are no testvectors present in testmgr for these
ciphersuites. However, considering all building blocks have already been
verified elsewhere, it is fair to assume the generic implementation to be
correct-by-construction.
The hardware implementation has been fuzzed against this generic
implementation by means of a locally modified testmgr. The intention is
to upstream these testmgr changes but this is pending other testmgr changes
being made by Eric Biggers.
The patch has been tested with the eip197c_iewxkbc configuration on the
Xilinx VCU118 development board, using the abovementioned modified testmgr
This patch applies on top of "Add support for SM4 ciphers" and needs to
be applied before "Add (HMAC) SHA3 support".
Signed-off-by: Pascal van Leeuwen <pvanleeuwen@verimatrix.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
2019-09-14 01:44:50 +07:00
|
|
|
/*
 * Common AEAD init for algorithms needing a software fallback: performs
 * the base init, allocates a generic fallback cipher by cra_name, and
 * sizes the request context to fit either the HW request or the
 * fallback subrequest, whichever is larger.
 */
static int safexcel_aead_fallback_cra_init(struct crypto_tfm *tfm)
{
	struct crypto_aead *aead = __crypto_aead_cast(tfm);
	struct aead_alg *alg = crypto_aead_alg(aead);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);

	/* Allocate fallback implementation */
	ctx->fback = crypto_alloc_aead(alg->base.cra_name, 0,
				       CRYPTO_ALG_ASYNC |
				       CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(ctx->fback))
		return PTR_ERR(ctx->fback);

	/* Request ctx must hold either our own state or the subrequest */
	crypto_aead_set_reqsize(aead, max(sizeof(struct safexcel_cipher_req),
					  sizeof(struct aead_request) +
					  crypto_aead_reqsize(ctx->fback)));

	return 0;
}
|
|
|
|
|
crypto: inside-secure - Add SM4 based authenc AEAD ciphersuites
This patch adds support for the authenc(hmac(sha1),cbc(sm4)),
authenc(hmac(sm3),cbc(sm4)), authenc(hmac(sha1),rfc3686(ctr(sm4))),
and authenc(hmac(sm3),rfc3686(ctr(sm4))) aead ciphersuites.
These are necessary to support IPsec according to the Chinese standard
GM/T 022-1014 - IPsec VPN specification.
Note that there are no testvectors present in testmgr for these
ciphersuites. However, considering all building blocks have already been
verified elsewhere, it is fair to assume the generic implementation to be
correct-by-construction.
The hardware implementation has been fuzzed against this generic
implementation by means of a locally modified testmgr. The intention is
to upstream these testmgr changes but this is pending other testmgr changes
being made by Eric Biggers.
The patch has been tested with the eip197c_iewxkbc configuration on the
Xilinx VCU118 development board, using the abovementioned modified testmgr
This patch applies on top of "Add support for SM4 ciphers" and needs to
be applied before "Add (HMAC) SHA3 support".
Signed-off-by: Pascal van Leeuwen <pvanleeuwen@verimatrix.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
2019-09-14 01:44:50 +07:00
|
|
|
/*
 * Init for chacha20-poly1305: fallback init plus HW context setup
 * (256 bit key / 32 bit counter mode, HW-derived one-time Poly1305 key).
 */
static int safexcel_aead_chachapoly_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_fallback_cra_init(tfm);
	ctx->alg  = SAFEXCEL_CHACHA20;
	ctx->mode = CONTEXT_CONTROL_CHACHA20_MODE_256_32 |
		    CONTEXT_CONTROL_CHACHA20_MODE_CALC_OTK;
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_POLY1305;
	ctx->state_sz = 0; /* Precomputed by HW */
	return 0;
}
|
|
|
|
|
|
|
|
/* Teardown counterpart: free the fallback cipher, then base exit. */
static void safexcel_aead_fallback_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_aead(ctx->fback);
	safexcel_aead_cra_exit(tfm);
}
|
|
|
|
|
|
|
|
/* rfc7539(chacha20,poly1305) AEAD: HW implementation with SW fallback */
struct safexcel_alg_template safexcel_alg_chachapoly = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_CHACHA20 | SAFEXCEL_ALG_POLY1305,
	.alg.aead = {
		.setkey = safexcel_aead_chachapoly_setkey,
		.setauthsize = safexcel_aead_chachapoly_setauthsize,
		.encrypt = safexcel_aead_chachapoly_encrypt,
		.decrypt = safexcel_aead_chachapoly_decrypt,
		.ivsize = CHACHAPOLY_IV_SIZE,
		.maxauthsize = POLY1305_DIGEST_SIZE,
		.base = {
			.cra_name = "rfc7539(chacha20,poly1305)",
			.cra_driver_name = "safexcel-chacha20-poly1305",
			/* +1 to put it above HW chacha + SW poly */
			.cra_priority = SAFEXCEL_CRA_PRIORITY + 1,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY |
				     CRYPTO_ALG_NEED_FALLBACK,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_chachapoly_cra_init,
			.cra_exit = safexcel_aead_fallback_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
|
|
|
|
|
|
|
|
/* ESP variant init: regular chachapoly init, then mark as IPsec ESP. */
static int safexcel_aead_chachapolyesp_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = safexcel_aead_chachapoly_cra_init(tfm);
	ctx->aead  = EIP197_AEAD_TYPE_IPSEC_ESP;
	return ret;
}
|
|
|
|
|
|
|
|
/*
 * rfc7539esp(chacha20,poly1305): IPsec ESP variant; the 4-byte implicit
 * nonce is part of the key, hence the reduced ivsize.
 */
struct safexcel_alg_template safexcel_alg_chachapoly_esp = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_CHACHA20 | SAFEXCEL_ALG_POLY1305,
	.alg.aead = {
		.setkey = safexcel_aead_chachapoly_setkey,
		.setauthsize = safexcel_aead_chachapoly_setauthsize,
		.encrypt = safexcel_aead_chachapoly_encrypt,
		.decrypt = safexcel_aead_chachapoly_decrypt,
		.ivsize = CHACHAPOLY_IV_SIZE - EIP197_AEAD_IPSEC_NONCE_SIZE,
		.maxauthsize = POLY1305_DIGEST_SIZE,
		.base = {
			.cra_name = "rfc7539esp(chacha20,poly1305)",
			.cra_driver_name = "safexcel-chacha20-poly1305-esp",
			/* +1 to put it above HW chacha + SW poly */
			.cra_priority = SAFEXCEL_CRA_PRIORITY + 1,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY |
				     CRYPTO_ALG_NEED_FALLBACK,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_chachapolyesp_cra_init,
			.cra_exit = safexcel_aead_fallback_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
|
2019-09-13 16:10:36 +07:00
|
|
|
|
|
|
|
static int safexcel_skcipher_sm4_setkey(struct crypto_skcipher *ctfm,
|
|
|
|
const u8 *key, unsigned int len)
|
|
|
|
{
|
|
|
|
struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
|
|
|
|
struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
|
|
|
|
struct safexcel_crypto_priv *priv = ctx->priv;
|
|
|
|
|
|
|
|
if (len != SM4_KEY_SIZE) {
|
|
|
|
crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
|
|
|
|
return -EINVAL;
|
|
|
|
}
|
|
|
|
|
2019-10-22 16:01:43 +07:00
|
|
|
if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
|
|
|
|
if (memcmp(ctx->key, key, SM4_KEY_SIZE))
|
|
|
|
ctx->base.needs_inv = true;
|
2019-09-13 16:10:36 +07:00
|
|
|
|
2019-10-22 16:01:43 +07:00
|
|
|
memcpy(ctx->key, key, SM4_KEY_SIZE);
|
2019-09-13 16:10:36 +07:00
|
|
|
ctx->key_len = SM4_KEY_SIZE;
|
|
|
|
|
|
|
|
return 0;
|
|
|
|
}
|
|
|
|
|
|
|
|
static int safexcel_sm4_blk_encrypt(struct skcipher_request *req)
|
|
|
|
{
|
|
|
|
/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
|
|
|
|
if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
|
|
|
|
return -EINVAL;
|
|
|
|
else
|
|
|
|
return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
|
|
|
|
SAFEXCEL_ENCRYPT);
|
|
|
|
}
|
|
|
|
|
|
|
|
static int safexcel_sm4_blk_decrypt(struct skcipher_request *req)
|
|
|
|
{
|
|
|
|
/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
|
|
|
|
if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
|
|
|
|
return -EINVAL;
|
|
|
|
else
|
|
|
|
return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
|
|
|
|
SAFEXCEL_DECRYPT);
|
|
|
|
}
|
|
|
|
|
|
|
|
/* Init SM4-ECB: base skcipher init plus algorithm/mode selection. */
static int safexcel_skcipher_sm4_ecb_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg  = SAFEXCEL_SM4;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
	return 0;
}
|
|
|
|
|
|
|
|
/* ecb(sm4) skcipher: block-aligned lengths enforced in software. */
struct safexcel_alg_template safexcel_alg_ecb_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_SM4,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_sm4_setkey,
		.encrypt = safexcel_sm4_blk_encrypt,
		.decrypt = safexcel_sm4_blk_decrypt,
		.min_keysize = SM4_KEY_SIZE,
		.max_keysize = SM4_KEY_SIZE,
		.base = {
			.cra_name = "ecb(sm4)",
			.cra_driver_name = "safexcel-ecb-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = SM4_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_sm4_ecb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
|
2019-09-13 16:10:37 +07:00
|
|
|
|
|
|
|
/* Init SM4-CBC: base skcipher init plus algorithm/mode selection. */
static int safexcel_skcipher_sm4_cbc_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg  = SAFEXCEL_SM4;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
	return 0;
}
|
|
|
|
|
|
|
|
/* cbc(sm4) skcipher: block-aligned lengths enforced in software. */
struct safexcel_alg_template safexcel_alg_cbc_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_SM4,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_sm4_setkey,
		.encrypt = safexcel_sm4_blk_encrypt,
		.decrypt = safexcel_sm4_blk_decrypt,
		.min_keysize = SM4_KEY_SIZE,
		.max_keysize = SM4_KEY_SIZE,
		.ivsize = SM4_BLOCK_SIZE,
		.base = {
			.cra_name = "cbc(sm4)",
			.cra_driver_name = "safexcel-cbc-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = SM4_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_sm4_cbc_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
|
2019-09-13 16:10:38 +07:00
|
|
|
|
|
|
|
/* Init SM4-OFB: base skcipher init plus algorithm/mode selection. */
static int safexcel_skcipher_sm4_ofb_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg  = SAFEXCEL_SM4;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_OFB;
	return 0;
}
|
|
|
|
|
|
|
|
/* ofb(sm4) skcipher: stream mode, so blocksize is 1. */
struct safexcel_alg_template safexcel_alg_ofb_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_AES_XFB,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_sm4_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = SM4_KEY_SIZE,
		.max_keysize = SM4_KEY_SIZE,
		.ivsize = SM4_BLOCK_SIZE,
		.base = {
			.cra_name = "ofb(sm4)",
			.cra_driver_name = "safexcel-ofb-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_sm4_ofb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
|
2019-09-13 16:10:40 +07:00
|
|
|
|
|
|
|
/* Init SM4-CFB: base skcipher init plus algorithm/mode selection. */
static int safexcel_skcipher_sm4_cfb_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg  = SAFEXCEL_SM4;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CFB;
	return 0;
}
|
|
|
|
|
|
|
|
/* cfb(sm4) skcipher: stream mode, so blocksize is 1. */
struct safexcel_alg_template safexcel_alg_cfb_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_AES_XFB,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_sm4_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = SM4_KEY_SIZE,
		.max_keysize = SM4_KEY_SIZE,
		.ivsize = SM4_BLOCK_SIZE,
		.base = {
			.cra_name = "cfb(sm4)",
			.cra_driver_name = "safexcel-cfb-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_sm4_cfb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
|
2019-09-13 16:10:41 +07:00
|
|
|
|
|
|
|
static int safexcel_skcipher_sm4ctr_setkey(struct crypto_skcipher *ctfm,
|
|
|
|
const u8 *key, unsigned int len)
|
|
|
|
{
|
|
|
|
struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
|
|
|
|
struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
|
|
|
|
|
|
|
|
/* last 4 bytes of key are the nonce! */
|
|
|
|
ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
|
|
|
|
/* exclude the nonce here */
|
|
|
|
len -= CTR_RFC3686_NONCE_SIZE;
|
|
|
|
|
|
|
|
return safexcel_skcipher_sm4_setkey(ctfm, key, len);
|
|
|
|
}
|
|
|
|
|
|
|
|
/* Init SM4-CTR (RFC3686): counter mode with IV loaded from context. */
static int safexcel_skcipher_sm4_ctr_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg  = SAFEXCEL_SM4;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
	return 0;
}
|
|
|
|
|
|
|
|
/* rfc3686(ctr(sm4)) skcipher: key material includes the 4-byte nonce. */
struct safexcel_alg_template safexcel_alg_ctr_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_SM4,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_sm4ctr_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		/* Add nonce size */
		.min_keysize = SM4_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
		.max_keysize = SM4_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.base = {
			.cra_name = "rfc3686(ctr(sm4))",
			.cra_driver_name = "safexcel-ctr-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_sm4_ctr_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
|
crypto: inside-secure - Add SM4 based authenc AEAD ciphersuites
This patch adds support for the authenc(hmac(sha1),cbc(sm4)),
authenc(hmac(sm3),cbc(sm4)), authenc(hmac(sha1),rfc3686(ctr(sm4))),
and authenc(hmac(sm3),rfc3686(ctr(sm4))) aead ciphersuites.
These are necessary to support IPsec according to the Chinese standard
GM/T 022-1014 - IPsec VPN specification.
Note that there are no testvectors present in testmgr for these
ciphersuites. However, considering all building blocks have already been
verified elsewhere, it is fair to assume the generic implementation to be
correct-by-construction.
The hardware implementation has been fuzzed against this generic
implementation by means of a locally modified testmgr. The intention is
to upstream these testmgr changes but this is pending other testmgr changes
being made by Eric Biggers.
The patch has been tested with the eip197c_iewxkbc configuration on the
Xilinx VCU118 development board, using the abovementioned modified testmgr
This patch applies on top of "Add support for SM4 ciphers" and needs to
be applied before "Add (HMAC) SHA3 support".
Signed-off-by: Pascal van Leeuwen <pvanleeuwen@verimatrix.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
2019-09-14 01:44:50 +07:00
|
|
|
|
|
|
|
static int safexcel_aead_sm4_blk_encrypt(struct aead_request *req)
|
|
|
|
{
|
|
|
|
/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
|
|
|
|
if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
|
|
|
|
return -EINVAL;
|
|
|
|
|
|
|
|
return safexcel_queue_req(&req->base, aead_request_ctx(req),
|
|
|
|
SAFEXCEL_ENCRYPT);
|
|
|
|
}
|
|
|
|
|
|
|
|
static int safexcel_aead_sm4_blk_decrypt(struct aead_request *req)
|
|
|
|
{
|
|
|
|
struct crypto_aead *tfm = crypto_aead_reqtfm(req);
|
|
|
|
|
|
|
|
/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
|
|
|
|
if ((req->cryptlen - crypto_aead_authsize(tfm)) & (SM4_BLOCK_SIZE - 1))
|
|
|
|
return -EINVAL;
|
|
|
|
|
|
|
|
return safexcel_queue_req(&req->base, aead_request_ctx(req),
|
|
|
|
SAFEXCEL_DECRYPT);
|
|
|
|
}
|
|
|
|
|
|
|
|
/* Init authenc(hmac(sha1),cbc(sm4)): SM4 cipher with SHA1 auth. */
static int safexcel_aead_sm4cbc_sha1_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);
	ctx->alg = SAFEXCEL_SM4;
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
	ctx->state_sz = SHA1_DIGEST_SIZE;
	return 0;
}
|
|
|
|
|
|
|
|
/* authenc(hmac(sha1),cbc(sm4)) AEAD (GM/T 022-1014 IPsec suite). */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_sm4_blk_encrypt,
		.decrypt = safexcel_aead_sm4_blk_decrypt,
		.ivsize = SM4_BLOCK_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),cbc(sm4))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = SM4_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sm4cbc_sha1_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
|
|
|
|
|
|
|
|
/*
 * Setkey that also programs the software fallback, so both HW and SW
 * paths always share the same key. Fails fast if the fallback rejects.
 */
static int safexcel_aead_fallback_setkey(struct crypto_aead *ctfm,
					 const u8 *key, unsigned int len)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	/* Keep fallback cipher synchronized */
	return crypto_aead_setkey(ctx->fback, (u8 *)key, len) ?:
	       safexcel_aead_setkey(ctfm, key, len);
}
|
|
|
|
|
|
|
|
/* Propagate the authsize to the software fallback cipher. */
static int safexcel_aead_fallback_setauthsize(struct crypto_aead *ctfm,
					      unsigned int authsize)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	/* Keep fallback cipher synchronized */
	return crypto_aead_setauthsize(ctx->fback, authsize);
}
|
|
|
|
|
|
|
|
/*
 * Forward an AEAD request verbatim to the software fallback cipher.
 * Used for degenerate cases the hardware cannot handle.
 */
static int safexcel_aead_fallback_crypt(struct aead_request *req,
					enum safexcel_cipher_direction dir)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_tfm *tfm = crypto_aead_tfm(aead);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	/* Request context doubles as the fallback subrequest */
	struct aead_request *subreq = aead_request_ctx(req);

	aead_request_set_tfm(subreq, ctx->fback);
	aead_request_set_callback(subreq, req->base.flags, req->base.complete,
				  req->base.data);
	aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
			       req->iv);
	aead_request_set_ad(subreq, req->assoclen);

	return (dir ==  SAFEXCEL_ENCRYPT) ?
		crypto_aead_encrypt(subreq) :
		crypto_aead_decrypt(subreq);
}
|
|
|
|
|
|
|
|
/*
 * authenc(hmac(sm3),cbc(sm4)) encrypt: validate block alignment, send
 * non-empty requests to HW, and use the fallback for the all-zero
 * length case the HW cannot process.
 */
static int safexcel_aead_sm4cbc_sm3_encrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);

	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
	if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
		return -EINVAL;
	else if (req->cryptlen || req->assoclen) /* If input length > 0 only */
		return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);

	/* HW cannot do full (AAD+payload) zero length, use fallback */
	return safexcel_aead_fallback_crypt(req, SAFEXCEL_ENCRYPT);
}
|
|
|
|
|
|
|
|
/*
 * authenc(hmac(sm3),cbc(sm4)) decrypt: like encrypt, but the payload
 * length excludes the auth tag when checking for emptiness/alignment.
 */
static int safexcel_aead_sm4cbc_sm3_decrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);

	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
	if ((req->cryptlen - crypto_aead_authsize(tfm)) & (SM4_BLOCK_SIZE - 1))
		return -EINVAL;
	else if (req->cryptlen > crypto_aead_authsize(tfm) || req->assoclen)
		/* If input length > 0 only */
		return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);

	/* HW cannot do full (AAD+payload) zero length, use fallback */
	return safexcel_aead_fallback_crypt(req, SAFEXCEL_DECRYPT);
}
|
|
|
|
|
|
|
|
/* Init authenc(hmac(sm3),cbc(sm4)): needs a fallback for empty input. */
static int safexcel_aead_sm4cbc_sm3_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_fallback_cra_init(tfm);
	ctx->alg = SAFEXCEL_SM4;
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SM3;
	ctx->state_sz = SM3_DIGEST_SIZE;
	return 0;
}
|
|
|
|
|
|
|
|
/* authenc(hmac(sm3),cbc(sm4)) AEAD; needs SW fallback for empty input. */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sm3_cbc_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SM3,
	.alg.aead = {
		.setkey = safexcel_aead_fallback_setkey,
		.setauthsize = safexcel_aead_fallback_setauthsize,
		.encrypt = safexcel_aead_sm4cbc_sm3_encrypt,
		.decrypt = safexcel_aead_sm4cbc_sm3_decrypt,
		.ivsize = SM4_BLOCK_SIZE,
		.maxauthsize = SM3_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sm3),cbc(sm4))",
			.cra_driver_name = "safexcel-authenc-hmac-sm3-cbc-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY |
				     CRYPTO_ALG_NEED_FALLBACK,
			.cra_blocksize = SM4_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sm4cbc_sm3_cra_init,
			.cra_exit = safexcel_aead_fallback_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
|
|
|
|
|
|
|
|
/* Init authenc(hmac(sha1),rfc3686(ctr(sm4))): CBC init, then CTR mode. */
static int safexcel_aead_sm4ctr_sha1_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sm4cbc_sha1_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
	return 0;
}
|
|
|
|
|
|
|
|
/* authenc(hmac(sha1),rfc3686(ctr(sm4))) AEAD. */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_ctr_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),rfc3686(ctr(sm4)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-ctr-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sm4ctr_sha1_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
|
|
|
|
|
|
|
|
/* Init authenc(hmac(sm3),rfc3686(ctr(sm4))): CBC init, then CTR mode. */
static int safexcel_aead_sm4ctr_sm3_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sm4cbc_sm3_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
	return 0;
}
|
|
|
|
|
|
|
|
/* authenc(hmac(sm3),rfc3686(ctr(sm4))) AEAD. */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sm3_ctr_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SM3,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SM3_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sm3),rfc3686(ctr(sm4)))",
			.cra_driver_name = "safexcel-authenc-hmac-sm3-ctr-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sm4ctr_sm3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
|
2019-09-17 17:07:59 +07:00
|
|
|
|
|
|
|
static int safexcel_rfc4106_gcm_setkey(struct crypto_aead *ctfm, const u8 *key,
|
|
|
|
unsigned int len)
|
|
|
|
{
|
|
|
|
struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
|
|
|
|
struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
|
|
|
|
|
|
|
|
/* last 4 bytes of key are the nonce! */
|
|
|
|
ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
|
|
|
|
|
|
|
|
len -= CTR_RFC3686_NONCE_SIZE;
|
|
|
|
return safexcel_aead_gcm_setkey(ctfm, key, len);
|
|
|
|
}
|
|
|
|
|
|
|
|
/* Delegate authsize validation to the generic RFC4106 rules (8/12/16). */
static int safexcel_rfc4106_gcm_setauthsize(struct crypto_aead *tfm,
					    unsigned int authsize)
{
	return crypto_rfc4106_check_authsize(authsize);
}
|
|
|
|
|
|
|
|
/* Validate the IPsec assoclen, then run the normal AEAD encrypt path. */
static int safexcel_rfc4106_encrypt(struct aead_request *req)
{
	return crypto_ipsec_check_assoclen(req->assoclen) ?:
	       safexcel_aead_encrypt(req);
}
|
|
|
|
|
|
|
|
/* Validate the IPsec assoclen, then run the normal AEAD decrypt path. */
static int safexcel_rfc4106_decrypt(struct aead_request *req)
{
	return crypto_ipsec_check_assoclen(req->assoclen) ?:
	       safexcel_aead_decrypt(req);
}
|
|
|
|
|
|
|
|
/* Init rfc4106(gcm(aes)): regular GCM init, then mark as IPsec ESP. */
static int safexcel_rfc4106_gcm_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = safexcel_aead_gcm_cra_init(tfm);
	ctx->aead  = EIP197_AEAD_TYPE_IPSEC_ESP;
	return ret;
}
|
|
|
|
|
|
|
|
struct safexcel_alg_template safexcel_alg_rfc4106_gcm = {
|
|
|
|
.type = SAFEXCEL_ALG_TYPE_AEAD,
|
|
|
|
.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
|
|
|
|
.alg.aead = {
|
|
|
|
.setkey = safexcel_rfc4106_gcm_setkey,
|
|
|
|
.setauthsize = safexcel_rfc4106_gcm_setauthsize,
|
|
|
|
.encrypt = safexcel_rfc4106_encrypt,
|
|
|
|
.decrypt = safexcel_rfc4106_decrypt,
|
|
|
|
.ivsize = GCM_RFC4106_IV_SIZE,
|
|
|
|
.maxauthsize = GHASH_DIGEST_SIZE,
|
|
|
|
.base = {
|
|
|
|
.cra_name = "rfc4106(gcm(aes))",
|
|
|
|
.cra_driver_name = "safexcel-rfc4106-gcm-aes",
|
|
|
|
.cra_priority = SAFEXCEL_CRA_PRIORITY,
|
|
|
|
.cra_flags = CRYPTO_ALG_ASYNC |
|
|
|
|
CRYPTO_ALG_KERN_DRIVER_ONLY,
|
|
|
|
.cra_blocksize = 1,
|
|
|
|
.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
|
|
|
|
.cra_alignmask = 0,
|
|
|
|
.cra_init = safexcel_rfc4106_gcm_cra_init,
|
|
|
|
.cra_exit = safexcel_aead_gcm_cra_exit,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
};
|
2019-09-17 17:08:00 +07:00
|
|
|
|
|
|
|
static int safexcel_rfc4543_gcm_setauthsize(struct crypto_aead *tfm,
|
|
|
|
unsigned int authsize)
|
|
|
|
{
|
|
|
|
if (authsize != GHASH_DIGEST_SIZE)
|
|
|
|
return -EINVAL;
|
|
|
|
|
|
|
|
return 0;
|
|
|
|
}
|
|
|
|
|
|
|
|
static int safexcel_rfc4543_gcm_cra_init(struct crypto_tfm *tfm)
|
|
|
|
{
|
|
|
|
struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
|
|
|
|
int ret;
|
|
|
|
|
|
|
|
ret = safexcel_aead_gcm_cra_init(tfm);
|
|
|
|
ctx->aead = EIP197_AEAD_TYPE_IPSEC_ESP_GMAC;
|
|
|
|
return ret;
|
|
|
|
}
|
|
|
|
|
|
|
|
struct safexcel_alg_template safexcel_alg_rfc4543_gcm = {
|
|
|
|
.type = SAFEXCEL_ALG_TYPE_AEAD,
|
|
|
|
.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
|
|
|
|
.alg.aead = {
|
|
|
|
.setkey = safexcel_rfc4106_gcm_setkey,
|
|
|
|
.setauthsize = safexcel_rfc4543_gcm_setauthsize,
|
|
|
|
.encrypt = safexcel_rfc4106_encrypt,
|
|
|
|
.decrypt = safexcel_rfc4106_decrypt,
|
|
|
|
.ivsize = GCM_RFC4543_IV_SIZE,
|
|
|
|
.maxauthsize = GHASH_DIGEST_SIZE,
|
|
|
|
.base = {
|
|
|
|
.cra_name = "rfc4543(gcm(aes))",
|
|
|
|
.cra_driver_name = "safexcel-rfc4543-gcm-aes",
|
|
|
|
.cra_priority = SAFEXCEL_CRA_PRIORITY,
|
|
|
|
.cra_flags = CRYPTO_ALG_ASYNC |
|
|
|
|
CRYPTO_ALG_KERN_DRIVER_ONLY,
|
|
|
|
.cra_blocksize = 1,
|
|
|
|
.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
|
|
|
|
.cra_alignmask = 0,
|
|
|
|
.cra_init = safexcel_rfc4543_gcm_cra_init,
|
|
|
|
.cra_exit = safexcel_aead_gcm_cra_exit,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
};
|
2019-09-17 17:08:01 +07:00
|
|
|
|
|
|
|
static int safexcel_rfc4309_ccm_setkey(struct crypto_aead *ctfm, const u8 *key,
|
|
|
|
unsigned int len)
|
|
|
|
{
|
|
|
|
struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
|
|
|
|
struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
|
|
|
|
|
|
|
|
/* First byte of the nonce = L = always 3 for RFC4309 (4 byte ctr) */
|
|
|
|
*(u8 *)&ctx->nonce = EIP197_AEAD_IPSEC_COUNTER_SIZE - 1;
|
|
|
|
/* last 3 bytes of key are the nonce! */
|
|
|
|
memcpy((u8 *)&ctx->nonce + 1, key + len -
|
|
|
|
EIP197_AEAD_IPSEC_CCM_NONCE_SIZE,
|
|
|
|
EIP197_AEAD_IPSEC_CCM_NONCE_SIZE);
|
|
|
|
|
|
|
|
len -= EIP197_AEAD_IPSEC_CCM_NONCE_SIZE;
|
|
|
|
return safexcel_aead_ccm_setkey(ctfm, key, len);
|
|
|
|
}
|
|
|
|
|
|
|
|
static int safexcel_rfc4309_ccm_setauthsize(struct crypto_aead *tfm,
|
|
|
|
unsigned int authsize)
|
|
|
|
{
|
|
|
|
/* Borrowed from crypto/ccm.c */
|
|
|
|
switch (authsize) {
|
|
|
|
case 8:
|
|
|
|
case 12:
|
|
|
|
case 16:
|
|
|
|
break;
|
|
|
|
default:
|
|
|
|
return -EINVAL;
|
|
|
|
}
|
|
|
|
|
|
|
|
return 0;
|
|
|
|
}
|
|
|
|
|
|
|
|
static int safexcel_rfc4309_ccm_encrypt(struct aead_request *req)
|
|
|
|
{
|
|
|
|
struct safexcel_cipher_req *creq = aead_request_ctx(req);
|
|
|
|
|
|
|
|
/* Borrowed from crypto/ccm.c */
|
|
|
|
if (req->assoclen != 16 && req->assoclen != 20)
|
|
|
|
return -EINVAL;
|
|
|
|
|
|
|
|
return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
|
|
|
|
}
|
|
|
|
|
|
|
|
static int safexcel_rfc4309_ccm_decrypt(struct aead_request *req)
|
|
|
|
{
|
|
|
|
struct safexcel_cipher_req *creq = aead_request_ctx(req);
|
|
|
|
|
|
|
|
/* Borrowed from crypto/ccm.c */
|
|
|
|
if (req->assoclen != 16 && req->assoclen != 20)
|
|
|
|
return -EINVAL;
|
|
|
|
|
|
|
|
return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
|
|
|
|
}
|
|
|
|
|
|
|
|
static int safexcel_rfc4309_ccm_cra_init(struct crypto_tfm *tfm)
|
|
|
|
{
|
|
|
|
struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
|
|
|
|
int ret;
|
|
|
|
|
|
|
|
ret = safexcel_aead_ccm_cra_init(tfm);
|
|
|
|
ctx->aead = EIP197_AEAD_TYPE_IPSEC_ESP;
|
|
|
|
return ret;
|
|
|
|
}
|
|
|
|
|
|
|
|
/*
 * Algorithm template for rfc4309(ccm(aes)): AES-CCM with the 3 byte
 * implicit nonce carried in the key, as used by IPsec ESP.
 */
struct safexcel_alg_template safexcel_alg_rfc4309_ccm = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_CBC_MAC_ALL,
	.alg.aead = {
		.setkey = safexcel_rfc4309_ccm_setkey,
		.setauthsize = safexcel_rfc4309_ccm_setauthsize,
		.encrypt = safexcel_rfc4309_ccm_encrypt,
		.decrypt = safexcel_rfc4309_ccm_decrypt,
		.ivsize = EIP197_AEAD_IPSEC_IV_SIZE,
		.maxauthsize = AES_BLOCK_SIZE,
		.base = {
			.cra_name = "rfc4309(ccm(aes))",
			.cra_driver_name = "safexcel-rfc4309-ccm-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			/* stream-cipher-like interface: 1 byte "blocks" */
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_rfc4309_ccm_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
|