/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * Hash algorithms.
 *
 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
 */

#ifndef _CRYPTO_INTERNAL_HASH_H
#define _CRYPTO_INTERNAL_HASH_H

#include <crypto/algapi.h>
#include <crypto/hash.h>

struct ahash_request;
struct scatterlist;

struct crypto_hash_walk {
	char *data;

	unsigned int offset;
	unsigned int alignmask;

	struct page *pg;
	unsigned int entrylen;

	unsigned int total;
	struct scatterlist *sg;

	unsigned int flags;
};

struct ahash_instance {
	void (*free)(struct ahash_instance *inst);
	union {
		struct {
			char head[offsetof(struct ahash_alg, halg.base)];
			struct crypto_instance base;
		} s;
		struct ahash_alg alg;
	};
};

struct shash_instance {
	void (*free)(struct shash_instance *inst);
	union {
		struct {
			char head[offsetof(struct shash_alg, base)];
			struct crypto_instance base;
		} s;
		struct shash_alg alg;
	};
};

struct crypto_ahash_spawn {
	struct crypto_spawn base;
};

struct crypto_shash_spawn {
	struct crypto_spawn base;
};

int crypto_hash_walk_done(struct crypto_hash_walk *walk, int err);
int crypto_hash_walk_first(struct ahash_request *req,
			   struct crypto_hash_walk *walk);
int crypto_ahash_walk_first(struct ahash_request *req,
			    struct crypto_hash_walk *walk);

static inline int crypto_ahash_walk_done(struct crypto_hash_walk *walk,
					 int err)
{
	return crypto_hash_walk_done(walk, err);
}

static inline int crypto_hash_walk_last(struct crypto_hash_walk *walk)
{
	return !(walk->entrylen | walk->total);
}

static inline int crypto_ahash_walk_last(struct crypto_hash_walk *walk)
{
	return crypto_hash_walk_last(walk);
}
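
/*
 * Example (illustrative sketch, not part of the original header): the walk
 * API above is normally consumed in a loop of the following shape, where
 * "req" is the ahash_request being processed and "desc" is a hypothetical
 * shash descriptor the caller has already set up.  The shash_ahash_*()
 * helpers declared further down are built on this walk:
 *
 *	struct crypto_hash_walk walk;
 *	int nbytes;
 *
 *	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
 *	     nbytes = crypto_hash_walk_done(&walk, nbytes))
 *		nbytes = crypto_shash_update(desc, walk.data, nbytes);
 *
 *	return nbytes;
 *
 * crypto_hash_walk_done() takes the result of processing the previous chunk
 * (0 or a negative error) and returns the size of the next mapped chunk,
 * 0 once the walk is complete, or that error.
 */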

int crypto_register_ahash(struct ahash_alg *alg);
void crypto_unregister_ahash(struct ahash_alg *alg);
int crypto_register_ahashes(struct ahash_alg *algs, int count);
void crypto_unregister_ahashes(struct ahash_alg *algs, int count);
int ahash_register_instance(struct crypto_template *tmpl,
			    struct ahash_instance *inst);
int shash_no_setkey(struct crypto_shash *tfm, const u8 *key,
		    unsigned int keylen);

static inline bool crypto_shash_alg_has_setkey(struct shash_alg *alg)
{
	return alg->setkey != shash_no_setkey;
}

static inline bool crypto_shash_alg_needs_key(struct shash_alg *alg)
{
	return crypto_shash_alg_has_setkey(alg) &&
		!(alg->base.cra_flags & CRYPTO_ALG_OPTIONAL_KEY);
}
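
/*
 * Example (sketch, not part of the original header): a template whose
 * construction cannot tolerate a keyed underlying hash can reject it in its
 * ->create() handler; "salg" here stands for the underlying shash_alg the
 * hypothetical template has already looked up:
 *
 *	if (crypto_shash_alg_has_setkey(salg))
 *		return -EINVAL;
 *
 * crypto_shash_alg_needs_key() additionally honours CRYPTO_ALG_OPTIONAL_KEY,
 * so it expresses "this transform must be keyed before it may digest".
 */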
bool crypto_hash_alg_has_setkey(struct hash_alg_common *halg);

int crypto_grab_ahash(struct crypto_ahash_spawn *spawn,
		      struct crypto_instance *inst,
		      const char *name, u32 type, u32 mask);

static inline void crypto_drop_ahash(struct crypto_ahash_spawn *spawn)
{
	crypto_drop_spawn(&spawn->base);
}

static inline struct hash_alg_common *crypto_spawn_ahash_alg(
	struct crypto_ahash_spawn *spawn)
{
	return __crypto_hash_alg_common(spawn->base.alg);
}

int crypto_register_shash(struct shash_alg *alg);
void crypto_unregister_shash(struct shash_alg *alg);
int crypto_register_shashes(struct shash_alg *algs, int count);
void crypto_unregister_shashes(struct shash_alg *algs, int count);
int shash_register_instance(struct crypto_template *tmpl,
			    struct shash_instance *inst);
void shash_free_singlespawn_instance(struct shash_instance *inst);
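
/*
 * Example (sketch with made-up "foo" names, assuming a 256-bit digest and a
 * 64-byte block): a self-contained synchronous hash is typically registered
 * and unregistered from its module init/exit like this:
 *
 *	static struct shash_alg foo_alg = {
 *		.digestsize	= 32,
 *		.descsize	= sizeof(struct foo_desc_ctx),
 *		.init		= foo_init,
 *		.update		= foo_update,
 *		.final		= foo_final,
 *		.base		= {
 *			.cra_name	 = "foo256",
 *			.cra_driver_name = "foo256-generic",
 *			.cra_priority	 = 100,
 *			.cra_blocksize	 = 64,
 *			.cra_module	 = THIS_MODULE,
 *		},
 *	};
 *
 *	static int __init foo_mod_init(void)
 *	{
 *		return crypto_register_shash(&foo_alg);
 *	}
 *
 *	static void __exit foo_mod_exit(void)
 *	{
 *		crypto_unregister_shash(&foo_alg);
 *	}
 */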

int crypto_grab_shash(struct crypto_shash_spawn *spawn,
		      struct crypto_instance *inst,
		      const char *name, u32 type, u32 mask);

static inline void crypto_drop_shash(struct crypto_shash_spawn *spawn)
{
	crypto_drop_spawn(&spawn->base);
}

static inline struct shash_alg *crypto_spawn_shash_alg(
	struct crypto_shash_spawn *spawn)
{
	return __crypto_shash_alg(spawn->base.alg);
}
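
/*
 * Example (sketch, hypothetical template): in a template's ->create()
 * handler the spawn lives in the instance context, is bound to the
 * requested inner algorithm with crypto_grab_shash(), and the resolved
 * shash_alg can then be inspected before the instance is registered.
 * "tb" is the usual attribute array passed to ->create():
 *
 *	struct shash_instance *inst;
 *	struct crypto_shash_spawn *spawn;
 *	struct shash_alg *salg;
 *	int err;
 *
 *	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
 *	if (!inst)
 *		return -ENOMEM;
 *	spawn = shash_instance_ctx(inst);
 *
 *	err = crypto_grab_shash(spawn, shash_crypto_instance(inst),
 *				crypto_attr_alg_name(tb[1]), 0, 0);
 *	if (err)
 *		goto err_free_inst;
 *	salg = crypto_spawn_shash_alg(spawn);
 *
 * A real template would derive type/mask from its attributes, fill in
 * inst->alg and inst->free (shash_free_singlespawn_instance() fits the
 * single-spawn case), and finish with shash_register_instance().
 */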

int shash_ahash_update(struct ahash_request *req, struct shash_desc *desc);
int shash_ahash_finup(struct ahash_request *req, struct shash_desc *desc);
int shash_ahash_digest(struct ahash_request *req, struct shash_desc *desc);

int crypto_init_shash_ops_async(struct crypto_tfm *tfm);

static inline void *crypto_ahash_ctx(struct crypto_ahash *tfm)
{
	return crypto_tfm_ctx(crypto_ahash_tfm(tfm));
}

static inline struct ahash_alg *__crypto_ahash_alg(struct crypto_alg *alg)
{
	return container_of(__crypto_hash_alg_common(alg), struct ahash_alg,
			    halg);
}

static inline void crypto_ahash_set_reqsize(struct crypto_ahash *tfm,
					    unsigned int reqsize)
{
	tfm->reqsize = reqsize;
}
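
/*
 * Example (sketch, made-up "foo" driver): an ahash driver normally sets its
 * per-request context size once, when the transform is initialised, so that
 * ahash_request_ctx() has room for the driver's bookkeeping:
 *
 *	static int foo_cra_init(struct crypto_tfm *tfm)
 *	{
 *		crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
 *					 sizeof(struct foo_request_ctx));
 *		return 0;
 *	}
 *
 * (__crypto_ahash_cast() and ahash_request_ctx() come from <crypto/hash.h>.)
 */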

static inline struct crypto_instance *ahash_crypto_instance(
	struct ahash_instance *inst)
{
	return &inst->s.base;
}

static inline struct ahash_instance *ahash_instance(
	struct crypto_instance *inst)
{
	return container_of(inst, struct ahash_instance, s.base);
}

static inline void *ahash_instance_ctx(struct ahash_instance *inst)
{
	return crypto_instance_ctx(ahash_crypto_instance(inst));
}

static inline void ahash_request_complete(struct ahash_request *req, int err)
{
	req->base.complete(&req->base, err);
}

static inline u32 ahash_request_flags(struct ahash_request *req)
{
	return req->base.flags;
}

static inline struct crypto_ahash *crypto_spawn_ahash(
	struct crypto_ahash_spawn *spawn)
{
	return crypto_spawn_tfm2(&spawn->base);
}

static inline int ahash_enqueue_request(struct crypto_queue *queue,
					struct ahash_request *request)
{
	return crypto_enqueue_request(queue, &request->base);
}

static inline struct ahash_request *ahash_dequeue_request(
	struct crypto_queue *queue)
{
	return ahash_request_cast(crypto_dequeue_request(queue));
}
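
/*
 * Example (sketch, hypothetical "foo" device driver): the two helpers above
 * pair up around a driver-owned crypto_queue.  Requests are queued from the
 * request-submission path and pulled off again by the driver's worker, which
 * signals completion with ahash_request_complete().
 *
 * Submission path (the enqueue return value is handed back to the caller):
 *
 *	spin_lock_bh(&dev->lock);
 *	err = ahash_enqueue_request(&dev->queue, req);
 *	spin_unlock_bh(&dev->lock);
 *
 * Worker / completion path:
 *
 *	spin_lock_bh(&dev->lock);
 *	req = ahash_dequeue_request(&dev->queue);
 *	spin_unlock_bh(&dev->lock);
 *	if (req) {
 *		err = foo_process(dev, req);
 *		ahash_request_complete(req, err);
 *	}
 */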

static inline void *crypto_shash_ctx(struct crypto_shash *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline struct crypto_instance *shash_crypto_instance(
	struct shash_instance *inst)
{
	return &inst->s.base;
}

static inline struct shash_instance *shash_instance(
	struct crypto_instance *inst)
{
	return container_of(inst, struct shash_instance, s.base);
}

static inline struct shash_instance *shash_alg_instance(
	struct crypto_shash *shash)
{
	return shash_instance(crypto_tfm_alg_instance(&shash->base));
}

static inline void *shash_instance_ctx(struct shash_instance *inst)
{
	return crypto_instance_ctx(shash_crypto_instance(inst));
}

static inline struct crypto_shash *crypto_spawn_shash(
	struct crypto_shash_spawn *spawn)
{
	return crypto_spawn_tfm2(&spawn->base);
}
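
/*
 * Example (sketch, hypothetical "foo" template whose ->create() stored its
 * spawn in the instance context as in the earlier sketch): the parent
 * transform's initialisation hook usually walks back from the tfm to its
 * instance, fetches the spawn, and spawns the child shash from it.  How the
 * hook is wired into the instance depends on the kernel version:
 *
 *	static int foo_init_tfm(struct crypto_shash *parent)
 *	{
 *		struct foo_ctx *ctx = crypto_shash_ctx(parent);
 *		struct shash_instance *inst = shash_alg_instance(parent);
 *		struct crypto_shash_spawn *spawn = shash_instance_ctx(inst);
 *		struct crypto_shash *child;
 *
 *		child = crypto_spawn_shash(spawn);
 *		if (IS_ERR(child))
 *			return PTR_ERR(child);
 *
 *		ctx->child = child;
 *		return 0;
 *	}
 *
 * The matching exit hook would free ctx->child with crypto_free_shash().
 */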

static inline void *crypto_shash_ctx_aligned(struct crypto_shash *tfm)
{
	return crypto_tfm_ctx_aligned(&tfm->base);
}

static inline struct crypto_shash *__crypto_shash_cast(struct crypto_tfm *tfm)
{
	return container_of(tfm, struct crypto_shash, base);
}

#endif /* _CRYPTO_INTERNAL_HASH_H */