// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Cryptographic API.
 *
 * Single-block cipher operations.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 */

#include <crypto/algapi.h>
#include <crypto/internal/cipher.h>
#include <linux/kernel.h>
#include <linux/crypto.h>
#include <linux/errno.h>
#include <linux/slab.h>
#include <linux/string.h>
#include "internal.h"

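/*
 * Slow path for crypto_cipher_setkey(): the caller's key pointer does not
 * satisfy the algorithm's alignmask.  Copy the key into a temporary buffer
 * rounded up to the required alignment, run ->cia_setkey() on the aligned
 * copy, then scrub and free the copy with kfree_sensitive() so no key
 * material is left behind.
 */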
static int setkey_unaligned(struct crypto_cipher *tfm, const u8 *key,
			    unsigned int keylen)
{
	struct cipher_alg *cia = crypto_cipher_alg(tfm);
	unsigned long alignmask = crypto_cipher_alignmask(tfm);
	int ret;
	u8 *buffer, *alignbuffer;
	unsigned long absize;

	absize = keylen + alignmask;
	buffer = kmalloc(absize, GFP_ATOMIC);
	if (!buffer)
		return -ENOMEM;

	alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
	memcpy(alignbuffer, key, keylen);
	ret = cia->cia_setkey(crypto_cipher_tfm(tfm), alignbuffer, keylen);
	kfree_sensitive(buffer);
	return ret;
}

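/*
 * Set the key for a single-block cipher transform.  The key length is
 * checked against the algorithm's cia_min_keysize/cia_max_keysize bounds;
 * a misaligned key pointer is bounced through setkey_unaligned(), otherwise
 * ->cia_setkey() operates on the caller's buffer directly.
 */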
int crypto_cipher_setkey(struct crypto_cipher *tfm,
			 const u8 *key, unsigned int keylen)
{
	struct cipher_alg *cia = crypto_cipher_alg(tfm);
	unsigned long alignmask = crypto_cipher_alignmask(tfm);

	if (keylen < cia->cia_min_keysize || keylen > cia->cia_max_keysize)
		return -EINVAL;

	if ((unsigned long)key & alignmask)
		return setkey_unaligned(tfm, key, keylen);

	return cia->cia_setkey(crypto_cipher_tfm(tfm), key, keylen);
}
EXPORT_SYMBOL_NS_GPL(crypto_cipher_setkey, CRYPTO_INTERNAL);

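/*
 * Common helper for crypto_cipher_encrypt_one()/crypto_cipher_decrypt_one().
 * Processes exactly one block.  If either the source or the destination
 * pointer violates the algorithm's alignmask, the block is bounced through
 * an aligned on-stack buffer (sized for the worst case of
 * MAX_CIPHER_BLOCKSIZE plus MAX_CIPHER_ALIGNMASK); otherwise the algorithm's
 * ->cia_encrypt()/->cia_decrypt() routine works on the caller's buffers
 * in place.
 */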
static inline void cipher_crypt_one(struct crypto_cipher *tfm,
				    u8 *dst, const u8 *src, bool enc)
{
	unsigned long alignmask = crypto_cipher_alignmask(tfm);
	struct cipher_alg *cia = crypto_cipher_alg(tfm);
	void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
		enc ? cia->cia_encrypt : cia->cia_decrypt;

	if (unlikely(((unsigned long)dst | (unsigned long)src) & alignmask)) {
		unsigned int bs = crypto_cipher_blocksize(tfm);
		u8 buffer[MAX_CIPHER_BLOCKSIZE + MAX_CIPHER_ALIGNMASK];
		u8 *tmp = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);

		memcpy(tmp, src, bs);
		fn(crypto_cipher_tfm(tfm), tmp, tmp);
		memcpy(dst, tmp, bs);
	} else {
		fn(crypto_cipher_tfm(tfm), dst, src);
	}
}

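/*
 * crypto_cipher_encrypt_one()/crypto_cipher_decrypt_one() transform a single
 * block of crypto_cipher_blocksize(tfm) bytes; chaining, IV handling and
 * multi-block walking are left to the caller (typically a mode template or
 * a keyed-hash implementation).  Because these symbols are exported into the
 * CRYPTO_INTERNAL namespace, modular users also need
 * MODULE_IMPORT_NS(CRYPTO_INTERNAL).
 *
 * Illustrative caller, not taken from an in-tree user (tfm, key, keylen,
 * dst and src are placeholder names):
 *
 *	err = crypto_cipher_setkey(tfm, key, keylen);
 *	if (err)
 *		return err;
 *	crypto_cipher_encrypt_one(tfm, dst, src);	/ * one block in, one block out * /
 */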
void crypto_cipher_encrypt_one(struct crypto_cipher *tfm,
			       u8 *dst, const u8 *src)
{
	cipher_crypt_one(tfm, dst, src, true);
}
EXPORT_SYMBOL_NS_GPL(crypto_cipher_encrypt_one, CRYPTO_INTERNAL);

void crypto_cipher_decrypt_one(struct crypto_cipher *tfm,
			       u8 *dst, const u8 *src)
{
	cipher_crypt_one(tfm, dst, src, false);
}
EXPORT_SYMBOL_NS_GPL(crypto_cipher_decrypt_one, CRYPTO_INTERNAL);

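/*
 * Clone a cipher transform into a new, independent handle for the same
 * algorithm.  The allocation uses GFP_ATOMIC, so this does not sleep.
 * Algorithms with a ->cra_init hook (i.e. those needing extra per-tfm
 * setup) are rejected with -ENOSYS, and -ESTALE is returned if the
 * algorithm's module reference can no longer be taken.  Only crt_flags is
 * carried over from the original handle.
 */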
struct crypto_cipher *crypto_clone_cipher(struct crypto_cipher *cipher)
{
	struct crypto_tfm *tfm = crypto_cipher_tfm(cipher);
	struct crypto_alg *alg = tfm->__crt_alg;
	struct crypto_cipher *ncipher;
	struct crypto_tfm *ntfm;

	/* Algorithms needing per-tfm initialisation cannot be cloned here. */
	if (alg->cra_init)
		return ERR_PTR(-ENOSYS);

	/* Pin the algorithm (and its module) for the lifetime of the clone. */
	if (unlikely(!crypto_mod_get(alg)))
		return ERR_PTR(-ESTALE);

	ntfm = __crypto_alloc_tfmgfp(alg, CRYPTO_ALG_TYPE_CIPHER,
				     CRYPTO_ALG_TYPE_MASK, GFP_ATOMIC);
	if (IS_ERR(ntfm)) {
		crypto_mod_put(alg);
		return ERR_CAST(ntfm);
	}

	ntfm->crt_flags = tfm->crt_flags;

	ncipher = __crypto_cipher_cast(ntfm);

	return ncipher;
}
EXPORT_SYMBOL_GPL(crypto_clone_cipher);