path: root/target/linux/apm821xx/patches-4.14/022-0005-crypto-crypto4xx-add-aes-ctr-support.patch
From 98e87e3d933b8e504ea41b8857c038d2cd06cddc Mon Sep 17 00:00:00 2001
From: Christian Lamparter <chunkeey@gmail.com>
Date: Thu, 19 Apr 2018 18:41:54 +0200
Subject: [PATCH 5/8] crypto: crypto4xx - add aes-ctr support

This patch adds support for the aes-ctr skcipher.

name         : ctr(aes)
driver       : ctr-aes-ppc4xx
module       : crypto4xx
priority     : 300
refcnt       : 1
selftest     : passed
internal     : no
type         : skcipher
async        : yes
blocksize    : 16
min keysize  : 16
max keysize  : 32
ivsize       : 16
chunksize    : 16
walksize     : 16

The hardware uses only the last 32 bits of the IV as the counter, while
the kernel tests (aes_ctr_enc_tv_template[4] for example) expect that
the whole IV is a counter. To make this work, the driver will fall back
to software if the counter is going to overflow.

The aead's crypto4xx_setup_fallback() function is renamed to
crypto4xx_aead_setup_fallback().

Signed-off-by: Christian Lamparter <chunkeey@gmail.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
---
 drivers/crypto/amcc/crypto4xx_alg.c  | 91 ++++++++++++++++++++++++++--
 drivers/crypto/amcc/crypto4xx_core.c | 37 +++++++++++
 drivers/crypto/amcc/crypto4xx_core.h |  5 ++
 3 files changed, 127 insertions(+), 6 deletions(-)

--- a/drivers/crypto/amcc/crypto4xx_alg.c
+++ b/drivers/crypto/amcc/crypto4xx_alg.c
@@ -240,6 +240,85 @@ int crypto4xx_rfc3686_decrypt(struct skc
 				  ctx->sa_out, ctx->sa_len, 0);
 }
 
+static int
+crypto4xx_ctr_crypt(struct skcipher_request *req, bool encrypt)
+{
+	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
+	struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
+	size_t iv_len = crypto_skcipher_ivsize(cipher);
+	unsigned int counter = be32_to_cpup((__be32 *)(req->iv + iv_len - 4));
+	unsigned int nblks = ALIGN(req->cryptlen, AES_BLOCK_SIZE) /
+			AES_BLOCK_SIZE;
+
+	/*
+	 * The hardware uses only the last 32-bits as the counter while the
+	 * kernel tests (aes_ctr_enc_tv_template[4] for example) expect that
+	 * the whole IV is a counter.  So fall back if the counter is going to
+	 * overflow.
+	 */
+	if (counter + nblks < counter) {
+		struct skcipher_request *subreq = skcipher_request_ctx(req);
+		int ret;
+
+		skcipher_request_set_tfm(subreq, ctx->sw_cipher.cipher);
+		skcipher_request_set_callback(subreq, req->base.flags,
+			NULL, NULL);
+		skcipher_request_set_crypt(subreq, req->src, req->dst,
+			req->cryptlen, req->iv);
+		ret = encrypt ? crypto_skcipher_encrypt(subreq)
+			: crypto_skcipher_decrypt(subreq);
+		skcipher_request_zero(subreq);
+		return ret;
+	}
+
+	return encrypt ? crypto4xx_encrypt_iv(req)
+		       : crypto4xx_decrypt_iv(req);
+}
+
+static int crypto4xx_sk_setup_fallback(struct crypto4xx_ctx *ctx,
+				       struct crypto_skcipher *cipher,
+				       const u8 *key,
+				       unsigned int keylen)
+{
+	int rc;
+
+	crypto_skcipher_clear_flags(ctx->sw_cipher.cipher,
+				    CRYPTO_TFM_REQ_MASK);
+	crypto_skcipher_set_flags(ctx->sw_cipher.cipher,
+		crypto_skcipher_get_flags(cipher) & CRYPTO_TFM_REQ_MASK);
+	rc = crypto_skcipher_setkey(ctx->sw_cipher.cipher, key, keylen);
+	crypto_skcipher_clear_flags(cipher, CRYPTO_TFM_RES_MASK);
+	crypto_skcipher_set_flags(cipher,
+		crypto_skcipher_get_flags(ctx->sw_cipher.cipher) &
+			CRYPTO_TFM_RES_MASK);
+
+	return rc;
+}
+
+int crypto4xx_setkey_aes_ctr(struct crypto_skcipher *cipher,
+			     const u8 *key, unsigned int keylen)
+{
+	struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
+	int rc;
+
+	rc = crypto4xx_sk_setup_fallback(ctx, cipher, key, keylen);
+	if (rc)
+		return rc;
+
+	return crypto4xx_setkey_aes(cipher, key, keylen,
+		CRYPTO_MODE_CTR, CRYPTO_FEEDBACK_MODE_NO_FB);
+}
+
+int crypto4xx_encrypt_ctr(struct skcipher_request *req)
+{
+	return crypto4xx_ctr_crypt(req, true);
+}
+
+int crypto4xx_decrypt_ctr(struct skcipher_request *req)
+{
+	return crypto4xx_ctr_crypt(req, false);
+}
+
 static inline bool crypto4xx_aead_need_fallback(struct aead_request *req,
 						bool is_ccm, bool decrypt)
 {
@@ -282,10 +361,10 @@ static int crypto4xx_aead_fallback(struc
 			    crypto_aead_encrypt(subreq);
 }
 
-static int crypto4xx_setup_fallback(struct crypto4xx_ctx *ctx,
-				    struct crypto_aead *cipher,
-				    const u8 *key,
-				    unsigned int keylen)
+static int crypto4xx_aead_setup_fallback(struct crypto4xx_ctx *ctx,
+					 struct crypto_aead *cipher,
+					 const u8 *key,
+					 unsigned int keylen)
 {
 	int rc;
 
@@ -313,7 +392,7 @@ int crypto4xx_setkey_aes_ccm(struct cryp
 	struct dynamic_sa_ctl *sa;
 	int rc = 0;
 
-	rc = crypto4xx_setup_fallback(ctx, cipher, key, keylen);
+	rc = crypto4xx_aead_setup_fallback(ctx, cipher, key, keylen);
 	if (rc)
 		return rc;
 
@@ -472,7 +551,7 @@ int crypto4xx_setkey_aes_gcm(struct cryp
 		return -EINVAL;
 	}
 
-	rc = crypto4xx_setup_fallback(ctx, cipher, key, keylen);
+	rc = crypto4xx_aead_setup_fallback(ctx, cipher, key, keylen);
 	if (rc)
 		return rc;
 
--- a/drivers/crypto/amcc/crypto4xx_core.c
+++ b/drivers/crypto/amcc/crypto4xx_core.c
@@ -941,6 +941,19 @@ static int crypto4xx_sk_init(struct cryp
 	struct crypto4xx_alg *amcc_alg;
 	struct crypto4xx_ctx *ctx =  crypto_skcipher_ctx(sk);
 
+	if (alg->base.cra_flags & CRYPTO_ALG_NEED_FALLBACK) {
+		ctx->sw_cipher.cipher =
+			crypto_alloc_skcipher(alg->base.cra_name, 0,
+					      CRYPTO_ALG_NEED_FALLBACK |
+					      CRYPTO_ALG_ASYNC);
+		if (IS_ERR(ctx->sw_cipher.cipher))
+			return PTR_ERR(ctx->sw_cipher.cipher);
+
+		crypto_skcipher_set_reqsize(sk,
+			sizeof(struct skcipher_request) + 32 +
+			crypto_skcipher_reqsize(ctx->sw_cipher.cipher));
+	}
+
 	amcc_alg = container_of(alg, struct crypto4xx_alg, alg.u.cipher);
 	crypto4xx_ctx_init(amcc_alg, ctx);
 	return 0;
@@ -956,6 +969,8 @@ static void crypto4xx_sk_exit(struct cry
 	struct crypto4xx_ctx *ctx =  crypto_skcipher_ctx(sk);
 
 	crypto4xx_common_exit(ctx);
+	if (ctx->sw_cipher.cipher)
+		crypto_free_skcipher(ctx->sw_cipher.cipher);
 }
 
 static int crypto4xx_aead_init(struct crypto_aead *tfm)
@@ -1145,6 +1160,28 @@ static struct crypto4xx_alg_common crypt
 		.init = crypto4xx_sk_init,
 		.exit = crypto4xx_sk_exit,
 	} },
+	{ .type = CRYPTO_ALG_TYPE_SKCIPHER, .u.cipher = {
+		.base = {
+			.cra_name = "ctr(aes)",
+			.cra_driver_name = "ctr-aes-ppc4xx",
+			.cra_priority = CRYPTO4XX_CRYPTO_PRIORITY,
+			.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER |
+				CRYPTO_ALG_NEED_FALLBACK |
+				CRYPTO_ALG_ASYNC |
+				CRYPTO_ALG_KERN_DRIVER_ONLY,
+			.cra_blocksize = AES_BLOCK_SIZE,
+			.cra_ctxsize = sizeof(struct crypto4xx_ctx),
+			.cra_module = THIS_MODULE,
+		},
+		.min_keysize = AES_MIN_KEY_SIZE,
+		.max_keysize = AES_MAX_KEY_SIZE,
+		.ivsize	= AES_IV_SIZE,
+		.setkey	= crypto4xx_setkey_aes_ctr,
+		.encrypt = crypto4xx_encrypt_ctr,
+		.decrypt = crypto4xx_decrypt_ctr,
+		.init = crypto4xx_sk_init,
+		.exit = crypto4xx_sk_exit,
+	} },
 	{ .type = CRYPTO_ALG_TYPE_SKCIPHER, .u.cipher = {
 		.base = {
 			.cra_name = "rfc3686(ctr(aes))",
--- a/drivers/crypto/amcc/crypto4xx_core.h
+++ b/drivers/crypto/amcc/crypto4xx_core.h
@@ -128,6 +128,7 @@ struct crypto4xx_ctx {
 	__le32 iv_nonce;
 	u32 sa_len;
 	union {
+		struct crypto_skcipher *cipher;
 		struct crypto_aead *aead;
 	} sw_cipher;
 };
@@ -163,12 +164,16 @@ int crypto4xx_setkey_aes_cbc(struct cryp
 			     const u8 *key, unsigned int keylen);
 int crypto4xx_setkey_aes_cfb(struct crypto_skcipher *cipher,
 			     const u8 *key, unsigned int keylen);
+int crypto4xx_setkey_aes_ctr(struct crypto_skcipher *cipher,
+			     const u8 *key, unsigned int keylen);
 int crypto4xx_setkey_aes_ecb(struct crypto_skcipher *cipher,
 			     const u8 *key, unsigned int keylen);
 int crypto4xx_setkey_aes_ofb(struct crypto_skcipher *cipher,
 			     const u8 *key, unsigned int keylen);
 int crypto4xx_setkey_rfc3686(struct crypto_skcipher *cipher,
 			     const u8 *key, unsigned int keylen);
+int crypto4xx_encrypt_ctr(struct skcipher_request *req);
+int crypto4xx_decrypt_ctr(struct skcipher_request *req);
 int crypto4xx_encrypt_iv(struct skcipher_request *req);
 int crypto4xx_decrypt_iv(struct skcipher_request *req);
 int crypto4xx_encrypt_noiv(struct skcipher_request *req);